/// <summary>
/// Downloads VWAP quotes for every degree in [-16, +16], writes one CSV per degree
/// ("ticks vwap e+NN.csv" / "ticks vwap e-NN.csv"), streams each finished file into
/// <paramref name="zipStream"/>, and deletes the temporary file afterwards.
/// </summary>
/// <param name="zipStream">Open zip output stream the CSV entries are appended to.</param>
void DownloadVWAPCSVNew(ZipOutputStream zipStream)
{
    for (int deg = -16; deg <= 16; deg++)
    {
        DownloadQuotesEnumerator enumerator;
        try
        {
            enumerator = quoteClient.DownloadVWAPQuotes(symbol, (short)deg, from, to, 500);
        }
        catch
        {
            // Best effort: a degree whose download fails is skipped entirely.
            continue;
        }

        // Fix: 'sign' was computed but never used; the filename re-derived the same
        // value inline via Math.Sign (deg >= 0 -> '+', deg < 0 -> '-'). Output is identical.
        char sign = deg >= 0 ? '+' : '-';
        string filename = $"ticks vwap e{sign}{Math.Abs(deg):d2}.csv";

        using (StreamWriter file = File.CreateText(filename))
        {
            // currentTempFile lets cleanup code elsewhere remove a half-written file.
            currentTempFile = filename;
            DateTime lastQuoteTime = DateTime.MinValue;
            int repeatingQuotes = 0;

            for (Quote quote = enumerator.Next(-1); quote != null; quote = enumerator.Next(-1))
            {
                StringBuilder builder = new StringBuilder();
                builder.Append(quote.CreatingTime.ToString("yyyy.MM.dd HH:mm:ss.fff", CultureInfo.InvariantCulture));

                // Quotes sharing a timestamp get a negative suffix (-1, -2, ...) appended
                // to the time field so rows remain distinguishable.
                if (quote.CreatingTime == lastQuoteTime)
                {
                    repeatingQuotes++;
                }
                else
                {
                    lastQuoteTime = quote.CreatingTime;
                    repeatingQuotes = 0;
                }
                if (repeatingQuotes > 0)
                {
                    builder.Append(-repeatingQuotes);
                }
                builder.Append(",");

                // Best bid, then best ask; empty fields when a side has no levels.
                if (quote.Bids.Count != 0)
                {
                    builder.Append($"{quote.Bids[0].Price},{quote.Bids[0].Volume},");
                }
                else
                {
                    builder.Append(",,");
                }
                if (quote.Asks.Count != 0)
                {
                    builder.Append($"{quote.Asks[0].Price},{quote.Asks[0].Volume},");
                }
                else
                {
                    builder.Append(",,");
                }

                // Drop the trailing comma.
                builder.Remove(builder.Length - 1, 1);
                file.WriteLine(builder);
            }
        }

        // Stream the finished CSV into the archive, then remove the temp file.
        FileInfo fi = new FileInfo(filename);
        ZipEntry newEntry = new ZipEntry(filename);
        newEntry.Size = fi.Length;
        zipStream.PutNextEntry(newEntry);
        byte[] buffer = new byte[4096];
        using (FileStream streamReader = File.OpenRead(filename))
        {
            StreamUtils.Copy(streamReader, zipStream, buffer);
        }
        zipStream.CloseEntry();
        File.Delete(filename);
        currentTempFile = null;
    }
}
/// <summary>
/// Parses a Standard MIDI File: locates the "MThd" header, validates it, then reads
/// every "MTrk" chunk via parseTrack. Populates tracks, comment, duration and bitrate.
/// </summary>
/// <param name="source">Reader positioned at (or before) the MIDI header.</param>
/// <param name="readTagParams">Tag-reading options (unused directly here).</param>
/// <returns>false when the 4-byte header signature is invalid; true otherwise.</returns>
protected override bool read(BinaryReader source, MetaDataIO.ReadTagParams readTagParams)
{
    byte[] header;
    string trigger;
    IList<MidiTrack> tracks = new List<MidiTrack>();

    resetData();

    // Ignores everything (comments) written before the MIDI header.
    // (Older byte-by-byte scan kept for reference:)
    /*
     * trigger = "";
     * while (trigger != MIDI_FILE_HEADER)
     * {
     *     trigger += new String(StreamUtils.ReadOneByteChars(source, 1));
     *     if (trigger.Length > 4) trigger = trigger.Remove(0, 1);
     * }
     */
    StreamUtils.FindSequence(source.BaseStream, Utils.Latin1Encoding.GetBytes(MIDI_FILE_HEADER));

    // Ready to read header data...
    // Header layout after "MThd": 4-byte chunk length (must be 00 00 00 06),
    // then format type, track count and timebase.
    header = source.ReadBytes(10);
    if ((header[0] != 0) ||
        (header[1] != 0) ||
        (header[2] != 0) ||
        (header[3] != 6)
        )
    {
        Logging.LogDelegator.GetLogDelegate()(Log.LV_ERROR, "Wrong MIDI header");
        return (false);
    }
    type = header[5];

    // MIDI STRUCTURE TYPE
    // 0 - single-track
    // 1 - multiple tracks, synchronous
    // 2 - multiple tracks, asynchronous
    if (type > 1)
    {
        Logging.LogDelegator.GetLogDelegate()(Log.LV_WARNING, "SMF type 2 MIDI files are partially supported; results may be approximate");
    }

    tagExists = true;

    // Timebase (ticks per quarter note) is stored big-endian in bytes 8-9.
    timebase = (header[8] << 8) + header[9];

    tempo = 0; // maybe (hopefully!) overwritten by parseTrack

    int trackSize = 0;
    int nbTrack = 0;

    comment = new StringBuilder("");

    // Ready to read track data...
    // Scan for "MTrk" chunks until fewer than 4 bytes remain.
    while (source.BaseStream.Position < sizeInfo.FileSize - 4)
    {
        trigger = Utils.Latin1Encoding.GetString(source.ReadBytes(4));

        if (trigger != MIDI_TRACK_HEADER)
        {
            // Not a track header at this offset: back up 3 bytes (1-byte sliding
            // window) and search forward for the next "MTrk" signature.
            source.BaseStream.Seek(-3, SeekOrigin.Current);
            if (!StreamUtils.FindSequence(source.BaseStream, Utils.Latin1Encoding.GetBytes(MIDI_TRACK_HEADER)))
            {
                break;
            }
        }

        // trackSize is stored in big endian -> needs inverting
        trackSize = StreamUtils.DecodeBEInt32(source.ReadBytes(4));

        tracks.Add(parseTrack(source.ReadBytes(trackSize), nbTrack));
        nbTrack++;
    }

    this.tracks = tracks;

    // parseTrack appends a trailing separator to comment; trim it.
    if (comment.Length > 0)
    {
        comment.Remove(comment.Length - 1, 1);
    }
    tagData.IntegrateValue(TagData.TAG_FIELD_COMMENT, comment.ToString());

    duration = getDuration();
    // NOTE(review): divides by duration without a zero check — presumably getDuration()
    // is always > 0 for a file with at least one track; confirm.
    bitrate = (double)sizeInfo.FileSize / duration;

    return (true);
}
/// <summary>
/// Exports the captured APU log (_log) as a Renoise song (.xrns). Loads "template.xrns"
/// from the target directory, rewrites its pattern pool and pattern sequence from the
/// logged pulse/triangle/noise channel states, then writes the result as a zip copy of
/// the template with Song.xml replaced.
/// </summary>
private void Export_Click(object sender, EventArgs e)
{
    //acquire target
    using var sfd = new SaveFileDialog
    {
        Filter = new FilesystemFilter("Renoise Song Files", new[] { "xrns" }).ToString()
    };
    // Fix: the original returned when the dialog WAS confirmed (and fell through on
    // cancel with an empty path). Bail out only when the user cancels.
    if (!sfd.ShowDialog().IsOk())
    {
        return;
    }

    // configuration:
    var outPath = sfd.FileName;
    string templatePath = Path.Combine(Path.GetDirectoryName(outPath) ?? "", "template.xrns");
    int configuredPatternLength = int.Parse(txtPatternLength.Text);

    // load template
    var msSongXml = new MemoryStream();
    var zfTemplate = new ZipFile(templatePath);
    {
        using var zis = zfTemplate.GetInputStream(zfTemplate.GetEntry("Song.xml"));
        byte[] buffer = new byte[4096]; // 4K is optimum
        StreamUtils.Copy(zis, msSongXml, buffer);
    }
    // Fix: release the template archive once Song.xml is copied out, so the handle is
    // not leaked and the later File.Copy(templatePath, outPath) cannot hit a sharing
    // violation on the still-open file.
    zfTemplate.Close();

    var templateRoot = XElement.Parse(Encoding.UTF8.GetString(msSongXml.ToArray()));

    //get the pattern pool, and whack the child nodes
    var xPatterns = templateRoot.XPathSelectElement("//Patterns");
    var xPatternPool = xPatterns.Parent;
    xPatterns.Remove();

    var writer = new StringWriter();
    writer.WriteLine("<Patterns>");

    // Per-channel "currently sounding" state, carried across pattern boundaries so a
    // held note does not retrigger at the start of each pattern.
    int pulse0LastNote = -1;
    int pulse0LastType = -1;
    int pulse1LastNote = -1;
    int pulse1LastType = -1;
    int triLastNote = -1;
    int noiseLastNote = -1;
    int patternCount = 0;
    int time = 0;
    while (time < _log.Count)
    {
        patternCount++;

        //begin writing pattern: open the tracks list
        writer.WriteLine("<Pattern>");
        writer.WriteLine("<NumberOfLines>{0}</NumberOfLines>", configuredPatternLength);
        writer.WriteLine("<Tracks>");

        //write the pulse tracks
        for (int track = 0; track < 2; track++)
        {
            writer.WriteLine("<PatternTrack type=\"PatternTrack\">");
            writer.WriteLine("<Lines>");
            int lastNote = track == 0 ? pulse0LastNote : pulse1LastNote;
            int lastType = track == 0 ? pulse0LastType : pulse1LastType;
            for (int i = 0; i < configuredPatternLength; i++)
            {
                int patLine = i;
                int index = i + time;
                if (index >= _log.Count)
                {
                    continue;
                }
                var rec = _log[index];

                PulseState pulse = new PulseState();
                if (track == 0)
                {
                    pulse = rec.Pulse0;
                }
                if (track == 1)
                {
                    pulse = rec.Pulse1;
                }

                // transform quieted notes to dead notes
                // blech its buggy, im tired
                ////if (pulse.vol == 0)
                ////    pulse.en = false;

                // Decide whether this line triggers a note-on, a note-off, or nothing.
                bool keyOff = false, keyOn = false;
                if (lastNote != -1 && !pulse.En)
                {
                    lastNote = -1;
                    lastType = -1;
                    keyOff = true;
                }
                else if (lastNote != pulse.Note && pulse.En)
                {
                    keyOn = true;
                }
                // A duty/type change also retriggers (instrument number encodes the type).
                if (lastType != pulse.Type && pulse.Note != -1)
                {
                    keyOn = true;
                }

                if (pulse.En)
                {
                    lastNote = pulse.Note;
                    lastType = pulse.Type;
                }

                writer.WriteLine("<Line index=\"{0}\">", patLine);
                writer.WriteLine("<NoteColumns>");
                writer.WriteLine("<NoteColumn>");
                if (keyOn)
                {
                    writer.WriteLine("<Note>{0}</Note>", NameForNote(pulse.Note));
                    writer.WriteLine("<Instrument>{0:X2}</Instrument>", pulse.Type);
                }
                else if (keyOff)
                {
                    writer.WriteLine("<Note>OFF</Note>");
                }
                if (lastNote != -1)
                {
                    writer.WriteLine("<Volume>{0:X2}</Volume>", pulse.Vol * 8);
                }
                writer.WriteLine("</NoteColumn>");
                writer.WriteLine("</NoteColumns>");
                writer.WriteLine("</Line>");
            }

            // close PatternTrack
            writer.WriteLine("</Lines>");
            writer.WriteLine("</PatternTrack>");

            // Persist per-track state for the next pattern.
            if (track == 0)
            {
                pulse0LastNote = lastNote;
                pulse0LastType = lastType;
            }
            else
            {
                pulse1LastNote = lastNote;
                pulse1LastType = lastType;
            }
        } // pulse tracks loop

        // triangle track generation
        {
            writer.WriteLine("<PatternTrack type=\"PatternTrack\">");
            writer.WriteLine("<Lines>");
            for (int i = 0; i < configuredPatternLength; i++)
            {
                int patLine = i;
                int index = i + time;
                if (index >= _log.Count)
                {
                    continue;
                }
                var rec = _log[index];

                TriangleState tri = rec.Triangle;
                {
                    bool keyOff = false, keyOn = false;
                    if (triLastNote != -1 && !tri.En)
                    {
                        triLastNote = -1;
                        keyOff = true;
                    }
                    else if (triLastNote != tri.Note && tri.En)
                    {
                        keyOn = true;
                    }

                    if (tri.En)
                    {
                        triLastNote = tri.Note;
                    }

                    writer.WriteLine("<Line index=\"{0}\">", patLine);
                    writer.WriteLine("<NoteColumns>");
                    writer.WriteLine("<NoteColumn>");
                    if (keyOn)
                    {
                        writer.WriteLine("<Note>{0}</Note>", NameForNote(tri.Note));
                        writer.WriteLine("<Instrument>08</Instrument>");
                    }
                    else if (keyOff)
                    {
                        writer.WriteLine("<Note>OFF</Note>");
                    }

                    // no need for tons of these
                    ////if(keyon) writer.WriteLine("<Volume>80</Volume>");

                    writer.WriteLine("</NoteColumn>");
                    writer.WriteLine("</NoteColumns>");
                    writer.WriteLine("</Line>");
                }
            }

            // close PatternTrack
            writer.WriteLine("</Lines>");
            writer.WriteLine("</PatternTrack>");
        } // triangle track generation

        // noise track generation
        {
            writer.WriteLine("<PatternTrack type=\"PatternTrack\">");
            writer.WriteLine("<Lines>");
            for (int i = 0; i < configuredPatternLength; i++)
            {
                int patLine = i;
                int index = i + time;
                if (index >= _log.Count)
                {
                    continue;
                }
                var rec = _log[index];

                NoiseState noise = rec.Noise;

                // transform quieted notes to dead notes
                // blech its buggy, im tired
                ////if (noise.vol == 0)
                ////    noise.en = false;

                {
                    bool keyOff = false, keyOn = false;
                    if (noiseLastNote != -1 && !noise.En)
                    {
                        noiseLastNote = -1;
                        keyOff = true;
                    }
                    else if (noiseLastNote != noise.Note && noise.En)
                    {
                        keyOn = true;
                    }

                    if (noise.En)
                    {
                        noiseLastNote = noise.Note;
                    }

                    writer.WriteLine("<Line index=\"{0}\">", patLine);
                    writer.WriteLine("<NoteColumns>");
                    writer.WriteLine("<NoteColumn>");
                    if (keyOn)
                    {
                        writer.WriteLine("<Note>{0}</Note>", NameForNote(noise.Note));
                        writer.WriteLine("<Instrument>04</Instrument>");
                    }
                    else if (keyOff)
                    {
                        writer.WriteLine("<Note>OFF</Note>");
                    }
                    if (noiseLastNote != -1)
                    {
                        writer.WriteLine("<Volume>{0:X2}</Volume>", noise.Vol * 8);
                    }
                    writer.WriteLine("</NoteColumn>");
                    writer.WriteLine("</NoteColumns>");
                    writer.WriteLine("</Line>");
                }
            }

            // close PatternTrack
            writer.WriteLine("</Lines>");
            writer.WriteLine("</PatternTrack>");
        } // noise track generation

        // write empty track for now for pcm
        for (int track = 4; track < 5; track++)
        {
            writer.WriteLine("<PatternTrack type=\"PatternTrack\">");
            writer.WriteLine("<Lines>");
            writer.WriteLine("</Lines>");
            writer.WriteLine("</PatternTrack>");
        }

        // we definitely need a dummy master track now
        writer.WriteLine("<PatternMasterTrack type=\"PatternMasterTrack\">");
        writer.WriteLine("</PatternMasterTrack>");

        // close tracks
        writer.WriteLine("</Tracks>");

        // close pattern
        writer.WriteLine("</Pattern>");

        time += configuredPatternLength;
    } // main pattern loop

    writer.WriteLine("</Patterns>");
    writer.Flush();

    // Splice the generated pattern list back into the template document.
    var xNewPatternList = XElement.Parse(writer.ToString());
    xPatternPool.Add(xNewPatternList);

    //write pattern sequence
    writer = new StringWriter();
    writer.WriteLine("<SequenceEntries>");
    for (int i = 0; i < patternCount; i++)
    {
        writer.WriteLine("<SequenceEntry>");
        writer.WriteLine("<IsSectionStart>false</IsSectionStart>");
        writer.WriteLine("<Pattern>{0}</Pattern>", i);
        writer.WriteLine("</SequenceEntry>");
    }
    writer.WriteLine("</SequenceEntries>");
    var xPatternSequence = templateRoot.XPathSelectElement("//PatternSequence");
    xPatternSequence.XPathSelectElement("SequenceEntries").Remove();
    xPatternSequence.Add(XElement.Parse(writer.ToString()));

    //copy template file to target
    File.Delete(outPath);
    File.Copy(templatePath, outPath);

    // Replace Song.xml inside the copied archive with the rewritten document.
    var msOutXml = new MemoryStream();
    templateRoot.Save(msOutXml);
    msOutXml.Flush();
    msOutXml.Position = 0;
    var zfOutput = new ZipFile(outPath);
    zfOutput.BeginUpdate();
    zfOutput.Add(new Stupid { Stream = msOutXml }, "Song.xml");
    zfOutput.CommitUpdate();
    zfOutput.Close();

    // for easier debugging, write patterndata XML
    ////DUMP_TO_DISK(msOutXml.ToArray())
}
/// <summary>
/// Writes the recorded flight data (flightdata) as a KML file at <paramref name="filename"/>,
/// including per-mode flight-path line strings, a first-person-view gx:Tour, a plane model
/// placemark and a point placemark per sample. Then packages the KML plus the plane model
/// ("block_plane_0.dae") into a sibling .kmz archive.
/// </summary>
/// <param name="filename">Destination path for the .kml file; the .kmz is written next to it.</param>
private void writeKML(string filename)
{
    // Altitude interpretation depends on firmware: copters log relative altitude,
    // planes/rovers absolute.
    SharpKml.Dom.AltitudeMode altmode = SharpKml.Dom.AltitudeMode.Absolute;
    if (MainV2.cs.firmware == MainV2.Firmwares.ArduPlane || MainV2.cs.firmware == MainV2.Firmwares.ArduRover)
    {
        altmode = SharpKml.Dom.AltitudeMode.Absolute;
    }
    else if (MainV2.cs.firmware == MainV2.Firmwares.ArduCopter2)
    {
        altmode = SharpKml.Dom.AltitudeMode.RelativeToGround;
    }

    // Per-segment line colours, cycled by segment index below.
    Color[] colours = { Color.Red, Color.Orange, Color.Yellow, Color.Green, Color.Blue, Color.Indigo, Color.Violet, Color.Pink };

    Document kml = new Document();

    Tour tour = new Tour() { Name = "First Person View" };
    Playlist tourplaylist = new Playlist();

    AddNamespace(kml, "gx", "http://www.google.com/kml/ext/2.2");

    Style style = new Style();
    style.Id = "yellowLineGreenPoly";
    style.Line = new LineStyle(new Color32(HexStringToColor("7f00ffff")), 4);

    PolygonStyle pstyle = new PolygonStyle();
    pstyle.Color = new Color32(HexStringToColor("7f00ff00"));
    style.Polygon = pstyle;

    kml.AddStyle(style);

    // Shared style for the per-sample track points (small icon, no label).
    Style stylet = new Style();
    stylet.Id = "track";
    SharpKml.Dom.IconStyle ico = new SharpKml.Dom.IconStyle();
    LabelStyle lst = new LabelStyle();
    lst.Scale = 0;
    stylet.Icon = ico;
    ico.Icon = new IconStyle.IconLink(new Uri("http://earth.google.com/images/kml-icons/track-directional/track-none.png"));
    stylet.Icon.Scale = 0.5;
    stylet.Label = lst;
    kml.AddStyle(stylet);

    // create sub folders
    Folder planes = new Folder();
    planes.Name = "Planes";
    kml.AddFeature(planes);

    Folder points = new Folder();
    points.Name = "Points";
    kml.AddFeature(points);

    // coords for line string
    CoordinateCollection coords = new CoordinateCollection();

    int a = 1;      // 1-based sample counter (also drives the progress bar)
    int c = -1;     // flight-path segment counter (one segment per mode change)
    DateTime lasttime = DateTime.MaxValue;
    DateTime starttime = DateTime.MinValue;
    Color stylecolor = Color.AliceBlue;
    string mode = "";
    if (flightdata.Count > 0)
    {
        mode = flightdata[0].mode;
    }
    foreach (CurrentState cs in flightdata)
    {
        // Second half of the progress bar (50-100%) tracks KML generation.
        progressBar1.Value = 50 + (int)((float)a / (float)flightdata.Count * 100.0f / 2.0f);
        progressBar1.Refresh();

        if (starttime == DateTime.MinValue)
        {
            starttime = cs.datetime;
            lasttime = cs.datetime;
        }

        // Close the current path segment on a mode change, or on the final sample.
        if (mode != cs.mode || flightdata.Count == a)
        {
            c++;
            LineString ls = new LineString();
            ls.AltitudeMode = altmode;
            ls.Extrude = true;
            ls.Coordinates = coords;

            Placemark pm = new Placemark();
            pm.Name = c + " Flight Path " + mode;
            pm.StyleUrl = new Uri("#yellowLineGreenPoly", UriKind.Relative);
            pm.Geometry = ls;

            SharpKml.Dom.TimeSpan ts = new SharpKml.Dom.TimeSpan();
            ts.Begin = starttime;
            ts.End = cs.datetime;
            pm.Time = ts;

            // setup for next line
            mode = cs.mode;
            starttime = cs.datetime;

            // NOTE(review): modulo (Length - 1) means the last colour (Pink) is never
            // selected — possibly intentional, confirm before "fixing".
            stylecolor = colours[c % (colours.Length - 1)];
            Style style2 = new Style();
            style2.Line = new LineStyle(new Color32(stylecolor), 4);
            pm.StyleSelector = style2;
            kml.AddFeature(pm);

            coords = new CoordinateCollection();
        }
        coords.Add(new Vector(cs.lat, cs.lng, cs.alt));

        SharpKml.Dom.Timestamp tstamp = new SharpKml.Dom.Timestamp();
        tstamp.When = cs.datetime;

        // Tour camera step: fly to this sample over the real elapsed time.
        FlyTo flyto = new FlyTo();
        flyto.Duration = (cs.datetime - lasttime).TotalMilliseconds / 1000.0;
        flyto.Mode = FlyToMode.Smooth;
        SharpKml.Dom.Camera cam = new SharpKml.Dom.Camera();
        cam.AltitudeMode = altmode;
        cam.Latitude = cs.lat;
        cam.Longitude = cs.lng;
        cam.Altitude = cs.alt;
        cam.Heading = cs.yaw;
        cam.Roll = -cs.roll;
        cam.Tilt = (90 - (cs.pitch * -1));
        cam.GXTimePrimitive = tstamp;
        flyto.View = cam;
        //if (Math.Abs(flyto.Duration.Value) > 0.1)
        {
            tourplaylist.AddTourPrimitive(flyto);
            lasttime = cs.datetime;
        }

        // Hidden per-sample placemark carrying the 3D plane model.
        Placemark pmplane = new Placemark();
        pmplane.Name = "Point " + a;
        pmplane.Time = tstamp;
        pmplane.Visibility = false;

        SharpKml.Dom.Location loc = new SharpKml.Dom.Location();
        loc.Latitude = cs.lat;
        loc.Longitude = cs.lng;
        loc.Altitude = cs.alt;
        // Google Earth clips models below ground; clamp to just above zero.
        if (loc.Altitude < 0)
        {
            loc.Altitude = 0.01;
        }

        SharpKml.Dom.Orientation ori = new SharpKml.Dom.Orientation();
        ori.Heading = cs.yaw;
        ori.Roll = -cs.roll;
        ori.Tilt = -cs.pitch;

        SharpKml.Dom.Scale sca = new SharpKml.Dom.Scale();
        sca.X = 2;
        sca.Y = 2;
        sca.Z = 2;

        Model model = new Model();
        model.Location = loc;
        model.Orientation = ori;
        model.AltitudeMode = altmode;
        model.Scale = sca;

        try
        {
            // HTML attitude/time table shown in the placemark balloon.
            Description desc = new Description();
            desc.Text = @"<![CDATA[ <table> <tr><td>Roll: " + model.Orientation.Roll.Value.ToString("0.00") + @" </td></tr> <tr><td>Pitch: " + model.Orientation.Tilt.Value.ToString("0.00") + @" </td></tr> <tr><td>Yaw: " + model.Orientation.Heading.Value.ToString("0.00") + @" </td></tr> <tr><td>Time: " + cs.datetime.ToString("HH:mm:sszzzzzz") + @" </td></tr> </table> ";
            // ]]>";
            pmplane.Description = desc;
        }
        catch { }

        SharpKml.Dom.Link link = new SharpKml.Dom.Link();
        link.Href = new Uri("block_plane_0.dae", UriKind.Relative);
        model.Link = link;

        pmplane.Geometry = model;
        planes.AddFeature(pmplane);

        // Visible track point sharing the model placemark's description.
        Placemark pmt = new Placemark();
        SharpKml.Dom.Point pnt = new SharpKml.Dom.Point();
        pnt.AltitudeMode = altmode;
        pnt.Coordinate = new Vector(cs.lat, cs.lng, cs.alt);
        pmt.Name = "" + a;
        pmt.Description = pmplane.Description;
        pmt.Time = tstamp;
        pmt.Geometry = pnt;
        pmt.StyleUrl = new Uri("#track", UriKind.Relative);
        points.AddFeature(pmt);
        a++;
    }
    tour.Playlist = tourplaylist;
    kml.AddFeature(tour);

    Serializer serializer = new Serializer();
    serializer.Serialize(kml);
    //Console.WriteLine(serializer.Xml);

    StreamWriter sw = new StreamWriter(filename);
    sw.Write(serializer.Xml);
    sw.Close();

    // create kmz - aka zip file
    FileStream fs = File.Open(filename.Replace(Path.GetExtension(filename), ".kmz"), FileMode.Create);
    ZipOutputStream zipStream = new ZipOutputStream(fs);
    zipStream.SetLevel(9); //0-9, 9 being the highest level of compression
    zipStream.UseZip64 = UseZip64.Off; // older zipfile

    // entry 1: the KML document itself
    string entryName = ZipEntry.CleanName(Path.GetFileName(filename)); // Removes drive from name and fixes slash direction
    ZipEntry newEntry = new ZipEntry(entryName);
    newEntry.DateTime = DateTime.Now;

    zipStream.PutNextEntry(newEntry);

    // Zip the file in buffered chunks
    // the "using" will close the stream even if an exception occurs
    byte[] buffer = new byte[4096];
    using (FileStream streamReader = File.Open(filename, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
    {
        StreamUtils.Copy(streamReader, zipStream, buffer);
    }
    zipStream.CloseEntry();

    filename = Path.GetDirectoryName(Application.ExecutablePath) + Path.DirectorySeparatorChar + "block_plane_0.dae";

    // entry 2: the plane model referenced by the placemarks
    entryName = ZipEntry.CleanName(Path.GetFileName(filename)); // Removes drive from name and fixes slash direction
    newEntry = new ZipEntry(entryName);
    newEntry.DateTime = DateTime.Now;

    zipStream.PutNextEntry(newEntry);

    // Zip the file in buffered chunks
    // the "using" will close the stream even if an exception occurs
    buffer = new byte[4096];
    using (FileStream streamReader = File.OpenRead(filename))
    {
        StreamUtils.Copy(streamReader, zipStream, buffer);
    }
    zipStream.CloseEntry();

    zipStream.IsStreamOwner = true; // Makes the Close also Close the underlying stream
    zipStream.Close();
}
/// <summary>
/// Writes the parsed dataflash log as a KML file at <paramref name="filename"/>:
/// per-mode flight-path segments, a (chunked) POS-message track, waypoint line strings
/// from CMD records, and per-sample plane models. Also triggers the GPX/RINEX/WP/param
/// sibling exports, packages the KML + plane model into a .kmz, and finally resets the
/// instance's log-derived state.
/// </summary>
/// <param name="filename">Destination path for the .kml file.</param>
public void writeKML(string filename)
{
    // Best-effort sibling exports; failures are deliberately swallowed so the
    // KML export still proceeds.
    try
    {
        writeGPX(filename);
    }
    catch
    {
    }
    try
    {
        writeRinex(filename);
    }
    catch
    {
    }
    try
    {
        writeWPFile(filename);
    }
    catch
    {
    }
    try
    {
        writeParamFile(filename);
    }
    catch
    {
    }

    // Per-segment line colours, cycled by segment index below.
    Color[] colours = { Color.Red, Color.Orange, Color.Yellow, Color.Green, Color.Blue, Color.Indigo, Color.Violet, Color.Pink };

    AltitudeMode altmode = AltitudeMode.absolute;

    KMLRoot kml = new KMLRoot();
    Folder fldr = new Folder("Log");

    Style style = new Style();
    style.Id = "yellowLineGreenPoly";
    style.Add(new LineStyle(HexStringToColor("7f00ffff"), 4));

    Style style1 = new Style();
    style1.Id = "spray";
    style1.Add(new LineStyle(HexStringToColor("4c0000ff"), 0));
    style1.Add(new PolyStyle() { Color = HexStringToColor("4c0000ff") });

    PolyStyle pstyle = new PolyStyle();
    pstyle.Color = HexStringToColor("7f00ff00");
    style.Add(pstyle);

    kml.Document.AddStyle(style);
    kml.Document.AddStyle(style1);

    int stylecode = 0xff;
    int g = -1; // index into 'position' (one list per flight-mode segment)
    foreach (List<Point3D> poslist in position)
    {
        g++;
        if (poslist == null)
        {
            continue;
        }

        // (A large block of commented-out experimental "spray" polygon-offset code
        // previously lived here; removed as dead code — see version control history.)

        LineString ls = new LineString();
        ls.AltitudeMode = altmode;
        ls.Extrude = true;
        //ls.Tessellate = true;

        Coordinates coords = new Coordinates();
        coords.AddRange(poslist);

        ls.coordinates = coords;

        Placemark pm = new Placemark();

        string mode = "";
        if (g < modelist.Count)
        {
            mode = modelist[g];
        }

        pm.name = g + " Flight Path " + mode;
        pm.styleUrl = "#yellowLineGreenPoly";
        pm.LineString = ls;

        // NOTE(review): modulo (Length - 1) means the last colour (Pink) is never
        // selected — possibly intentional, confirm before changing.
        stylecode = colours[g % (colours.Length - 1)].ToArgb();
        Style style2 = new Style();
        Color color = Color.FromArgb(0xff, (stylecode >> 16) & 0xff, (stylecode >> 8) & 0xff, (stylecode >> 0) & 0xff);
        log.Info("colour " + color.ToArgb().ToString("X") + " " + color.ToString());
        style2.Add(new LineStyle(color, 4));

        pm.AddStyle(style2);

        fldr.Add(pm);
    }

    // POS-message track: near-duplicate points (within 0.1 m horizontally and
    // +/-0.3 m vertically) are dropped; the placemark is split every 20000 points
    // to keep Google Earth responsive.
    Placemark pmPOS = new Placemark();
    pmPOS.name = "POS Message";
    pmPOS.LineString = new LineString();
    pmPOS.LineString.coordinates = new Coordinates();
    Point3D lastPoint3D = new Point3D();
    PointLatLngAlt lastplla = PointLatLngAlt.Zero;
    foreach (var item in PosLatLngAlts)
    {
        var newpoint = new Point3D(item.Lng, item.Lat, item.Alt);
        if (item.GetDistance(lastplla) < 0.1 && lastPoint3D.Z >= (newpoint.Z - 0.3) && lastPoint3D.Z <= (newpoint.Z + 0.3))
        {
            continue;
        }
        pmPOS.LineString.coordinates.Add(newpoint);
        lastPoint3D = newpoint;
        lastplla = item;
        if (pmPOS.LineString.coordinates.Count > 20000)
        {
            //add current
            pmPOS.AddStyle(style);
            fldr.Add(pmPOS);
            // create new
            pmPOS = new Placemark();
            pmPOS.name = "POS Message - extra";
            pmPOS.LineString = new LineString();
            pmPOS.LineString.coordinates = new Coordinates();
            lastPoint3D = new Point3D();
            lastplla = PointLatLngAlt.Zero;
        }
    }
    pmPOS.AddStyle(style);
    fldr.Add(pmPOS);

    Folder planes = new Folder();
    planes.name = "Planes";
    fldr.Add(planes);

    Folder waypoints = new Folder();
    waypoints.name = "Waypoints";
    fldr.Add(waypoints);

    LineString lswp = new LineString();
    lswp.AltitudeMode = AltitudeMode.relativeToGround;
    lswp.Extrude = true;

    Coordinates coordswp = new Coordinates();
    int lastwp = 0;
    // Build one line string per uploaded mission: a waypoint number that drops
    // below the previous one signals the start of a new mission upload.
    foreach (var line in cmdraw)
    {
        var item = dflog.GetDFItemFromLine(line, 0);
        if (int.Parse(item["CId"]) <= (int)MAVLink.MAV_CMD.LAST) // wps
        {
            var wpno = int.Parse(item["CNum"]);
            if (wpno < lastwp)
            {
                lswp.coordinates = coordswp;

                Placemark pmwp = new Placemark();
                pmwp.name = "Waypoints ";
                //pm.styleUrl = "#yellowLineGreenPoly";
                pmwp.LineString = lswp;
                if (coordswp.Count > 0)
                {
                    waypoints.Add(pmwp);
                }
                lswp = new LineString();
                lswp.AltitudeMode = AltitudeMode.relativeToGround;
                lswp.Extrude = true;
                coordswp = new Coordinates();
            }
            lastwp = wpno;
            var lng = double.Parse(item["Lng"], CultureInfo.InvariantCulture);
            var lat = double.Parse(item["Lat"], CultureInfo.InvariantCulture);
            var alt = double.Parse(item["Alt"], CultureInfo.InvariantCulture);
            // Home (waypoint 0) is drawn at ground level.
            if (wpno == 0)
            {
                alt = 0;
            }
            if (lat == 0 && lng == 0)
            {
                continue;
            }
            coordswp.Add(new Point3D(lng, lat, alt));
        }
    }
    // Flush the final (possibly only) waypoint line string.
    lswp.coordinates = coordswp;
    Placemark pmwp2 = new Placemark();
    pmwp2.name = "Waypoints";
    //pm.styleUrl = "#yellowLineGreenPoly";
    pmwp2.LineString = lswp;
    if (coordswp.Count > 0)
    {
        waypoints.Add(pmwp2);
    }

    int a = 0;
    int l = -1;
    Model lastmodel = null;

    // One plane model per distinct logged attitude/position sample.
    foreach (Data mod in flightdata)
    {
        l++;
        if (mod.model.Location.latitude == 0)
        {
            continue;
        }
        if (lastmodel != null)
        {
            if (lastmodel.Location.Equals(mod.model.Location))
            {
                continue;
            }
        }
        Placemark pmplane = new Placemark();
        pmplane.name = "Plane " + a;

        pmplane.visibility = false;

        Model model = mod.model;
        model.AltitudeMode = altmode;
        model.Scale.x = 2;
        model.Scale.y = 2;
        model.Scale.z = 2;

        try
        {
            // HTML attitude/navigation table shown in the placemark balloon.
            pmplane.description = @"<![CDATA[ <table> <tr><td>Roll: " + model.Orientation.roll + @" </td></tr> <tr><td>Pitch: " + model.Orientation.tilt + @" </td></tr> <tr><td>Yaw: " + model.Orientation.heading + @" </td></tr> <tr><td>WP dist " + mod.ntun[2] + @" </td></tr> <tr><td>tar bear " + mod.ntun[3] + @" </td></tr> <tr><td>nav bear " + mod.ntun[4] + @" </td></tr> <tr><td>alt error " + mod.ntun[5] + @" </td></tr> </table> ]]>";
        }
        catch
        {
        }

        try
        {
            pmplane.Point = new KmlPoint((float)model.Location.longitude, (float)model.Location.latitude, (float)model.Location.altitude);
            pmplane.Point.AltitudeMode = altmode;

            Link link = new Link();
            link.href = "block_plane_0.dae";

            model.Link = link;

            pmplane.Model = model;

            planes.Add(pmplane);
        }
        catch
        {
        } // bad lat long value

        lastmodel = mod.model;

        a++;
    }

    kml.Document.Add(fldr);
    kml.Save(filename);

    // create kmz - aka zip file
    FileStream fs = File.Open(filename.ToLower().Replace(".log.kml", ".kmz").Replace(".bin.kml", ".kmz"), FileMode.Create);

    ZipOutputStream zipStream = new ZipOutputStream(fs);
    zipStream.SetLevel(9); //0-9, 9 being the highest level of compression

    zipStream.UseZip64 = UseZip64.Off; // older zipfile

    // entry 1: the KML document (deleted from disk after zipping)
    string entryName = ZipEntry.CleanName(Path.GetFileName(filename)); // Removes drive from name and fixes slash direction
    ZipEntry newEntry = new ZipEntry(entryName);
    newEntry.DateTime = DateTime.Now;

    zipStream.PutNextEntry(newEntry);

    // Zip the file in buffered chunks
    // the "using" will close the stream even if an exception occurs
    byte[] buffer = new byte[4096];
    using (FileStream streamReader = File.OpenRead(filename))
    {
        StreamUtils.Copy(streamReader, zipStream, buffer);
    }
    zipStream.CloseEntry();

    File.Delete(filename);

    filename = Settings.GetRunningDirectory() + "block_plane_0.dae";

    // entry 2: the plane model referenced by the placemarks
    entryName = ZipEntry.CleanName(Path.GetFileName(filename)); // Removes drive from name and fixes slash direction
    newEntry = new ZipEntry(entryName);
    newEntry.DateTime = DateTime.Now;

    zipStream.PutNextEntry(newEntry);

    // Zip the file in buffered chunks
    // the "using" will close the stream even if an exception occurs
    buffer = new byte[4096];
    using (FileStream streamReader = File.OpenRead(filename))
    {
        StreamUtils.Copy(streamReader, zipStream, buffer);
    }
    zipStream.CloseEntry();

    zipStream.IsStreamOwner = true; // Makes the Close also Close the underlying stream
    zipStream.Close();

    // Reset log-derived state so the next load starts clean.
    positionindex = 0;
    modelist.Clear();
    flightdata.Clear();
    position = new List<Core.Geometry.Point3D>[200];
    cmdraw.Clear();
}
/// <summary>
/// <para>Loads a TAI atlas description from the given Zip-File and Entry and creates &amp; loads Sprites from it.</para>
/// </summary>
/// <param name="zipFile">Archive containing the .tai entry.</param>
/// <param name="taiEntry">The .tai entry describing sub-images of a texture atlas.</param>
/// <returns>The sprites created for every atlas line whose texture is already cached.</returns>
private IEnumerable<CluwneSprite> LoadSpritesFrom(ZipFile zipFile, ZipEntry taiEntry)
{
    var loadedSprites = new List<CluwneSprite>();

    // Fix: the original closed AND disposed the three streams by hand (each twice);
    // 'using' guarantees disposal even when an exception is thrown mid-read.
    // Also removed two unused locals (ResourceName, imageName).
    string loadedTAI;
    var byteBuffer = new byte[zipBufferSize];
    using (Stream zipStream = zipFile.GetInputStream(taiEntry)) //Will throw exception is missing or wrong password. Handle this.
    using (var memStream = new MemoryStream())
    {
        StreamUtils.Copy(zipStream, memStream, byteBuffer);
        memStream.Position = 0;
        using (var taiReader = new StreamReader(memStream, true))
        {
            loadedTAI = taiReader.ReadToEnd();
        }
    }

    string[] splitContents = Regex.Split(loadedTAI, "\r\n"); //Split by newlines.

    foreach (string line in splitContents)
    {
        if (String.IsNullOrWhiteSpace(line))
        {
            continue;
        }

        string[] splitLine = line.Split(',');
        string[] fullPath = Regex.Split(splitLine[0], "\t");

        string PlatformPathname = SS14.Shared.Utility.PlatformTools.SanePath(fullPath[0]);
        string originalName = Path.GetFileNameWithoutExtension(PlatformPathname).ToLowerInvariant();
        //The name of the original picture without extension, before it became part of the atlas.
        //This will be the name we can find this under in our Resource lists.

        string[] splitResourceName = fullPath[2].Split('.');

        if (!TextureCache.Textures.Contains(splitResourceName[0]))
        {
            continue; //Image for this sprite does not exist. Possibly set to defered later.
        }
        Texture atlasTex = TextureCache.Textures[splitResourceName[0]];
        //Grab the image for the sprite from the cache.

        var info = new SpriteInfo();
        info.Name = originalName;

        float offsetX = 0;
        float offsetY = 0;
        float sizeX = 0;
        float sizeY = 0;

        if (splitLine.Length > 8) //Separated with ','. This causes some problems and happens on some EU PCs.
        {
            // Locale used ',' as the decimal separator, so each float was split in
            // two fields; rejoin the halves with '.' before parsing.
            offsetX = float.Parse(splitLine[3] + "." + splitLine[4], CultureInfo.InvariantCulture);
            offsetY = float.Parse(splitLine[5] + "." + splitLine[6], CultureInfo.InvariantCulture);
            sizeX = float.Parse(splitLine[8] + "." + splitLine[9], CultureInfo.InvariantCulture);
            sizeY = float.Parse(splitLine[10] + "." + splitLine[11], CultureInfo.InvariantCulture);
        }
        else
        {
            offsetX = float.Parse(splitLine[3], CultureInfo.InvariantCulture);
            offsetY = float.Parse(splitLine[4], CultureInfo.InvariantCulture);
            sizeX = float.Parse(splitLine[6], CultureInfo.InvariantCulture);
            sizeY = float.Parse(splitLine[7], CultureInfo.InvariantCulture);
        }

        // TAI stores fractional coordinates; scale to atlas pixels.
        info.Offsets = new Vector2((float)Math.Round(offsetX * atlasTex.Size.X, 1), (float)Math.Round(offsetY * atlasTex.Size.Y, 1));
        info.Size = new Vector2((float)Math.Round(sizeX * atlasTex.Size.X, 1), (float)Math.Round(sizeY * atlasTex.Size.Y, 1));

        if (!_spriteInfos.ContainsKey(originalName))
        {
            _spriteInfos.Add(originalName, info);
        }

        loadedSprites.Add(new CluwneSprite(originalName, atlasTex, new IntRect((int)info.Offsets.X, (int)info.Offsets.Y, (int)info.Size.X, (int)info.Size.Y)));
    }
    return (loadedSprites);
}
/// <summary>
/// Extracts the ZIP file.
/// </summary>
/// <param name="recursive">ZIP files within the extracted one, will be also extracted.</param>
/// <param name="output">Destination folder for the extracted files.</param>
/// <param name="password">ZIP file's password.</param>
/// <exception cref="DirectoryNotFoundException">The output folder does not exist.</exception>
/// <exception cref="InvalidOperationException">An entry path escapes the output folder (Zip Slip).</exception>
public void Extract(bool recursive = false, string output = null, string password = null)
{
    output ??= Path.GetDirectoryName(FilePath);
    if (!Directory.Exists(output))
    {
        throw new DirectoryNotFoundException();
    }

    // Canonical root used to reject entries that try to escape via "../" or
    // absolute paths (Zip Slip attack on untrusted archives).
    var outputRoot = Path.GetFullPath(output);
    if (!outputRoot.EndsWith(Path.DirectorySeparatorChar.ToString()))
    {
        outputRoot += Path.DirectorySeparatorChar;
    }

    //Source: https://github.com/icsharpcode/SharpZipLib/wiki/Unpack-a-Zip-with-full-control-over-the-operation
    using (var zf = new ZipFile(ZipFile))
    {
        if (!String.IsNullOrEmpty(password))
        {
            zf.Password = password;
        }

        foreach (ZipEntry zipEntry in zf)
        {
            if (!zipEntry.IsFile)
            {
                continue;
            }

            var entryFileName = zipEntry.Name;
            // Security fix: canonicalize and verify the target stays inside 'output'.
            var fullZipToPath = Path.GetFullPath(Path.Combine(output, entryFileName));
            if (!fullZipToPath.StartsWith(outputRoot, StringComparison.Ordinal))
            {
                throw new InvalidOperationException($"Zip entry '{entryFileName}' would extract outside the destination folder.");
            }

            var directoryName = Path.GetDirectoryName(fullZipToPath);
            if (directoryName.Length > 0)
            {
                Directory.CreateDirectory(directoryName);
            }

            var buffer = new byte[4096];
            using (var zipStream = zf.GetInputStream(zipEntry))
            using (Stream fsOutput = File.Create(fullZipToPath))
            {
                StreamUtils.Copy(zipStream, fsOutput, buffer);
            }
        }
    }

    if (recursive)
    {
        //Cannot call recursivelly with the recursive flag in order to avoid infinite loops.
        var done = false;
        var extracted = new HashSet<string>();

        do
        {
            done = true;
            foreach (string file in Directory.GetFiles(output, "*.zip", SearchOption.AllDirectories))
            {
                if (!extracted.Contains(file))
                {
                    extracted.Add(file);
                    var connector = new Zip(file);
                    connector.Extract(false);
                    done = false;
                }
            }
        } while (!done);
    }
}
/// <summary>
/// Extracts a single zip entry (file or directory) to <paramref name="targetName"/>,
/// honoring the Overwrite mode and the per-directory skip decisions in
/// <paramref name="dirs"/>, and routing every step through the ProcessPrepare /
/// ProcessComplete callbacks (which may veto or confirm the operation).
/// </summary>
/// <param name="zip">Open archive the entry belongs to.</param>
/// <param name="rootDirectory">Extraction root used when UsePath is false.</param>
/// <param name="targetName">Full destination path for this entry.</param>
/// <param name="entry">The entry being extracted.</param>
/// <param name="dirs">Cache of directory-name -> skip decisions, shared across entries.</param>
/// <returns>A non-null value when a callback aborts processing; null otherwise.</returns>
object extract(ZipFile zip, string rootDirectory, string targetName, ZipEntry entry, Dictionary<string, bool> dirs)
{
    string dirName;
    if (UsePath)
    {
        // Preserve archive paths: files extract into their own directory.
        dirName = (entry.IsFile) ? Path.GetDirectoryName(Path.GetFullPath(targetName)) : targetName;
    }
    else
    {
        // Flattened extraction: everything lands in the root directory.
        dirName = rootDirectory;
    }

    // Get date time from entry
    DateTime? entryTimeUtc = getEntryTimeUtc(entry);

    // Create directory
    bool skip = false;
    string zipDirName = string.Empty;
    ZipEntry dirZipEntry = null;
    if (entry.IsFile)
    {
        // Find the archive entry for the containing directory (it may be stored
        // with or without a trailing slash), so its attributes can be applied.
        zipDirName = Path.GetDirectoryName(entry.Name);
        dirZipEntry = zip.GetEntry(zipDirName);
        if (dirZipEntry == null)
        {
            dirZipEntry = zip.GetEntry(zipDirName + "/");
        }
    }
    else
    {
        zipDirName = entry.Name;
        dirZipEntry = entry;
    }

    // A directory the user already chose to skip short-circuits everything below it.
    if (dirs.TryGetValue(zipDirName, out skip) && skip)
    {
        return (null);
    }

    if (entry.IsDirectory)
    {
        DirectoryInfo dir = new DirectoryInfo(dirName);
        var zz = new ZipFSEntry(entry, ZipTime);
        var to = new FileOrDirectoryInfo(dir);
        object r = ProcessPrepare(zz, to, () => null);
        if (r == null)
        {
            r = ProcessComplete(new ZipFSEntry(entry, ZipTime), new FileOrDirectoryInfo(dir), skip, skp =>
            {
                // Record the (possibly user-driven) skip decision for child entries.
                dirs[zipDirName] = skip = skp;
                if (Extract && !skp)
                {
                    DirectoryInfo di = Directory.CreateDirectory(dirName);
                    setAttributes(di, entry);
                }
                return (null);
            });
        }
        if (r != null || skip)
        {
            return (r);
        }
    }
    if (entry.IsFile)
    {
        var pfrom = new ZipFSEntry(entry, ZipTime);
        var fi = new FileInfo(targetName);
        var pto = new FileOrDirectoryInfo(fi);

        // Overwrite policy for pre-existing files.
        if (fi.Exists)
        {
            if (Overwrite == OverwriteMode.Never)
            {
                skip = true;
            }
            if (Overwrite == OverwriteMode.IfNewer)
            {
                if (entryTimeUtc == null || entryTimeUtc <= File.GetLastWriteTimeUtc(targetName))
                {
                    VerboseMessage("Ignoring never file {0}", targetName);
                    skip = true;
                }
            }
        }

        // In Confirm mode a "skip" still flows into ProcessComplete so the user
        // can override it; otherwise we are done.
        if ((skip && Overwrite != OverwriteMode.Confirm))
        {
            return (null);
        }

        object r = ProcessPrepare(pfrom, pto, () => null);
        if (r != null)
        {
            return (r);
        }

        return (ProcessComplete(pfrom, pto, skip, sk =>
        {
            if (sk || !Extract)
            {
                return null;
            }

            // Ensure the destination directory exists, carrying over the
            // directory entry's attributes when the archive has one.
            if (!fi.Directory.Exists)
            {
                DirectoryInfo di = Directory.CreateDirectory(dirName);
                if (dirZipEntry != null)
                {
                    setAttributes(di, dirZipEntry);
                }
            }

            // Clear read-only/hidden/system so the existing file can be replaced.
            const FileAttributes mask = (FileAttributes.ReadOnly | FileAttributes.Hidden | FileAttributes.System);
            if (fi.Exists && (fi.Attributes & mask) != 0)
            {
                fi.Attributes = fi.Attributes & ~mask;
            }

            try
            {
                using (Stream outputStream = Context.CreateStream(targetName))
                {
                    StreamUtils.Copy(zip.GetInputStream(entry), outputStream, new byte[16384],
                        delegate (object x, ProgressEventArgs y)
                        {
                            Context.OnProgress(1, y.Name);
                        }, ProgressInterval, this, entry.Name, entry.Size);
                }
            }
            catch
            {
                // Do not leave a truncated file behind on a failed copy.
                File.Delete(targetName);
                throw;
            }

            setAttributes(fi, entry);
            return null;
        }));
    }
    return (null);
}
/// <summary>
/// Extracts every file entry of a zip archive into <paramref name="TargetDirectory"/> using SharpZipLib,
/// raising BeforeExtract/AfterExtract around each file. Directory entries are skipped; folders are
/// created on demand. Rejects entries whose names would escape the target directory ("Zip Slip").
/// </summary>
/// <param name="ZipFullPath">Full path of the archive to read.</param>
/// <param name="TargetDirectory">Root directory the entries are written under.</param>
/// <param name="Password">Optional archive password (AES entries are handled automatically).</param>
public void ExtractZip(string ZipFullPath, string TargetDirectory, string Password)
{
    ZipFile zf = null;
    try
    {
        FileStream fs = File.OpenRead(ZipFullPath);
        try
        {
            zf = new ZipFile(fs);
        }
        catch
        {
            // FIX: if the ZipFile constructor throws it never took ownership of the
            // stream, so dispose it here instead of leaking the file handle.
            fs.Dispose();
            throw;
        }
        if (!string.IsNullOrEmpty(Password))
        {
            zf.Password = Password; // AES encrypted entries are handled automatically
        }
        // Normalized root (with trailing separator) used for the Zip Slip containment check below.
        string rootFullPath = Path.GetFullPath(TargetDirectory.TrimEnd(Path.DirectorySeparatorChar) + Path.DirectorySeparatorChar);
        long TotalCount = zf.Count;
        foreach (ZipEntry Entry in zf)
        {
            if (!Entry.IsFile)
            {
                continue; // Ignore directories
            }
            string DirectoryName = Path.GetDirectoryName(Entry.Name);
            string FileName = Path.GetFileName(Entry.Name);
            string FolderName = Path.Combine(TargetDirectory, DirectoryName);
            bool folderOverridden = false;
            if (this.BeforeExtract != null)
            {
                CBeforeExtractEventArgs e = new CBeforeExtractEventArgs()
                {
                    FolderNameInZip = DirectoryName,
                    FolderName = FolderName,
                    FileName = FileName,
                    TotalCountToExtract = TotalCount,
                    CountExtracted = Entry.ZipFileIndex + 1,
                    ProgressPercentage = Math.Min(100, Convert.ToInt32(((Entry.ZipFileIndex + 1) / (double)TotalCount) * 100))
                };
                this.BeforeExtract(this, e);
                if (!string.IsNullOrEmpty(e.NewFolderNameIs))
                {
                    // A handler deliberately redirected the destination; trust it.
                    FolderName = e.NewFolderNameIs;
                    folderOverridden = true;
                }
            }
            string FullPathDest = Path.Combine(FolderName, FileName);
            if (!folderOverridden)
            {
                // SECURITY FIX (Zip Slip): a crafted entry name such as "..\..\evil.exe" must not
                // be allowed to write outside of TargetDirectory.
                string destFullPath = Path.GetFullPath(FullPathDest);
                if (!destFullPath.StartsWith(rootFullPath, StringComparison.OrdinalIgnoreCase))
                {
                    throw new IOException("Entry '" + Entry.Name + "' would extract outside of the target directory.");
                }
            }
            if (!Directory.Exists(FolderName))
            {
                Directory.CreateDirectory(FolderName);
            }
            byte[] buffer = new byte[4096]; // 4K is optimum
            // Unzip file in buffered chunks. This is just as fast as unpacking to a buffer the full size
            // of the file, but does not waste memory.
            // FIX: the entry input stream is now disposed as well; "using" closes both streams
            // even if an exception occurs.
            using (Stream zipStream = zf.GetInputStream(Entry))
            using (FileStream streamWriter = File.Create(FullPathDest))
            {
                StreamUtils.Copy(zipStream, streamWriter, buffer);
            }
            // Preserve the timestamp recorded in the archive.
            FileInfo fi = new FileInfo(FullPathDest);
            fi.LastWriteTime = Entry.DateTime;
            if (this.AfterExtract != null)
            {
                CAfterExtractEventArgs e = new CAfterExtractEventArgs() { FullPath = FullPathDest, FileName = FileName };
                this.AfterExtract(this, e);
            }
        }
    }
    finally
    {
        if (zf != null)
        {
            zf.IsStreamOwner = true; // Makes close also shut the underlying stream
            zf.Close();              // Ensure we release resources
        }
    }
}
/// <summary>
/// Adds a directory, along with all files and subdirectories, to the ZipStream.
/// </summary>
/// <param name="directoryToCompress">The directory to add recursively</param>
/// <param name="zipStream">The ZipStream to which the files and directories will be added</param>
/// <param name="dirNameOffset">This number of characters will be removed from the full directory or file name
/// before creating the zip entry name</param>
/// <param name="dirNamePrefix">string to prefix to the zip entry name</param>
/// <param name="depthFromCollection">int with the number of folders away it is from the collection folder. The collection folder itself is 0,
/// a book is 1, a subfolder of the book is 2, etc.</param>
/// <param name="forReaderTools">If True, then some pre-processing will be done to the contents of decodable
/// and leveled readers before they are added to the ZipStream</param>
/// <param name="excludeAudio">If true, the contents of the audio directory will not be included</param>
/// <param name="reduceImages">If true, image files are reduced in size to no larger than the max size before saving</param>
/// <param name="omitMetaJson">If true, meta.json is excluded (typically for HTML readers).</param>
/// <param name="pathToFileForSha">If non-null and this is the book file, a "version.txt" extra entry containing
/// the book's content hash is added to the archive (used by BloomReader for up-to-date checks).</param>
private static void CompressDirectory(string directoryToCompress, ZipOutputStream zipStream, int dirNameOffset, string dirNamePrefix,
    int depthFromCollection, bool forReaderTools, bool excludeAudio, bool reduceImages, bool omitMetaJson = false, string pathToFileForSha = null)
{
    if (excludeAudio && Path.GetFileName(directoryToCompress).ToLowerInvariant() == "audio")
    {
        return;
    }
    var files = Directory.GetFiles(directoryToCompress);
    // Don't get distracted by HTML files in any folder other than the book folder.
    // These HTML files in other locations aren't generated by Bloom. They may not have the format Bloom expects,
    // causing needless parsing errors to be thrown if we attempt to read them using Bloom code.
    bool shouldScanHtml = depthFromCollection == 1; // 1 means 1 level below the collection level, i.e. this is the book level
    var bookFile = shouldScanHtml ? BookStorage.FindBookHtmlInFolder(directoryToCompress) : null;
    XmlDocument dom = null;
    List<string> imagesToGiveTransparentBackgrounds = null;
    List<string> imagesToPreserveResolution = null;
    // Tests can also result in bookFile being null.
    if (!String.IsNullOrEmpty(bookFile))
    {
        var originalContent = File.ReadAllText(bookFile, Encoding.UTF8);
        dom = XmlHtmlConverter.GetXmlDomFromHtml(originalContent);
        var fullScreenAttr = dom.GetElementsByTagName("body").Cast<XmlElement>().First().Attributes["data-bffullscreenpicture"]?.Value;
        if (fullScreenAttr != null && fullScreenAttr.IndexOf("bloomReader", StringComparison.InvariantCulture) >= 0)
        {
            // This feature (currently used for motion books in landscape mode) triggers an all-black background,
            // due to a rule in bookFeatures.less.
            // Making white pixels transparent on an all-black background makes line-art disappear,
            // which is bad (BL-6564), so just make an empty list in this case.
            imagesToGiveTransparentBackgrounds = new List<string>();
        }
        else
        {
            imagesToGiveTransparentBackgrounds = FindCoverImages(dom);
        }
        imagesToPreserveResolution = FindImagesToPreserveResolution(dom);
        FindBackgroundAudioFiles(dom);
    }
    else
    {
        imagesToGiveTransparentBackgrounds = new List<string>();
        imagesToPreserveResolution = new List<string>();
    }
    // Some of the knowledge about ExcludedFileExtensions might one day move into this method.
    // But we'd have to check carefully the other places it is used.
    var localOnlyFiles = BookStorage.LocalOnlyFiles(directoryToCompress);
    foreach (var filePath in files)
    {
        if (ExcludedFileExtensionsLowerCase.Contains(Path.GetExtension(filePath.ToLowerInvariant())))
        {
            continue; // BL-2246: skip putting this one into the BloomPack
        }
        if (IsUnneededWaveFile(filePath, depthFromCollection))
        {
            continue;
        }
        if (localOnlyFiles.Contains(filePath))
        {
            continue;
        }
        var fileName = Path.GetFileName(filePath).ToLowerInvariant();
        if (fileName.StartsWith(BookStorage.PrefixForCorruptHtmFiles))
        {
            continue;
        }
        // Various stuff we keep in the book folder that is useful for editing or bloom library
        // or displaying collections but not needed by the reader. The most important is probably
        // eliminating the pdf, which can be very large. Note that we do NOT eliminate the
        // basic thumbnail.png, as we want eventually to extract that to use in the Reader UI.
        if (fileName == "thumbnail-70.png" || fileName == "thumbnail-256.png")
        {
            continue;
        }
        if (fileName == "meta.json" && omitMetaJson)
        {
            continue;
        }
        FileInfo fi = new FileInfo(filePath);
        var entryName = dirNamePrefix + filePath.Substring(dirNameOffset); // Makes the name in zip based on the folder
        entryName = ZipEntry.CleanName(entryName);                        // Removes drive from name and fixes slash direction
        ZipEntry newEntry = new ZipEntry(entryName)
        {
            DateTime = fi.LastWriteTime,
            IsUnicodeText = true
        }; // encode filename and comment in UTF8
        byte[] modifiedContent = {};
        // if this is a ReaderTools book, call GetBookReplacedWithTemplate() to get the contents
        if (forReaderTools && (bookFile == filePath))
        {
            modifiedContent = Encoding.UTF8.GetBytes(GetBookReplacedWithTemplate(filePath));
            newEntry.Size = modifiedContent.Length;
        }
        else if (forReaderTools && (Path.GetFileName(filePath) == "meta.json"))
        {
            modifiedContent = Encoding.UTF8.GetBytes(GetMetaJsonModfiedForTemplate(filePath));
            newEntry.Size = modifiedContent.Length;
        }
        else if (reduceImages && ImageFileExtensions.Contains(Path.GetExtension(filePath.ToLowerInvariant())))
        {
            fileName = Path.GetFileName(filePath); // restore original capitalization
            if (imagesToPreserveResolution.Contains(fileName))
            {
                modifiedContent = RobustFile.ReadAllBytes(filePath);
            }
            else
            {
                // Cover images should be transparent if possible. Others don't need to be.
                var makeBackgroundTransparent = imagesToGiveTransparentBackgrounds.Contains(fileName);
                modifiedContent = GetImageBytesForElectronicPub(filePath, makeBackgroundTransparent);
            }
            newEntry.Size = modifiedContent.Length;
        }
        else if (Path.GetExtension(filePath).ToLowerInvariant() == ".bloomcollection")
        {
            modifiedContent = Encoding.UTF8.GetBytes(GetBloomCollectionModifiedForTemplate(filePath));
            newEntry.Size = modifiedContent.Length;
        }
        // CompressBookForDevice is always called with reduceImages set.
        else if (reduceImages && bookFile == filePath)
        {
            SignLanguageApi.ProcessVideos(HtmlDom.SelectChildVideoElements(dom.DocumentElement).Cast<XmlElement>(), directoryToCompress);
            var newContent = XmlHtmlConverter.ConvertDomToHtml5(dom);
            modifiedContent = Encoding.UTF8.GetBytes(newContent);
            newEntry.Size = modifiedContent.Length;
            if (pathToFileForSha != null)
            {
                // Make an extra entry containing the sha
                var sha = Book.ComputeHashForAllBookRelatedFiles(pathToFileForSha);
                var name = "version.txt"; // must match what BloomReader is looking for in NewBookListenerService.IsBookUpToDate()
                MakeExtraEntry(zipStream, name, sha);
                LastVersionCode = sha;
            }
        }
        else
        {
            newEntry.Size = fi.Length;
        }
        zipStream.PutNextEntry(newEntry);
        if (modifiedContent.Length > 0)
        {
            using (var memStream = new MemoryStream(modifiedContent))
            {
                // There is some minimum buffer size (44 was too small); I don't know exactly what it is,
                // but 1024 makes it happy.
                StreamUtils.Copy(memStream, zipStream, new byte[Math.Max(modifiedContent.Length, 1024)]);
            }
        }
        else
        {
            // Zip the file in buffered chunks
            byte[] buffer = new byte[4096];
            using (var streamReader = RobustFile.OpenRead(filePath))
            {
                StreamUtils.Copy(streamReader, zipStream, buffer);
            }
        }
        zipStream.CloseEntry();
    }
    // Recurse into subfolders. NOTE(review): omitMetaJson and pathToFileForSha deliberately
    // revert to their defaults below book level — confirm this is intended before changing.
    var folders = Directory.GetDirectories(directoryToCompress);
    foreach (var folder in folders)
    {
        var dirName = Path.GetFileName(folder);
        if ((dirName == null) || (dirName.ToLowerInvariant() == "sample texts"))
        {
            continue; // Don't want to bundle these up
        }
        CompressDirectory(folder, zipStream, dirNameOffset, dirNamePrefix, depthFromCollection + 1, forReaderTools, excludeAudio, reduceImages);
    }
}
/// <summary>
/// Serializes one data block of a Hyouta archive: a content bitfield, per-content lengths,
/// one fixed-size file-info record per file, then (presumably — the tail of this method is
/// not visible here) the file payloads themselves, all aligned to 1 &lt;&lt; smallPackedAlignment.
/// </summary>
/// <param name="target">Destination stream; writing starts at its current position.</param>
/// <param name="files">Files to pack; each must have non-null Data.</param>
/// <param name="packedAlignment">Alignment exponent for the block layout.</param>
/// <param name="endian">Byte order for all multi-byte fields.</param>
private static ulong PackDataBlock(Stream target, List<HyoutaArchiveFileInfo> files, byte packedAlignment, EndianUtils.Endianness endian)
{
    byte smallPackedAlignment = ToSmallPackedAlignment(packedAlignment);
    long startPosition = target.Position;
    target.WriteUInt16(0); // offsetToFirstFileInfo, fill in later
    // Each optional per-file field is only materialized when at least one file uses it;
    // its slot length is the maximum over all files, rounded up to the alignment.
    bool hasDummyContent = files.Any(x => x.DummyContent != null);
    uint dummyContentLength = hasDummyContent ? ((uint)files.Max(x => x.DummyContent?.Length ?? 0)).Align(1 << smallPackedAlignment) : 0;
    bool hasFilename = files.Any(x => x.Filename != null);
    uint filenameLength = 0;
    //bool embedFilenamesInFileInfo = false;
    List<byte[]?>? encodedFilenames = null;
    if (hasFilename)
    {
        // figure out whether we want the strings to embed into the fileinfo directly
        // or whether to use an offset and write the string data at the end of the fileinfo
        // note that if a string is <= 8 bytes we can always embed it as we'd need 8 bytes for the offset anyway
        // so...
        encodedFilenames = new List<byte[]?>(files.Count);
        long longestBytecount = 0;
        long totalBytecount = 0;
        long filenameCountOver8Bytes = 0;
        for (int i = 0; i < files.Count; ++i)
        {
            var currentFilename = files[i].Filename;
            if (currentFilename == null)
            {
                encodedFilenames.Add(null);
            }
            else
            {
                byte[] stringbytes = EncodeString(currentFilename);
                encodedFilenames.Add(stringbytes);
                if (stringbytes.LongLength > 8)
                {
                    longestBytecount = Math.Max(longestBytecount, stringbytes.LongLength);
                    totalBytecount += stringbytes.LongLength;
                    ++filenameCountOver8Bytes;
                }
            }
        }
        // alright so we have, in practice, two options here
        // - make filenameLength == 16, store strings that are longer than that offsetted
        long nonEmbedSize = files.Count * 16 + totalBytecount.Align(1 << smallPackedAlignment);
        // - make filenameLength long enough so all strings can be embedded
        long embedSize = files.Count * (8 + longestBytecount).Align(1 << smallPackedAlignment);
        // pick whatever results in a smaller file; on a tie embed
        if (nonEmbedSize < embedSize)
        {
            //embedFilenamesInFileInfo = false;
            filenameLength = 16;
        }
        else
        {
            //embedFilenamesInFileInfo = true;
            filenameLength = (uint)(8 + longestBytecount).Align(1 << smallPackedAlignment);
        }
    }
    bool hasCompression = files.Any(x => x.CompressionInfo != null);
    uint compressionInfoLength = hasCompression ? files.Max(x => x.CompressionInfo?.MaximumCompressionInfoLength() ?? 0).Align(1 << smallPackedAlignment) : 0;
    bool hasBpsPatch = files.Any(x => x.BpsPatchInfo != null);
    uint bpsPatchInfoLength = hasBpsPatch ? 16u.Align(1 << smallPackedAlignment) : 0;
    bool hasCrc32 = files.Any(x => x.crc32 != null);
    uint crc32ContentLength = hasCrc32 ? 4u.Align(1 << smallPackedAlignment) : 0u;
    bool hasMd5 = files.Any(x => x.md5 != null);
    uint md5ContentLength = hasMd5 ? 16u.Align(1 << smallPackedAlignment) : 0u;
    bool hasSha1 = files.Any(x => x.sha1 != null);
    uint sha1ContentLength = hasSha1 ? 20u.Align(1 << smallPackedAlignment) : 0u;
    // Bitfield advertising which optional per-file fields are present in this block.
    ushort contentBitfield1 = 0;
    contentBitfield1 |= (ushort)(hasDummyContent ? 0x0001u : 0);
    contentBitfield1 |= (ushort)(hasFilename ? 0x0002u : 0);
    contentBitfield1 |= (ushort)(hasCompression ? 0x0004u : 0);
    contentBitfield1 |= (ushort)(hasBpsPatch ? 0x0008u : 0);
    contentBitfield1 |= (ushort)(hasCrc32 ? 0x0010u : 0);
    contentBitfield1 |= (ushort)(hasMd5 ? 0x0020u : 0);
    contentBitfield1 |= (ushort)(hasSha1 ? 0x0040u : 0);
    target.WriteUInt16(contentBitfield1, endian);
    // One length value per advertised field, in bitfield order.
    if (hasDummyContent)
    {
        WriteContentLength(dummyContentLength, target, endian);
    }
    if (hasFilename)
    {
        WriteContentLength(filenameLength, target, endian);
    }
    if (hasCompression)
    {
        WriteContentLength(compressionInfoLength, target, endian);
    }
    if (hasBpsPatch)
    {
        WriteContentLength(bpsPatchInfoLength, target, endian);
    }
    if (hasCrc32)
    {
        WriteContentLength(crc32ContentLength, target, endian);
    }
    if (hasMd5)
    {
        WriteContentLength(md5ContentLength, target, endian);
    }
    if (hasSha1)
    {
        WriteContentLength(sha1ContentLength, target, endian);
    }
    // Pad the header to alignment, then go back and patch the offset placeholder written first.
    long offsetToFirstFileInfo = (target.Position - startPosition).Align(1 << smallPackedAlignment);
    StreamUtils.WriteZeros(target, offsetToFirstFileInfo - (target.Position - startPosition));
    target.Position = startPosition;
    WriteContentLength((uint)offsetToFirstFileInfo, target, endian);
    target.Position = startPosition + offsetToFirstFileInfo;
    // 16 bytes = file position (8) + file length (8), plus each enabled optional field.
    long singleFileInfoLength = 16 + dummyContentLength + filenameLength + compressionInfoLength + bpsPatchInfoLength + crc32ContentLength + md5ContentLength + sha1ContentLength;
    long totalFileInfoLength = singleFileInfoLength * files.Count;
    long offsetToEndOfFileInfo = (offsetToFirstFileInfo + totalFileInfoLength).Align(1 << smallPackedAlignment);
    StreamUtils.WriteZeros(target, offsetToEndOfFileInfo - offsetToFirstFileInfo);
    var filedata = new List<(long position, DuplicatableStream data)>(files.Count);
    long positionOfFreeSpace = offsetToEndOfFileInfo;
    for (int i = 0; i < files.Count; ++i)
    {
        HyoutaArchiveFileInfo fi = files[i];
        var fiData = fi.Data;
        if (fiData == null)
        {
            throw new Exception("Data of file " + i + " is null.");
        }
        using (DuplicatableStream fs = fiData.Duplicate())
        {
            DuplicatableStream streamToWrite = fs;
            bool streamIsInternallyCompressed = fi.StreamIsCompressed;
            if (fi.BpsPatchInfo != null && fi.CompressionInfo != null && streamIsInternallyCompressed && !fi.StreamIsBpsPatch)
            {
                // this is a weird case; the stream wants both bps patch and compression
                // and is already compressed but not already bps patched, which breaks the defined order
                // we can handle this by decompressing, creating patch, recompressing
                streamToWrite = fi.DataStream.Duplicate(); // this decompresses the stream
                streamIsInternallyCompressed = false;      // and fake-set the stream as uncompressed for packing logic
            }
            byte[]? bpsPatchInfoBytes = null;
            byte[]? compressionInfoBytes = null;
            if (hasBpsPatch)
            {
                if (fi.BpsPatchInfo == null)
                {
                    // chunk has patches but this file is unpatched; we store this by pointing the file to itself
                    bpsPatchInfoBytes = new HyoutaArchiveBpsPatchInfo((ulong)i, (ulong)streamToWrite.Length, null).Serialize(endian);
                }
                else if (fi.StreamIsBpsPatch)
                {
                    bpsPatchInfoBytes = fi.BpsPatchInfo.Serialize(endian);
                }
                else
                {
                    var p = HyoutaArchiveBps.CreatePatch(fi.BpsPatchInfo, streamToWrite, endian);
                    bpsPatchInfoBytes = p.patchInfo;
                    streamToWrite = new DuplicatableByteArrayStream(p.patchData);
                }
            }
            if (hasCompression && fi.CompressionInfo != null)
            {
                if (streamIsInternallyCompressed)
                {
                    compressionInfoBytes = fi.CompressionInfo.Serialize(endian);
                }
                else
                {
                    var p = fi.CompressionInfo.Compress(streamToWrite, endian);
                    compressionInfoBytes = p.compressionInfo;
                    streamToWrite = new DuplicatableByteArrayStream(p.compressedData);
                }
            }
            // write file info
            target.Position = (singleFileInfoLength * i) + offsetToFirstFileInfo + startPosition;
            long positionPosition = target.Position;
            target.WriteUInt64(0); // position of file, will be filled later
            target.WriteUInt64((ulong)streamToWrite.Length, endian);
            if (hasDummyContent)
            {
                if (fi.DummyContent != null)
                {
                    target.Write(fi.DummyContent);
                    target.WriteZeros(dummyContentLength - fi.DummyContent.Length);
                }
                else
                {
                    target.WriteZeros(dummyContentLength);
                }
            }
            if (hasFilename)
            {
                if (fi.Filename != null)
                {
                    var efn = encodedFilenames![i];
/// <summary>
/// Reads this record's fixed-size 64-byte payload from the stream into <c>Data</c>,
/// failing if the stream ends before all 64 bytes are available.
/// </summary>
/// <param name="inStr">Stream positioned at the start of the payload.</param>
public override void Deserialise(Stream inStr)
{
    const int PayloadSize = 64; // record layout is fixed-width
    Data = new byte[PayloadSize];
    StreamUtils.EnsureRead(inStr, Data);
}
/// <summary>
/// Writes the accumulated flight data to a KML file (flight-path lines, waypoint line,
/// and per-sample plane models), then repackages it as a .kmz zip alongside the plane
/// model asset, deletes the intermediate .kml, and resets the accumulation state.
/// Also attempts a GPX export first (best-effort; failures are ignored).
/// </summary>
/// <param name="filename">Path of the .kml file to produce; the .kmz name is derived from it.</param>
public void writeKML(string filename)
{
    try
    {
        writeGPX(filename);
    }
    catch
    {
        // GPX export is best-effort; KML generation continues regardless.
    }

    Color[] colours =
    {
        Color.Red, Color.Orange, Color.Yellow, Color.Green, Color.Blue, Color.Indigo,
        Color.Violet, Color.Pink
    };

    AltitudeMode altmode = AltitudeMode.absolute;
    // all new logs have both agl and asl, we are using asl. this may break old logs
    // if (MainV2.comPort.MAV.cs.firmware == MainV2.Firmwares.ArduCopter2) {
    //     altmode = AltitudeMode.relativeToGround; // because of sonar, this is both right and wrong. right for sonar, wrong in terms of gps as the land slopes off.
    // }

    KMLRoot kml = new KMLRoot();
    Folder fldr = new Folder("Log");
    Style style = new Style();
    style.Id = "yellowLineGreenPoly";
    style.Add(new LineStyle(HexStringToColor("7f00ffff"), 4));
    PolyStyle pstyle = new PolyStyle();
    pstyle.Color = HexStringToColor("7f00ff00");
    style.Add(pstyle);
    kml.Document.AddStyle(style);

    int stylecode = 0xff;
    int g = -1;
    foreach (List<Point3D> poslist in position)
    {
        g++;
        if (poslist == null)
        {
            continue;
        }
        LineString ls = new LineString();
        ls.AltitudeMode = altmode;
        ls.Extrude = true;
        //ls.Tessellate = true;
        Coordinates coords = new Coordinates();
        coords.AddRange(poslist);
        ls.coordinates = coords;
        Placemark pm = new Placemark();
        string mode = "";
        if (g < modelist.Count)
        {
            mode = modelist[g];
        }
        pm.name = g + " Flight Path " + mode;
        pm.styleUrl = "#yellowLineGreenPoly";
        pm.LineString = ls;
        // FIX: was "g % (colours.Length - 1)", which cycled through only 7 of the
        // 8 colours and never used the last one (Pink).
        stylecode = colours[g % colours.Length].ToArgb();
        Style style2 = new Style();
        Color color = Color.FromArgb(0xff, (stylecode >> 16) & 0xff, (stylecode >> 8) & 0xff, (stylecode >> 0) & 0xff);
        log.Info("colour " + color.ToArgb().ToString("X") + " " + color.ToKnownColor().ToString());
        style2.Add(new LineStyle(color, 4));
        pm.AddStyle(style2);
        fldr.Add(pm);
    }

    Folder planes = new Folder();
    planes.name = "Planes";
    fldr.Add(planes);

    Folder waypoints = new Folder();
    waypoints.name = "Waypoints";
    fldr.Add(waypoints);

    // One line connecting all commanded waypoints.
    LineString lswp = new LineString();
    lswp.AltitudeMode = AltitudeMode.relativeToGround;
    lswp.Extrude = true;
    Coordinates coordswp = new Coordinates();
    foreach (PointLatLngAlt p1 in cmd)
    {
        coordswp.Add(new Point3D(p1.Lng, p1.Lat, p1.Alt));
    }
    lswp.coordinates = coordswp;
    Placemark pmwp = new Placemark();
    pmwp.name = "Waypoints";
    //pm.styleUrl = "#yellowLineGreenPoly";
    pmwp.LineString = lswp;
    waypoints.Add(pmwp);

    int a = 0;
    Model lastmodel = null;
    foreach (Data mod in flightdata)
    {
        // Skip samples with no GPS fix and consecutive duplicates at the same location.
        if (mod.model.Location.latitude == 0)
        {
            continue;
        }
        if (lastmodel != null)
        {
            if (lastmodel.Location.Equals(mod.model.Location))
            {
                continue;
            }
        }
        Placemark pmplane = new Placemark();
        pmplane.name = "Plane " + a;
        pmplane.visibility = false;
        Model model = mod.model;
        model.AltitudeMode = altmode;
        model.Scale.x = 2;
        model.Scale.y = 2;
        model.Scale.z = 2;
        try
        {
            pmplane.description = @"<![CDATA[ <table> <tr><td>Roll: " + model.Orientation.roll + @" </td></tr> <tr><td>Pitch: " + model.Orientation.tilt + @" </td></tr> <tr><td>Yaw: " + model.Orientation.heading + @" </td></tr> <tr><td>WP dist " + mod.ntun[2] + @" </td></tr> <tr><td>tar bear " + mod.ntun[3] + @" </td></tr> <tr><td>nav bear " + mod.ntun[4] + @" </td></tr> <tr><td>alt error " + mod.ntun[5] + @" </td></tr> </table> ]]>";
        }
        catch
        {
            // mod.ntun may be too short for some log types; description is optional.
        }
        try
        {
            pmplane.Point = new KmlPoint((float)model.Location.longitude, (float)model.Location.latitude, (float)model.Location.altitude);
            pmplane.Point.AltitudeMode = altmode;
            Link link = new Link();
            link.href = "block_plane_0.dae";
            model.Link = link;
            pmplane.Model = model;
            planes.Add(pmplane);
        }
        catch
        {
            // bad lat long value
        }
        lastmodel = mod.model;
        a++;
    }

    kml.Document.Add(fldr);
    kml.Save(filename);

    // create kmz - aka zip file
    // FIX: streams are now wrapped in "using" so the zip/file handles are released
    // even if adding an entry throws.
    using (ZipOutputStream zipStream = new ZipOutputStream(File.Open(filename.ToLower().Replace(".log.kml", ".kmz").Replace(".bin.kml", ".kmz"), FileMode.Create)))
    {
        zipStream.SetLevel(9); // 0-9, 9 being the highest level of compression
        zipStream.UseZip64 = UseZip64.Off; // older zipfile

        // entry 1: the generated kml itself
        string entryName = ZipEntry.CleanName(Path.GetFileName(filename)); // Removes drive from name and fixes slash direction
        ZipEntry newEntry = new ZipEntry(entryName);
        newEntry.DateTime = DateTime.Now;
        zipStream.PutNextEntry(newEntry);
        // Zip the file in buffered chunks; "using" closes the reader even on exception.
        byte[] buffer = new byte[4096];
        using (FileStream streamReader = File.OpenRead(filename))
        {
            StreamUtils.Copy(streamReader, zipStream, buffer);
        }
        zipStream.CloseEntry();
        File.Delete(filename);

        // entry 2: the plane model asset referenced by the placemarks
        filename = Path.GetDirectoryName(Application.ExecutablePath) + Path.DirectorySeparatorChar + "block_plane_0.dae";
        entryName = ZipEntry.CleanName(Path.GetFileName(filename)); // Removes drive from name and fixes slash direction
        newEntry = new ZipEntry(entryName);
        newEntry.DateTime = DateTime.Now;
        zipStream.PutNextEntry(newEntry);
        buffer = new byte[4096];
        using (FileStream streamReader = File.OpenRead(filename))
        {
            StreamUtils.Copy(streamReader, zipStream, buffer);
        }
        zipStream.CloseEntry();

        zipStream.IsStreamOwner = true; // disposing the ZipOutputStream also closes the underlying FileStream
    }

    // Reset accumulation state for the next log.
    positionindex = 0;
    modelist.Clear();
    flightdata.Clear();
    position = new List<Core.Geometry.Point3D>[200];
    cmd.Clear();
}
/// <summary>
/// Downloads price bars for the configured symbol/period/date range and writes them in the
/// format selected by <c>outputType</c>: "csv" (semicolon-separated text), "hdf5" (two HDF5
/// datasets: doubles for OHLCV, longs for price type + epoch-ms timestamp), or "csv_zip"
/// (bid and ask CSVs zipped together, with repeated timestamps disambiguated by a
/// negative repeat counter suffix).
/// </summary>
void DownloadBars()
{
    if (outputType == "csv")
    {
        DownloadBarsEnumerator enumerator = quoteClient.DownloadBars(symbol, priceType, period, from, to, -1);
        // NOTE(review): the " yyyyMMdd" format strings carry a leading space, producing double
        // spaces in the file name — looks accidental but is preserved for compatibility.
        string path = Path.Combine(this.location, string.Format("{0} {1} {2} {3} {4}.csv", symbol.Replace("/", "%2F"), priceType, period, from.ToString(" yyyyMMdd"), to.ToString(" yyyyMMdd")));
        using (StreamWriter file = File.CreateText(path))
        {
            file.WriteLine("date_time;open;close;low;high;volume");
            for (Bar bar = enumerator.Next(-1); bar != null; bar = enumerator.Next(-1))
            {
                file.WriteLine(string.Format("{0};{1};{2};{3};{4};{5}", bar.From.ToString("yyyy-MM-dd HH:mm:ss", CultureInfo.InvariantCulture), bar.Open, bar.Close, bar.Low, bar.High, bar.Volume));
            }
        }
        this.Log("Bars are downloaded successfully");
    }
    else if (outputType == "hdf5")
    {
        DownloadBarsEnumerator enumerator = quoteClient.DownloadBars(symbol, priceType, period, from, to, -1);
        string path = Path.Combine(this.location, string.Format("{0} {1} {2} {3} {4}.h5", symbol.Replace("/", "%2F"), priceType, period, from.ToString(" yyyyMMdd"), to.ToString(" yyyyMMdd")));
        H5FileId fileId = H5F.create(path, H5F.CreateMode.ACC_TRUNC);
        var barsData = new List<Bar>();
        for (Bar bar = enumerator.Next(-1); bar != null; bar = enumerator.Next(-1))
        {
            barsData.Add(bar);
        }
        if (barsData.Count == 0)
        {
            // FIX: close the HDF5 handle before bailing out; the original early return
            // leaked the handle and left the (empty) file locked.
            H5F.close(fileId);
            return;
        }
        double[,] barsArray = new double[barsData.Count, 5];
        long[,] dataBarsArray = new long[barsData.Count, 2];
        long priceTypeString = 0; // 0 = Bid, 1 = Ask
        if (priceType == PriceType.Ask)
        {
            priceTypeString = 1;
        }
        for (int i = 0; i < barsData.Count; i++)
        {
            dataBarsArray[i, 0] = priceTypeString;
            // Milliseconds since the Unix epoch.
            long dateTimestamp = (long)(barsData[i].From.Subtract(new DateTime(1970, 1, 1))).TotalMilliseconds;
            dataBarsArray[i, 1] = dateTimestamp;
            barsArray[i, 0] = barsData[i].Volume;
            barsArray[i, 1] = barsData[i].Open;
            barsArray[i, 2] = barsData[i].Close;
            barsArray[i, 3] = barsData[i].High;
            barsArray[i, 4] = barsData[i].Low;
        }
        H5DataTypeId barsTypeId = new H5DataTypeId(H5T.H5Type.NATIVE_DOUBLE);
        WriteDataToNewFile(fileId, "Bars", barsArray, barsData.Count, 5, barsTypeId);
        H5DataTypeId dataBarsTypeId = new H5DataTypeId(H5T.H5Type.NATIVE_LLONG);
        WriteDataToNewFile(fileId, "DataBars", dataBarsArray, barsData.Count, 2, dataBarsTypeId);
        H5F.close(fileId);
        this.Log("Bars are downloaded successfully");
    }
    else if (outputType == "csv_zip")
    {
        DownloadBarsEnumerator BidEnumerator = quoteClient.DownloadBars(symbol, PriceType.Bid, period, from, to, -1);
        DownloadBarsEnumerator AskEnumerator = quoteClient.DownloadBars(symbol, PriceType.Ask, period, from, to, -1);
        string path = Path.Combine(location, $"{symbol.Replace("/", "%2F")}_{period}_{from.ToString("yyyy-MM-dd")}_{to.ToString("yyyy-MM-dd")}.zip");
        using (ZipOutputStream zs = new ZipOutputStream(File.Create(path)))
        {
            // NOTE(review): the temporary CSVs are created in the current working directory and
            // tracked in currentTempFile for cleanup-on-abort; concurrent runs would collide.
            string filename = $"{period} bid.csv";
            using (StreamWriter file = File.CreateText(filename))
            {
                currentTempFile = filename;
                DateTime lastQuoteTime = DateTime.MinValue;
                int repeatingQuotes = 0;
                for (Bar bar = BidEnumerator.Next(-1); bar != null; bar = BidEnumerator.Next(-1))
                {
                    StringBuilder builder = new StringBuilder();
                    builder.Append(bar.From.ToString("yyyy.MM.dd HH:mm:ss", CultureInfo.InvariantCulture));
                    // Repeated timestamps get a "-n" suffix so rows stay distinguishable.
                    if (bar.From == lastQuoteTime)
                    {
                        repeatingQuotes++;
                    }
                    else
                    {
                        lastQuoteTime = bar.From;
                        repeatingQuotes = 0;
                    }
                    if (repeatingQuotes > 0)
                    {
                        builder.Append(-repeatingQuotes);
                    }
                    builder.Append(",");
                    builder.Append($"{bar.Open},{bar.High},{bar.Low},{bar.Close},{bar.Volume},");
                    builder.Remove(builder.Length - 1, 1); // drop trailing comma
                    file.WriteLine(builder);
                }
            }
            FileInfo fi = new FileInfo(filename);
            ZipEntry newEntry = new ZipEntry(filename);
            newEntry.Size = fi.Length;
            zs.PutNextEntry(newEntry);
            byte[] buffer = new byte[4096];
            using (FileStream streamReader = File.OpenRead(filename))
            {
                StreamUtils.Copy(streamReader, zs, buffer);
            }
            zs.CloseEntry();
            File.Delete(filename);
            currentTempFile = null;
            filename = $"{period} ask.csv";
            using (StreamWriter file = File.CreateText(filename))
            {
                currentTempFile = filename;
                DateTime lastQuoteTime = DateTime.MinValue;
                int repeatingQuotes = 0;
                for (Bar bar = AskEnumerator.Next(-1); bar != null; bar = AskEnumerator.Next(-1))
                {
                    StringBuilder builder = new StringBuilder();
                    builder.Append(bar.From.ToString("yyyy.MM.dd HH:mm:ss", CultureInfo.InvariantCulture));
                    if (bar.From == lastQuoteTime)
                    {
                        repeatingQuotes++;
                    }
                    else
                    {
                        lastQuoteTime = bar.From;
                        repeatingQuotes = 0;
                    }
                    if (repeatingQuotes > 0)
                    {
                        builder.Append(-repeatingQuotes);
                    }
                    builder.Append(",");
                    builder.Append($"{bar.Open},{bar.High},{bar.Low},{bar.Close},{bar.Volume},");
                    builder.Remove(builder.Length - 1, 1);
                    file.WriteLine(builder);
                }
            }
            fi = new FileInfo(filename);
            newEntry = new ZipEntry(filename);
            newEntry.Size = fi.Length;
            zs.PutNextEntry(newEntry);
            buffer = new byte[4096];
            using (FileStream streamReader = File.OpenRead(filename))
            {
                StreamUtils.Copy(streamReader, zs, buffer);
            }
            zs.CloseEntry();
            File.Delete(filename);
            currentTempFile = null;
        }
        this.Log("Bars are downloaded successfully");
    }
}
/// <summary>
/// Parses a WavPack v4 block header at the reader's current position and, if the "wvpk"
/// magic matches, fills in version, channel arrangement, bit depth, sample counts, sample
/// rate, codec family, duration/bitrate, and scans the first 4 KiB of block data for the
/// encoder-mode metadata sub-block (id 0x65).
/// </summary>
/// <param name="r">Reader positioned at the start of a candidate wvpk block.</param>
/// <returns>true when a wvpk header was recognized and the fields were populated.</returns>
private bool _ReadV4(BinaryReader r)
{
    WavPackHeader4 wvh4 = new WavPackHeader4();
    byte[] EncBuf = new byte[4096];
    int tempo;
    byte encoderbyte;
    bool result = false;

    wvh4.Reset();
    wvh4.ckID = r.ReadChars(4);
    wvh4.ckSize = r.ReadUInt32();
    wvh4.version = r.ReadUInt16();
    wvh4.track_no = r.ReadByte();
    wvh4.index_no = r.ReadByte();
    wvh4.total_samples = r.ReadUInt32();
    wvh4.block_index = r.ReadUInt32();
    wvh4.block_samples = r.ReadUInt32();
    wvh4.flags = r.ReadUInt32();
    wvh4.crc = r.ReadUInt32();

    if (StreamUtils.StringEqualsArr("wvpk", wvh4.ckID)) // wavpack header found -- TODO handle exceptions better
    {
        result = true;
        version = (wvh4.version >> 8);
        // FIX: was (int)(2 - (wvh4.flags & 4)), which underflows to -2 when the mono
        // flag (bit 2) is set. Test the bit instead: mono -> 1 channel, else 2.
        channelsArrangement = ChannelsArrangements.GuessFromChannelNumber((wvh4.flags & 4) != 0 ? 1 : 2);
        bits = (int)((wvh4.flags & 3) * 16); // bytes stored flag — NOTE(review): mapping to bits looks odd, confirm against the WavPack spec
        samples = wvh4.total_samples;
        bSamples = wvh4.block_samples;
        // Sample-rate index lives in bits 23..27; out-of-table values fall back to 44100.
        sampleRate = (int)((wvh4.flags & (0x1F << 23)) >> 23);
        if ((sampleRate > 14) || (sampleRate < 0))
        {
            sampleRate = 44100;
        }
        else
        {
            sampleRate = sample_rates[sampleRate];
        }
        if (8 == (wvh4.flags & 8)) // hybrid flag
        {
            encoder = "hybrid lossy";
            codecFamily = AudioDataIOFactory.CF_LOSSY;
        }
        else
        {
            //if (2 == (wvh4.flags & 2) ) { // lossless flag
            encoder = "lossless";
            codecFamily = AudioDataIOFactory.CF_LOSSLESS;
        }
        duration = (double)wvh4.total_samples * 1000.0 / sampleRate;
        if (duration > 0)
        {
            bitrate = (sizeInfo.FileSize - tagSize) * 8 / (double)(samples * 1000.0 / (double)sampleRate);
        }
        // Scan block data for the encoder-mode metadata sub-block (id 0x65, size byte 0x02).
        EncBuf = r.ReadBytes(4096);
        // FIX: bound by the actual number of bytes read minus 2 — the original looped to
        // 4096 while indexing tempo+1 and tempo+2, which throws IndexOutOfRangeException
        // at the end of the buffer and whenever the file has fewer than 4096 bytes left.
        for (tempo = 0; tempo < EncBuf.Length - 2; tempo++)
        {
            if (0x65 == EncBuf[tempo])
            {
                if (0x02 == EncBuf[tempo + 1])
                {
                    encoderbyte = EncBuf[tempo + 2];
                    switch (encoderbyte)
                    {
                        case 8: encoder = encoder + " (high)"; break;
                        case 0: encoder = encoder + " (normal)"; break;
                        case 2: encoder = encoder + " (fast)"; break;
                        case 6: encoder = encoder + " (very fast)"; break;
                    }
                    break;
                }
            }
        }
    }
    return(result);
}
/// <summary>
/// Creates a zip archive from the matching files of the given source folders using SharpZipLib.
/// The simple route would be SharpZipLib's FastZip class; this hand-rolled loop exists so the
/// BeforeCompress event can be raised (and honored) for every file.
/// </summary>
/// <param name="SearchPattern">
/// The search string. For example, "System*" can be used to search for all directories that begin with the word "System".
/// </param>
public void CreateZip(string ZipFullPath, string SearchPattern, SearchOption SearchOption, string[] aSourceFolder, string Password)
{
    using (ZipOutputStream zipOut = new ZipOutputStream(File.Create(ZipFullPath)))
    {
        zipOut.SetLevel(6); // 0 = store only … 9 = best compression
        if (!string.IsNullOrEmpty(Password))
        {
            zipOut.Password = Password;
        }
        foreach (string sourceFolder in aSourceFolder)
        {
            var nameTransform = new ZipNameTransform(sourceFolder);
            foreach (FileInfo file in new DirectoryInfo(sourceFolder).GetFiles(SearchPattern, SearchOption))
            {
                string sourcePath = file.FullName;
                string entryPath = nameTransform.TransformFile(sourcePath);
                if (this.BeforeCompress != null)
                {
                    var args = new CBeforeCompressEventArgs() { FullPathSrc = sourcePath, FolderNameInZip = entryPath };
                    this.BeforeCompress(this, args);
                    if (args.Cancel)
                    {
                        continue; // a handler vetoed this file
                    }
                    if (!string.IsNullOrEmpty(args.NewFolderNameInZipIs))
                    {
                        entryPath = args.NewFolderNameInZipIs;
                    }
                }
                // Set Size up front so size and crc are stored in the entry header rather
                // than the footer (header would otherwise carry size == crc == -1, which
                // some ZIP programs cannot handle).
                var entry = new ZipEntry(entryPath)
                {
                    DateTime = file.LastWriteTime,
                    Size = file.Length
                };
                zipOut.PutNextEntry(entry);
                using (FileStream input = File.OpenRead(sourcePath))
                {
                    StreamUtils.Copy(input, zipOut, new byte[4096]);
                }
                zipOut.CloseEntry();
            }
        }
        zipOut.Finish();
        zipOut.Close();
    }
}
/// <summary>
/// Parses a WavPack v3 file presented as a RIFF/WAVE container: validates the RIFF header,
/// reads the "fmt " chunk, then looks for the "wvpk" WavPack header inside the "data" chunk
/// to fill version, channel count, sample count, duration, bitrate and an encoder label.
/// </summary>
/// <param name="r">Reader positioned at the start of the RIFF chunk.</param>
/// <returns>true when at least a valid "fmt " chunk was read; false otherwise.</returns>
private bool _ReadV3(BinaryReader r)
{
    RiffChunk chunk = new RiffChunk();
    char[] wavchunk;
    FormatChunk fmt;
    bool hasfmt;
    WavpackHeader3 wvh3 = new WavpackHeader3();
    bool result = false;

    hasfmt = false;

    // read and evaluate header
    chunk.Reset();
    chunk.id = r.ReadChars(4);
    chunk.size = r.ReadUInt32();
    wavchunk = r.ReadChars(4);
    if (!StreamUtils.StringEqualsArr("WAVE", wavchunk))
    {
        return(result);
    }

    // start looking for chunks
    chunk.Reset();
    while (r.BaseStream.Position < r.BaseStream.Length)
    {
        chunk.id = r.ReadChars(4);
        chunk.size = r.ReadUInt32();
        if (chunk.size <= 0)
        {
            break;
        }
        if (StreamUtils.StringEqualsArr("fmt ", chunk.id)) // Format chunk found read it
        {
            if (chunk.size >= 16 /*sizeof(fmt_chunk)*/)
            {
                fmt.wformattag = r.ReadUInt16();
                fmt.wchannels = r.ReadUInt16();
                fmt.dwsamplespersec = r.ReadUInt32();
                fmt.dwavgbytespersec = r.ReadUInt32();
                fmt.wblockalign = r.ReadUInt16();
                fmt.wbitspersample = r.ReadUInt16();
                hasfmt = true;
                result = true;
                formatTag = fmt.wformattag;
                channelsArrangement = ChannelsArrangements.GuessFromChannelNumber(fmt.wchannels);
                sampleRate = (int)fmt.dwsamplespersec;
                bits = fmt.wbitspersample;
                bitrate = (double)fmt.dwavgbytespersec * 8;
            }
            else
            {
                break; // malformed fmt chunk
            }
        }
        else
        {
            if ((StreamUtils.StringEqualsArr("data", chunk.id)) && hasfmt)
            {
                // Fixed-layout WavPack v3 header sits at the start of the data chunk.
                wvh3.Reset();
                wvh3.ckID = r.ReadChars(4);
                wvh3.ckSize = r.ReadUInt32();
                wvh3.version = r.ReadUInt16();
                wvh3.bits = r.ReadUInt16();
                wvh3.flags = r.ReadUInt16();
                wvh3.shift = r.ReadUInt16();
                wvh3.total_samples = r.ReadUInt32();
                wvh3.crc = r.ReadUInt32();
                wvh3.crc2 = r.ReadUInt32();
                wvh3.extension = r.ReadChars(4);
                wvh3.extra_bc = r.ReadByte();
                wvh3.extras = r.ReadChars(3);
                if (StreamUtils.StringEqualsArr("wvpk", wvh3.ckID)) // wavpack header found
                {
                    result = true;
                    version = wvh3.version;
                    // Low flag bit set => 1 channel, else 2 (presumably the mono flag — confirm against WavPack v3 docs)
                    channelsArrangement = ChannelsArrangements.GuessFromChannelNumber(2 - (wvh3.flags & 1));
                    samples = wvh3.total_samples;

                    codecFamily = AudioDataIOFactory.CF_LOSSLESS;

                    // Encoder guess
                    if (wvh3.bits > 0)
                    {
                        if ((wvh3.flags & NEW_HIGH_FLAG_v3) > 0)
                        {
                            encoder = "hybrid";
                            if ((wvh3.flags & WVC_FLAG_v3) > 0)
                            {
                                encoder += " lossless";
                            }
                            else
                            {
                                encoder += " lossy";
                                codecFamily = AudioDataIOFactory.CF_LOSSY;
                            }
                            if ((wvh3.flags & EXTREME_DECORR_v3) > 0)
                            {
                                encoder = encoder + " (high)";
                            }
                        }
                        else
                        {
                            // NOTE(review): both branches below build the same "-bit lossy" base label;
                            // the second only appends " high"/" fast". Looks like duplicated code from
                            // the reference parser — confirm against WavPack v3 format notes before changing.
                            if ((wvh3.flags & (HIGH_FLAG_v3 | FAST_FLAG_v3)) == 0)
                            {
                                encoder = (wvh3.bits + 3).ToString() + "-bit lossy";
                                codecFamily = AudioDataIOFactory.CF_LOSSY;
                            }
                            else
                            {
                                encoder = (wvh3.bits + 3).ToString() + "-bit lossy";
                                codecFamily = AudioDataIOFactory.CF_LOSSY;
                                if ((wvh3.flags & HIGH_FLAG_v3) > 0)
                                {
                                    encoder += " high";
                                }
                                else
                                {
                                    encoder += " fast";
                                }
                            }
                        }
                    }
                    else
                    {
                        if ((wvh3.flags & HIGH_FLAG_v3) == 0)
                        {
                            encoder = "lossless (fast mode)";
                        }
                        else
                        {
                            if ((wvh3.flags & EXTREME_DECORR_v3) > 0)
                            {
                                encoder = "lossless (high mode)";
                            }
                            else
                            {
                                encoder = "lossless";
                            }
                        }
                    }
                    if (sampleRate <= 0)
                    {
                        sampleRate = 44100; // fallback when fmt carried no usable rate
                    }
                    duration = (double)wvh3.total_samples * 1000.0 / sampleRate;
                    if (duration > 0)
                    {
                        bitrate = 8.0 * (sizeInfo.FileSize - tagSize - (double)wvh3.ckSize) / duration;
                    }
                }
                break;
            }
            else // not a wv file
            {
                break;
            }
        }
    } // while

    return(result);
}
/// <summary>
/// Recursively writes one resource definition (and every resource it depends on) into the
/// package group manifest, copying each used file from the local content cache (stored by MD5)
/// into the zip archive.
/// </summary>
/// <param name="resourceDescriptionFilePath">Description-file path identifying the resource.</param>
/// <param name="zipStream">Open zip stream the content files are appended to.</param>
private void writeResourceRecursive(string resourceDescriptionFilePath, ZipOutputStream zipStream)
{
    // Skip resources that were already converted.
    if (packageGroupManifest_resourceDefinitions.HasField(resourceDescriptionFilePath))
    {
        return;
    }
    // Look up the resource's conversion record previously written to the cache table.
    JSONObject resourceStorage = storage.GetField("assets").GetField(resourceDescriptionFilePath);
    if (resourceStorage == null)
    {
        Debug.LogError("创建资源包时写入'" + resourceDescriptionFilePath + "'失败,没有找到该资源的转换记录");
        return;
    }
    // Create the JSON definition object for this resource.
    JSONObject definitionObject = new JSONObject(JSONObject.Type.OBJECT);
    packageGroupManifest_resourceDefinitions.AddField(resourceDescriptionFilePath, definitionObject);
    // Write its fields.
    definitionObject.AddField("dependencies", resourceStorage.GetField("dependencies"));
    definitionObject.AddField("type", resourceStorage.GetField("type"));
    definitionObject.AddField("descriptionFileID", resourceDescriptionFilePath);
    if (resourceStorage.GetField("importSetting") != null)
    {
        definitionObject.AddField("importSetting", resourceStorage.GetField("importSetting"));
    }
    // Take the cached useFile list, iterate it and write each entry into the manifest's
    // fileDescription field; everything except resourceDescriptionFilePath itself also
    // goes into this resource definition's useFile array.
    JSONObject outputUseFileArray = new JSONObject(JSONObject.Type.ARRAY);
    definitionObject.AddField("useFile", outputUseFileArray);
    // Add the files.
    foreach (string usingFile in WXUtility.ConvertJSONArrayToList(resourceStorage.GetField("useFile")))
    {
        if (usingFile != resourceDescriptionFilePath)
        {
            outputUseFileArray.Add(usingFile);
        }
        if (packageGroupManifest_fileDescription.HasField(usingFile))
        {
            continue; // file already described and packed
        }
        JSONObject fileStorage = storage.GetField("files").GetField(usingFile);
        // add fileDescriptions
        JSONObject fileDescription = new JSONObject(JSONObject.Type.OBJECT);
        packageGroupManifest_fileDescription.AddField(usingFile, fileDescription);
        fileDescription.AddField("path", usingFile);
        // add files
        JSONObject fileItem = new JSONObject(JSONObject.Type.OBJECT);
        fileItem.AddField("path", usingFile);
        fileItem.AddField("filetype", fileStorage.GetField("filetype"));
        packageGroupManifest_files.Add(fileItem);
        // Copy the cached content blob (stored under its MD5 name) into the archive under the file path.
        zipStream.PutNextEntry(new ZipEntry(usingFile));
        var buffer = new byte[10240];
        using (FileStream fsInput = File.OpenRead(Path.Combine(storagePath, Path.Combine(CONTENT_FOLDER, fileStorage.GetField("MD5").GetRawString()))))
        {
            StreamUtils.Copy(fsInput, zipStream, buffer);
        }
    }
    // Recursively convert dependencies.
    foreach (string dependencyResource in WXUtility.ConvertJSONArrayToList(resourceStorage.GetField("dependencies")))
    {
        writeResourceRecursive(dependencyResource, zipStream);
    }
}
/// <summary>
/// Starts a mock http server for tests.
/// Serves a small in-memory zip archive for *.zip requests, a 404 (with body) for *.404,
/// and "Hello World"/"Hello Galaxy" for *.txt depending on
/// <paramref name="alternativeDataIndicator"/>. Each handled request increments
/// <paramref name="webRequestCount"/>.
/// </summary>
public static void StartRequestHandler(HttpListener listener, AlternativeDataIndicator alternativeDataIndicator, RequestCount webRequestCount)
{
#pragma warning disable EPC13 // Suspiciously unobserved result.
    Task.Run(
        () =>
    {
        while (listener.IsListening)
        {
            // Note: The GetContext method blocks while waiting for a request.
            var context = listener.GetContext();
            var fileName = Path.GetFileName(context.Request.Url.LocalPath);
            var response = context.Response;
            byte[] worldBuffer = System.Text.Encoding.UTF8.GetBytes("Hello World");
            byte[] galaxyBuffer = System.Text.Encoding.UTF8.GetBytes("Hello Galaxy");
            byte[] universeBuffer = System.Text.Encoding.UTF8.GetBytes("Hello Universe");
            switch (Path.GetExtension(fileName))
            {
            case ".zip":
                // Build an in-memory zip with three entries and stream it back.
                // NOTE(review): outputMemStream/zipStream are never disposed; tolerable
                // for a test mock but worth confirming.
                MemoryStream outputMemStream = new MemoryStream();
                ZipOutputStream zipStream = new ZipOutputStream(outputMemStream);
                zipStream.SetLevel(5);
                AddFile(zipStream, "world", worldBuffer);
                AddFile(zipStream, "galaxy", galaxyBuffer);
                AddFile(zipStream, "multi/universe", universeBuffer);
                zipStream.IsStreamOwner = false; // keep outputMemStream open after Close
                zipStream.Close();
                outputMemStream.Position = 0;
                response.ContentLength64 = outputMemStream.Length;
                StreamUtils.Copy(outputMemStream, response.OutputStream, new byte[4096]);
                break;
            case ".404":
                response.StatusCode = 404;
                response.ContentLength64 = worldBuffer.Length;
                response.OutputStream.Write(worldBuffer, 0, worldBuffer.Length);
                break;
            case ".txt":
                var buffer = alternativeDataIndicator.UseAlternativeData ? galaxyBuffer : worldBuffer;
                response.ContentLength64 = buffer.Length;
                response.OutputStream.Write(buffer, 0, buffer.Length);
                break;
            default:
                Assert.True(false, "Unexpected http request..");
                break;
            }

            // Write buffer and close request
            // NOTE(review): these headers are added after the body has already been written;
            // HttpListener generally cannot change headers at that point — confirm the tests
            // don't rely on them. Also note the stray ')' at the end of the
            // Content-Disposition value; verify whether any test expects it before fixing.
            response.Headers.Add("Content-type: application/octet-stream");
            response.Headers.Add("Content-Description: File Transfer");
            response.Headers.Add($"Content-Disposition: attachment; filename=\"{fileName}\")");
            response.OutputStream.Close();
            webRequestCount.Count += 1;
        }
    });
#pragma warning restore EPC13 // Suspiciously unobserved result.
}
public static CharBlockArray Open(BinaryReader @in) { return(StreamUtils.DeserializeFromStream(@in) as CharBlockArray); }
void IStreamedObject.WriteStream(BinaryWriter writer, object options) { StreamUtils.WriteList(writer, this); }
/// <summary>
/// Reads all stored APE tag frames: values of 500 bytes or less are registered as text
/// metadata fields; larger values are treated as embedded pictures.
/// </summary>
/// <param name="source">Reader over the audio file.</param>
/// <param name="Tag">Tag header information (file size, shift, tag size, frame count).</param>
/// <param name="readTagParams">Controls whether pictures and all meta frames are materialized.</param>
private void readFrames(BinaryReader source, TagInfo Tag, MetaDataIO.ReadTagParams readTagParams)
{
    string frameName;
    string strValue;
    int frameDataSize;
    long valuePosition;
    int frameFlags;

    // Position at the start of the frame area, computed from the tag header.
    source.BaseStream.Seek(Tag.FileSize - Tag.DataShift - Tag.Size, SeekOrigin.Begin);

    // Read all stored fields
    for (int iterator = 0; iterator < Tag.FrameCount; iterator++)
    {
        frameDataSize = source.ReadInt32();
        frameFlags = source.ReadInt32();
        // Slightly more permissive than what APE specs indicate in terms of allowed characters
        // ("Space(0x20), Slash(0x2F), Digits(0x30...0x39), Letters(0x41...0x5A, 0x61...0x7A)")
        frameName = StreamUtils.ReadNullTerminatedString(source, Utils.Latin1Encoding);

        valuePosition = source.BaseStream.Position;

        if ((frameDataSize > 0) && (frameDataSize <= 500))
        {
            /*
             * According to spec : "Items are not zero-terminated like in C / C++.
             * If there's a zero character, multiple items are stored under the key and the items are separated by zero characters."
             *
             * => Values have to be splitted
             */
            strValue = Utils.StripEndingZeroChars(Encoding.UTF8.GetString(source.ReadBytes(frameDataSize)));
            strValue = strValue.Replace('\0', Settings.InternalValueSeparator).Trim();
            SetMetaField(frameName.Trim().ToUpper(), strValue, readTagParams.ReadAllMetaFrames);
        }
        else if (frameDataSize > 0) // Size > 500 => Probably an embedded picture
        {
            int picturePosition;
            PictureInfo.PIC_TYPE picType = decodeAPEPictureType(frameName);

            if (picType.Equals(PictureInfo.PIC_TYPE.Unsupported))
            {
                addPictureToken(getImplementedTagType(), frameName);
                picturePosition = takePicturePosition(getImplementedTagType(), frameName);
            }
            else
            {
                addPictureToken(picType);
                picturePosition = takePicturePosition(picType);
            }

            if (readTagParams.ReadPictures || readTagParams.PictureStreamHandler != null)
            {
                // Description seems to be a null-terminated ANSI string containing
                //   * The frame name
                //   * A byte (0x2E)
                //   * The picture type (3 characters; similar to the 2nd part of the mime-type)
                String description = StreamUtils.ReadNullTerminatedString(source, Utils.Latin1Encoding);
                ImageFormat imgFormat = ImageUtils.GetImageFormatFromMimeType(description.Substring(description.Length - 3, 3));

                PictureInfo picInfo = new PictureInfo(imgFormat, picType, getImplementedTagType(), frameName, picturePosition);
                picInfo.Description = description;
                // Picture bytes = frame size minus the description and its null terminator.
                picInfo.PictureData = new byte[frameDataSize - description.Length - 1];
                source.BaseStream.Read(picInfo.PictureData, 0, frameDataSize - description.Length - 1);

                tagData.Pictures.Add(picInfo);

                if (readTagParams.PictureStreamHandler != null)
                {
                    MemoryStream mem = new MemoryStream(picInfo.PictureData);
                    readTagParams.PictureStreamHandler(ref mem, picInfo.PicType, picInfo.NativeFormat, picInfo.TagType, picInfo.NativePicCode, picInfo.Position);
                    mem.Close();
                }
            }
        }
        // Skip to the next frame regardless of how much of the value was consumed above.
        source.BaseStream.Seek(valuePosition + frameDataSize, SeekOrigin.Begin);
    }
}
/// <summary>
/// Writes the recorded flight data as a first-person KML tour (one gx:FlyTo per distinct GPS
/// position), then packages the KML together with the plane model "block_plane_0.dae" into a
/// KMZ (zip) archive, deletes the intermediate KML file and resets the recording state.
/// </summary>
/// <param name="filename">Path of the KML file to create; the KMZ name is derived by replacing ".log-fp.kml" with "-fp.kmz".</param>
public void writeKMLFirstPerson(string filename)
{
    // NOTE(review): stream and fs are closed manually; if an exception is thrown midway the
    // handles leak — consider using-blocks, but confirm callers before changing behavior.
    StreamWriter stream = new StreamWriter(File.Open(filename, FileMode.Create));
    System.Text.ASCIIEncoding encoding = new System.Text.ASCIIEncoding(); // NOTE(review): unused local
    string header = "<?xml version=\"1.0\" encoding=\"UTF-8\"?><kml xmlns=\"http://www.opengis.net/kml/2.2\" xmlns:gx=\"http://www.google.com/kml/ext/2.2\" xmlns:kml=\"http://www.opengis.net/kml/2.2\" xmlns:atom=\"http://www.w3.org/2005/Atom\">\n <Document> <name>Paths</name> <description>Path</description>\n <Style id=\"yellowLineGreenPoly\"> <LineStyle> <color>7f00ffff</color> <width>4</width> </LineStyle> <PolyStyle> <color>7f00ff00</color> </PolyStyle> </Style>\n ";
    stream.Write(header);
    StringBuilder kml = new StringBuilder();
    StringBuilder data = new StringBuilder();
    double lastlat = 0;
    double lastlong = 0;
    int gpspackets = 0;    // all packets with a GPS fix
    int lastgpspacket = 0; // packet index of the previous emitted FlyTo
    foreach (Data mod in flightdata)
    {
        // Skip packets without a GPS fix.
        if (mod.model.Location.latitude == 0)
        {
            continue;
        }
        gpspackets++;
        // Skip duplicate positions (still counted in gpspackets so durations stay correct).
        if (lastlat == mod.model.Location.latitude && lastlong == mod.model.Location.longitude)
        {
            continue;
        }
        // Duration: (packets since last emitted fix) * 0.1 s, assuming a 10 Hz log rate.
        data.Append(@" <gx:FlyTo> <gx:duration>" + ((gpspackets - lastgpspacket) * 0.1).ToString(new System.Globalization.CultureInfo("en-US")) + @"</gx:duration> <gx:flyToMode>smooth</gx:flyToMode> <Camera> <longitude>" + mod.model.Location.longitude.ToString(new System.Globalization.CultureInfo("en-US")) + @"</longitude> <latitude>" + mod.model.Location.latitude.ToString(new System.Globalization.CultureInfo("en-US")) + @"</latitude> <altitude>" + mod.model.Location.altitude.ToString(new System.Globalization.CultureInfo("en-US")) + @"</altitude> <roll>" + mod.model.Orientation.roll.ToString(new System.Globalization.CultureInfo("en-US")) + @"</roll> <tilt>" + (90 - mod.model.Orientation.tilt).ToString(new System.Globalization.CultureInfo("en-US")) + @"</tilt> <heading>" + mod.model.Orientation.heading.ToString(new System.Globalization.CultureInfo("en-US")) + @"</heading> <altitudeMode>absolute</altitudeMode> </Camera> </gx:FlyTo> ");
        lastlat = mod.model.Location.latitude;
        lastlong = mod.model.Location.longitude;
        lastgpspacket = gpspackets;
    }
    kml.Append(@" <Folder> <name>Flight</name> <gx:Tour> <name>Flight Do</name> <gx:Playlist> " + data + @"</gx:Playlist> </gx:Tour> </Folder> </Document> </kml> ");
    stream.Write(kml.ToString());
    stream.Close();

    // create kmz - aka zip file
    FileStream fs = File.Open(filename.Replace(".log-fp.kml", "-fp.kmz"), FileMode.Create);
    ZipOutputStream zipStream = new ZipOutputStream(fs);
    zipStream.SetLevel(9);            //0-9, 9 being the highest level of compression
    zipStream.UseZip64 = UseZip64.Off; // older zipfile

    // entry 1
    string entryName = ZipEntry.CleanName(Path.GetFileName(filename)); // Removes drive from name and fixes slash direction
    ZipEntry newEntry = new ZipEntry(entryName);
    newEntry.DateTime = DateTime.Now;
    zipStream.PutNextEntry(newEntry);
    // Zip the file in buffered chunks
    // the "using" will close the stream even if an exception occurs
    byte[] buffer = new byte[4096];
    using (FileStream streamReader = File.OpenRead(filename))
    {
        StreamUtils.Copy(streamReader, zipStream, buffer);
    }
    zipStream.CloseEntry();

    File.Delete(filename); // the KML is now inside the KMZ

    filename = Settings.GetRunningDirectory() + "block_plane_0.dae";

    // entry 2
    entryName = ZipEntry.CleanName(Path.GetFileName(filename)); // Removes drive from name and fixes slash direction
    newEntry = new ZipEntry(entryName);
    newEntry.DateTime = DateTime.Now;
    zipStream.PutNextEntry(newEntry);
    // Zip the file in buffered chunks
    // the "using" will close the stream even if an exception occurs
    buffer = new byte[4096];
    using (FileStream streamReader = File.OpenRead(filename))
    {
        StreamUtils.Copy(streamReader, zipStream, buffer);
    }
    zipStream.CloseEntry();

    zipStream.IsStreamOwner = true; // Makes the Close also Close the underlying stream
    zipStream.Close();

    // Reset recording state for the next flight.
    positionindex = 0;
    modelist.Clear();
    flightdata.Clear();
    position = new List <Core.Geometry.Point3D> [200];
    cmdraw.Clear();
}
static bool Unzip(string zipFilename, string outFolder, string[] searchFilenames, bool ignoreInnerPath, bool inRoot = false, string password = null) { int fileCount = 0; bool success = false; using (FileStream fs = File.OpenRead(zipFilename)) { using (ZipFile zf = new ZipFile(fs) { IsStreamOwner = true }) { // AES encrypted entries are handled automatically if (!String.IsNullOrEmpty(password)) { zf.Password = password; } foreach (ZipEntry zipEntry in zf) { // Ignore directories if (!zipEntry.IsFile) { continue; } for (int i = 0; i < searchFilenames.Length; i++) { searchFilenames[i] = searchFilenames[i].ToLowerInvariant(); } if (inRoot) { ignoreInnerPath = true; } String entryFileName = zipEntry.Name.ToLowerInvariant(); if (ignoreInnerPath) { entryFileName = Path.GetFileName(entryFileName); } // filename filter set? if (searchFilenames != null && !searchFilenames.Contains(entryFileName)) { continue; } // file has to be in root? if (inRoot && entryFileName.Contains("/")) { continue; } fileCount++; success = true; byte[] buffer = new byte[4096]; // 4K is optimum Stream zipStream = zf.GetInputStream(zipEntry); // Manipulate the output filename here as desired. String fullZipToPath = Path.Combine(outFolder, entryFileName); string directoryName = Path.GetDirectoryName(fullZipToPath); if (directoryName.Length > 0) { Directory.CreateDirectory(directoryName); } // Unzip file in buffered chunks. This is just as fast as unpacking to a buffer the full size // of the file, but does not waste memory. // The "using" will close the stream even if an exception occurs. using (FileStream streamWriter = File.Create(fullZipToPath)) { StreamUtils.Copy(zipStream, streamWriter, buffer); } // found all the files we need? if (searchFilenames != null && searchFilenames.Length == fileCount) { break; } } } } return(success); }
/// <summary>
/// Downloads SEC EDGAR monthly XBRL RSS feeds for the hard-coded year/month range and,
/// for every non-inline filing not already on disk, fetches its zip archive to a temp
/// file, extracts it under rootFolder/&lt;year&gt;/&lt;month&gt;/&lt;cik&gt;.&lt;form&gt;.&lt;date&gt;.&lt;fileNo&gt;
/// and records the CIK in the local database. Progress lines are pushed to logQueue.
/// </summary>
/// <returns>"Done" when the scan over the range completes (downloads may still be in flight — see NOTE below).</returns>
static async Task <string> Download()
{
    // NOTE(review): the timer is never disposed; it is only kept alive by this local.
    var timer = new Timer(timerTick, null, 0, 1000);
    var fileNumber = 0;
    var tempFolder = Path.GetTempPath();
    // Best-effort cleanup of temp zips left over from a previous run.
    foreach (var file in Directory.GetFiles(tempFolder, "SEC-XBRL-0*.zip"))
    {
        try
        {
            File.Delete(file);
        }
        catch (IOException)
        {
        }
    }
    var key = new object(); // guards filesCompleted
    ServicePointManager.DefaultConnectionLimit = 100;
    var rootFolder = "/SecData/xbrl";
    var filesCompleted = new HashSet <string>();
    for (int year = 2019; year < 2020; year++)
    {
        for (int month = 1; month < 13; month++)
        {
            var folder = Path.Combine(rootFolder, year.ToString("0000"), month.ToString("00"));
            //var di = Directory.CreateDirectory(folder);
            string data;
            var url = $"https://sec.gov/Archives/edgar/monthly/xbrlrss-{year}-{month:00}.xml";
            try
            {
                data = await client.GetStringAsync(url); // forbidden is done
            }
            catch
            {
                data = null;
            }
            if (data != null)
            {
                var doc = XDocument.Parse(data);
                // Project each RSS <item> into the fields needed below (all null-tolerant).
                var all = from item in doc.Descendants() where item.Name.LocalName == "item"
                          select new
                {
                    title = item.DescendantsAndSelf().FirstOrDefault(e => e.Name.LocalName == "title") ?.Value,
                    url = item.DescendantsAndSelf().FirstOrDefault(e => e.Name.LocalName == "enclosure") ?.Attributes().FirstOrDefault(u => u.Name == "url")?.Value,
                    cik = item.DescendantsAndSelf().FirstOrDefault(e => e.Name.LocalName == "cikNumber") ?.Value,
                    formType = item.DescendantsAndSelf().FirstOrDefault(e => e.Name.LocalName == "formType") ?.Value.Replace("/", "-"),
                    fileNumber = item.DescendantsAndSelf().FirstOrDefault(e => e.Name.LocalName == "fileNumber")?.Value,
                    filingDate = item.DescendantsAndSelf().FirstOrDefault(e => e.Name.LocalName == "filingDate")?.Value,
                    xbrlFiles = item.DescendantsAndSelf().FirstOrDefault(e => e.Name.LocalName == "xbrlFiles"),
                    inline = item.DescendantsAndSelf()?.FirstOrDefault(e => e.Name.LocalName == "xbrlFiles")?.DescendantsAndSelf().Where(f => f.Name.LocalName == "xbrlFile").Attributes().Any(a => a.Name.LocalName == "inlineXBRL" && a.Value == "true")
                };
                var outstanding = 0;
                // NOTE(review): the lambda is async, but Parallel.ForEach does not await async
                // delegates — each body runs fire-and-forget past its first await, so the loop
                // (and this method) can complete before the downloads do. Confirm intent.
                Parallel.ForEach(all, new ParallelOptions() { MaxDegreeOfParallelism = 20 }, async(item) =>
                {
                    var logEntry = new StringBuilder();
                    var isIxbrl = item.inline.HasValue && item.inline.Value;
                    if (!isIxbrl) // inline-XBRL filings are skipped
                    {
                        var di = new DirectoryInfo(folder);
                        var endFolderName = Path.Combine(di.FullName, $"{item.cik}.{item.formType}.{item.fileNumber}");
                        logEntry.AppendLine(endFolderName);
                        if (!Directory.Exists(endFolderName))
                        {
                            if (item.url != null)
                            {
                                var fileNumberValue = Interlocked.Increment(ref fileNumber);
                                var tempFileName = Path.Combine(tempFolder, $"SEC-XBRL-{fileNumberValue:0000000000}.zip");
                                //var inline = (item.inline.HasValue && item.inline.Value) ? ".ixbrl" : "";
                                var inline = string.Empty;
                                // Sanitize the filing date for use inside a folder name.
                                var filingDate = new string(item.filingDate.Select(c => (Path.GetInvalidFileNameChars().Contains(c) ? '_' : c)).ToArray());
                                var filename = $"{item.cik}.{item.formType}.{filingDate}.{item.fileNumber}{inline}";
                                var incr = 0;
                                var newFilename = filename;
                                // De-duplicate target names already claimed by another worker.
                                lock (key)
                                    while (filesCompleted.Contains(newFilename))
                                    {
                                        newFilename = filename + "." + ++incr;
                                        logEntry.AppendLine($"\tDuplicate: {newFilename}");
                                    }
                                filename = newFilename;
                                lock (key)
                                    filesCompleted.Add(filename);
                                var folderName = Path.Combine(folder, filename);
                                if (!Directory.Exists(folderName))
                                {
                                    logEntry.AppendLine($"\tGet: {outstanding}:{requestCounter} - {item.url}");
                                    // Crude throttles: cap in-flight downloads and request rate.
                                    while (outstanding > 50)
                                    {
                                        Thread.Sleep(1000);
                                    }
                                    Interlocked.Increment(ref outstanding);
                                    Interlocked.Increment(ref requestCounter);
                                    while (requestCounter > 9)
                                    {
                                        Thread.Sleep(50);
                                    }
                                    var directoryInfo = Directory.CreateDirectory(folderName);
                                    byte[] xbrlData = null;
                                    try
                                    {
                                        var response = await client.GetAsync(item.url);
                                        await response.Content.ReadAsByteArrayAsync().ContinueWith(
                                            (bytes) =>
                                        {
                                            logEntry.AppendLine($"\tWrite: {tempFileName}");
                                            File.WriteAllBytes(tempFileName, bytes.Result);
                                            if (bytes.IsCompletedSuccessfully)
                                            {
                                                try
                                                {
                                                    using (var fs = File.OpenRead(tempFileName))
                                                    {
                                                        var zf = new ZipFile(fs);
                                                        foreach (ZipEntry zipEntry in zf)
                                                        {
                                                            if (!zipEntry.IsFile)
                                                            {
                                                                continue; // Ignore directories
                                                            }
                                                            var entryFileName = zipEntry.Name;
                                                            // to remove the folder from the entry:-
                                                            entryFileName = Path.GetFileName(entryFileName);
                                                            // Optionally match entrynames against a selection list here to skip as desired.
                                                            // The unpacked length is available in the zipEntry.Size property.
                                                            var buffer = new byte[65536];
                                                            var zipStream = zf.GetInputStream(zipEntry);
                                                            // Manipulate the output filename here as desired.
                                                            var fullZipToPath = Path.Combine(directoryInfo.FullName, entryFileName);
                                                            var directoryName = Path.GetDirectoryName(fullZipToPath);
                                                            if (directoryName.Length > 0)
                                                            {
                                                                Directory.CreateDirectory(directoryName);
                                                            }
                                                            if (!File.Exists(fullZipToPath))
                                                            {
                                                                logEntry.AppendLine($"\tExtract: {fullZipToPath}");
                                                                // Unzip file in buffered chunks. This is just as fast as unpacking to a buffer the full size
                                                                // of the file, but does not waste memory.
                                                                // The "using" will close the stream even if an exception occurs.
                                                                using (var streamWriter = File.Create(fullZipToPath))
                                                                {
                                                                    StreamUtils.Copy(zipStream, streamWriter, buffer);
                                                                }
                                                                // Record the CIK once per filing.
                                                                using (var db = new SecDataContext())
                                                                {
                                                                    var cik = new Cik { CikNumber = int.Parse(item.cik), CikText = item.cik };
                                                                    var ciks = from rec in db.Ciks where rec.CikText == cik.CikText && rec.CikNumber == cik.CikNumber select rec;
                                                                    if (!ciks.Any())
                                                                    {
                                                                        db.Ciks.Add(cik);
                                                                        db.SaveChanges();
                                                                    }
                                                                    // NOTE(review): 'filing' is constructed empty and never stored; as
                                                                    // rendered there is also a missing ';' after the initializer —
                                                                    // confirm against the original file.
                                                                    var filing = new Filing { }
                                                                    //db.Blogs.Add(blog);
                                                                    db.SaveChanges();
                                                                }
                                                            }
                                                        }
                                                        fs.Close();
                                                    }
                                                    logEntry.AppendLine($"\tDeleting: {tempFileName}");
                                                    File.Delete(tempFileName);
                                                }
                                                catch (ZipException ze)
                                                {
                                                    logEntry.AppendLine(ze.Message);
                                                }
                                            }
                                        });
                                    }
                                    catch (Exception ex)
                                    {
                                        logEntry.AppendLine(ex.Message);
                                    }
                                    if (xbrlData != null)
                                    {
                                    }
                                    Interlocked.Decrement(ref outstanding);
                                }
                            }
                        }
                    }
                    logQueue.Enqueue(logEntry.ToString());
                });
            }
        }
    }
    return("Done");
}
public void startDownload(String outputPath, List <MediaItem> items) { TotalProgress = 0; TotalProgressMax = items.Count; foreach (ImageResultItem item in items) { if (CancellationToken.IsCancellationRequested) { throw new OperationCanceledException(CancellationToken); } String fullpath = null; String ext = "." + MediaFormatConvert.mimeTypeToExtension(item.ImageInfo.ContentType); try { String filename = Path.GetFileName(item.ImageInfo.MediaUrl); if (!filename.EndsWith(ext)) { filename = filename.Substring(0, filename.LastIndexOf('.')); filename += ext; } fullpath = FileUtils.getUniqueFileName(outputPath + "\\" + filename); } catch (Exception) { fullpath = FileUtils.getUniqueFileName(outputPath + "\\" + "image" + ext); } FileStream outFile = null; try { outFile = new FileStream(fullpath, FileMode.Create); string mimeType; ItemProgressMax = 1; ItemProgress = 0; ItemInfo = "Downloading: " + fullpath; StreamUtils.readHttpRequest(new Uri(item.ImageInfo.MediaUrl), outFile, out mimeType, CancellationToken, progressCallback); TotalProgress++; ItemProgressMax = 1; ItemProgress = 1; InfoMessages.Add("Downloaded: " + fullpath); outFile.Close(); } catch (Exception e) { InfoMessages.Add("Error downloading: " + fullpath + " " + e.Message); if (outFile != null) { outFile.Close(); File.Delete(fullpath); } return; } } }
/// <summary>
/// Writes a RIFF "LIST"/"INFO" chunk holding the 'classic' metadata fields (title, artist,
/// copyright, genre, comment) followed by any remaining "info.*" additional fields. A chunk
/// size placeholder written up front is patched once the final position is known.
/// </summary>
/// <param name="w">Writer positioned where the LIST chunk starts.</param>
/// <param name="isLittleEndian">true to write the patched chunk size little-endian, false for big-endian.</param>
/// <param name="meta">Source of the standard and additional metadata fields.</param>
/// <returns>14 — NOTE(review): presumably a fixed code consumed by the caller; confirm its meaning.</returns>
public static int ToStream(BinaryWriter w, bool isLittleEndian, MetaDataIO meta)
{
    IDictionary <string, string> additionalFields = meta.AdditionalFields;

    w.Write(Utils.Latin1Encoding.GetBytes(CHUNK_LIST));
    long sizePos = w.BaseStream.Position;
    w.Write(0); // Placeholder for chunk size that will be rewritten at the end of the method
    w.Write(Utils.Latin1Encoding.GetBytes(PURPOSE_INFO));

    // 'Classic' fields (NB : usually done within a loop by accessing MetaDataIO.tagData)
    IDictionary <string, string> writtenFields = new Dictionary <string, string>();
    // Each classic field falls back to its "info.XXXX" additional field when empty.
    // Title
    string value = Utils.ProtectValue(meta.Title);
    if (0 == value.Length && additionalFields.Keys.Contains("info.INAM"))
    {
        value = additionalFields["info.INAM"];
    }
    if (value.Length > 0)
    {
        writeSizeAndNullTerminatedString("INAM", value, w, writtenFields);
    }
    // Artist
    value = Utils.ProtectValue(meta.Artist);
    if (0 == value.Length && additionalFields.Keys.Contains("info.IART"))
    {
        value = additionalFields["info.IART"];
    }
    if (value.Length > 0)
    {
        writeSizeAndNullTerminatedString("IART", value, w, writtenFields);
    }
    // Copyright
    value = Utils.ProtectValue(meta.Copyright);
    if (0 == value.Length && additionalFields.Keys.Contains("info.ICOP"))
    {
        value = additionalFields["info.ICOP"];
    }
    if (value.Length > 0)
    {
        writeSizeAndNullTerminatedString("ICOP", value, w, writtenFields);
    }
    // Genre
    value = Utils.ProtectValue(meta.Genre);
    if (0 == value.Length && additionalFields.Keys.Contains("info.IGNR"))
    {
        value = additionalFields["info.IGNR"];
    }
    if (value.Length > 0)
    {
        writeSizeAndNullTerminatedString("IGNR", value, w, writtenFields);
    }
    // Comment
    value = Utils.ProtectValue(meta.Comment);
    if (0 == value.Length && additionalFields.Keys.Contains("info.ICMT"))
    {
        value = additionalFields["info.ICMT"];
    }
    if (value.Length > 0)
    {
        writeSizeAndNullTerminatedString("ICMT", value, w, writtenFields);
    }

    string shortKey;
    foreach (string key in additionalFields.Keys)
    {
        if (key.StartsWith("info."))
        {
            // Strip the "info." prefix to get the RIFF field ID.
            shortKey = key.Substring(5, key.Length - 5).ToUpper();
            // NOTE(review): this check uses the full "info.*" key — verify that
            // writeSizeAndNullTerminatedString records entries under the same key form,
            // otherwise classic fields written above could be duplicated here.
            if (!writtenFields.ContainsKey(key))
            {
                if (additionalFields[key].Length > 0)
                {
                    writeSizeAndNullTerminatedString(shortKey, additionalFields[key], w, writtenFields);
                }
            }
        }
    }

    // Patch the chunk size (excludes the chunk ID and the 4-byte size field itself).
    long finalPos = w.BaseStream.Position;
    w.BaseStream.Seek(sizePos, SeekOrigin.Begin);
    if (isLittleEndian)
    {
        w.Write((int)(finalPos - sizePos - 4));
    }
    else
    {
        w.Write(StreamUtils.EncodeBEInt32((int)(finalPos - sizePos - 4)));
    }

    return(14);
}
/// <summary>
/// Reads a ProTracker-style MOD module: title, sample headers, pattern table, format tag
/// (which determines channel count and tracker name) and raw pattern data; then computes
/// duration, bitrate and a comment assembled from the sample names.
/// </summary>
/// <param name="source">Reader over the module file.</param>
/// <param name="readTagParams">Read options; when PrepareForWriting is set, a writable title zone is registered.</param>
/// <returns>Always true (failure paths throw instead).</returns>
/// <exception cref="InvalidDataException">When the file is PowerPacker-compressed.</exception>
protected override bool read(BinaryReader source, MetaDataIO.ReadTagParams readTagParams)
{
    bool result = true;
    int maxPatterns = -1;
    byte nbSamples;
    string readString;
    StringBuilder comment = new StringBuilder("");
    Sample sample;
    IList <IList <int> > pattern;
    IList <int> row;

    resetData();

    // == TITLE ==
    readString = Utils.Latin1Encoding.GetString(source.ReadBytes(4));
    if (readString.Equals(SIG_POWERPACKER))
    {
        throw new InvalidDataException("MOD files compressed with PowerPacker are not supported yet");
    }
    tagExists = true;
    // Restart from beginning, else parser might miss empty titles
    source.BaseStream.Seek(0, SeekOrigin.Begin);
    // Title = max first 20 chars; null-terminated
    string title = StreamUtils.ReadNullTerminatedStringFixed(source, System.Text.Encoding.ASCII, 20);
    if (readTagParams.PrepareForWriting)
    {
        structureHelper.AddZone(0, 20, new byte[20] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, ZONE_TITLE);
    }
    tagData.IntegrateValue(TagData.TAG_FIELD_TITLE, title.Trim());
    AudioDataOffset = source.BaseStream.Position;
    AudioDataSize = sizeInfo.FileSize - AudioDataOffset;

    // == SAMPLES ==
    nbSamples = detectNbSamples(source);
    string charOne = Utils.Latin1Encoding.GetString(new byte[] { 1 });
    for (int i = 0; i < nbSamples; i++)
    {
        sample = new Sample();
        sample.Name = StreamUtils.ReadNullTerminatedStringFixed(source, System.Text.Encoding.ASCII, 22).Trim();
        sample.Name = sample.Name.Replace("\0", "");
        sample.Name = sample.Name.Replace(charOne, ""); // strip 0x01 chars (presumably padding — confirm)
        sample.Size = StreamUtils.DecodeBEUInt16(source.ReadBytes(2)) * 2; // *2 : sizes are stored as 16-bit word counts
        sample.Finetune = source.ReadSByte();
        sample.Volume = source.ReadByte();
        sample.RepeatOffset = StreamUtils.DecodeBEUInt16(source.ReadBytes(2)) * 2;
        sample.RepeatLength = StreamUtils.DecodeBEUInt16(source.ReadBytes(2)) * 2;
        FSamples.Add(sample);
    }

    // == SONG ==
    nbValidPatterns = source.ReadByte();
    source.BaseStream.Seek(1, SeekOrigin.Current); // Controversial byte; no real use here
    for (int i = 0; i < 128; i++)
    {
        FPatternTable.Add(source.ReadByte()); // Pattern table
    }

    // File format tag
    formatTag = Utils.Latin1Encoding.GetString(source.ReadBytes(4)).Trim();
    if (modFormats.ContainsKey(formatTag))
    {
        nbChannels = modFormats[formatTag].NbChannels;
        trackerName = modFormats[formatTag].Name;
    }
    else // Default
    {
        nbChannels = NB_CHANNELS_DEFAULT;
        LogDelegator.GetLogDelegate()(Log.LV_WARNING, "MOD format tag '" + formatTag + "'not recognized");
    }

    // == PATTERNS ==
    // Some extra information about the "FLT8" -type MOD's:
    //
    // These MOD's have 8 channels, still the format isn't the same as the
    // other 8 channel formats ("OCTA", "CD81", "8CHN"): instead of storing
    // ONE 8-track pattern, it stores TWO 4-track patterns per logical pattern.
    // i.e. The first 4 channels of the first logical pattern are stored in
    // the first physical 4-channel pattern (size 1kb) whereas channel 5 until
    // channel 8 of the first logical pattern are stored as the SECOND physical
    // 4-channel pattern. Got it? ;-).
    // If you convert all the 4 channel patterns to 8 channel patterns, do not
    // forget to divide each pattern nr by 2 in the pattern sequence table!
    foreach (byte b in FPatternTable)
    {
        maxPatterns = Math.Max(maxPatterns, b); // highest referenced pattern index
    }
    for (int p = 0; p < maxPatterns + 1; p++) // Patterns loop
    {
        FPatterns.Add(new List <IList <int> >());
        pattern = FPatterns[FPatterns.Count - 1];
        // Rows loop
        for (int l = 0; l < MAX_ROWS; l++)
        {
            pattern.Add(new List <int>());
            row = pattern[pattern.Count - 1];
            for (int c = 0; c < nbChannels; c++) // Channels loop
            {
                row.Add(StreamUtils.DecodeBEInt32(source.ReadBytes(4)));
            } // end channels loop
        }     // end rows loop
    }         // end patterns loop

    // == Computing track properties
    duration = calculateDuration();
    // Comment = sample names joined by the internal value separator.
    foreach (Sample aSample in FSamples)
    {
        if (aSample.Name.Length > 0)
        {
            comment.Append(aSample.Name).Append(Settings.InternalValueSeparator);
        }
    }
    if (comment.Length > 0)
    {
        comment.Remove(comment.Length - 1, 1); // drop trailing separator
    }
    tagData.IntegrateValue(TagData.TAG_FIELD_COMMENT, comment.ToString());
    bitrate = sizeInfo.FileSize / duration;

    return(result);
}
void ExtractFileEntry(ZipEntry entry, string targetName) { bool proceed = true; if (overwrite_ != Overwrite.Always) { if (File.Exists(targetName)) { if ((overwrite_ == Overwrite.Prompt) && (confirmDelegate_ != null)) { proceed = confirmDelegate_(targetName); } else { proceed = false; } } } if (proceed) { if (events_ != null) { continueRunning_ = events_.OnProcessFile(entry.Name); } if (continueRunning_) { try { using (FileStream outputStream = File.Create(targetName)) { if (buffer_ == null) { buffer_ = new byte[4096]; } if ((events_ != null) && (events_.Progress != null)) { StreamUtils.Copy(zipFile_.GetInputStream(entry), outputStream, buffer_, events_.Progress, events_.ProgressInterval, this, entry.Name); } else { StreamUtils.Copy(zipFile_.GetInputStream(entry), outputStream, buffer_); } if (events_ != null) { continueRunning_ = events_.OnCompletedFile(entry.Name); } } #if !NETCF_1_0 && !NETCF_2_0 if (restoreDateTimeOnExtract_) { File.SetLastWriteTime(targetName, entry.DateTime); } if (RestoreAttributesOnExtract && entry.IsDOSEntry && (entry.ExternalFileAttributes != -1)) { FileAttributes fileAttributes = (FileAttributes)entry.ExternalFileAttributes; // TODO: FastZip - Setting of other file attributes on extraction is a little trickier. fileAttributes &= (FileAttributes.Archive | FileAttributes.Normal | FileAttributes.ReadOnly | FileAttributes.Hidden); File.SetAttributes(targetName, fileAttributes); } #endif } catch (Exception ex) { if (events_ != null) { continueRunning_ = events_.OnFileFailure(targetName, ex); } else { continueRunning_ = false; } } } } }
void DownloadLevel2CSVNew(DownloadQuotesEnumerator enumerator, ZipOutputStream zipStream) { string filename = "ticks level2.csv"; using (StreamWriter file = File.CreateText(filename)) { currentTempFile = filename; DateTime lastQuoteTime = DateTime.MinValue; int repeatingQuotes = 0; for (Quote quote = enumerator.Next(-1); quote != null; quote = enumerator.Next(-1)) { StringBuilder builder = new StringBuilder(); builder.Append(quote.CreatingTime.ToString("yyyy.MM.dd HH:mm:ss.fff", CultureInfo.InvariantCulture)); if (quote.CreatingTime == lastQuoteTime) { repeatingQuotes++; } else { lastQuoteTime = quote.CreatingTime; repeatingQuotes = 0; } if (repeatingQuotes > 0) { builder.Append(-repeatingQuotes); } builder.Append(","); int i = 0; while (i < quote.Bids.Count || i < quote.Asks.Count) { if (i < quote.Bids.Count) { builder.Append($"{quote.Bids[i].Price},{quote.Bids[i].Volume},"); } else { builder.Append(",,"); } if (i < quote.Asks.Count) { builder.Append($"{quote.Asks[i].Price},{quote.Asks[i].Volume},"); } else { builder.Append(",,"); } i++; } builder.Remove(builder.Length - 1, 1); file.WriteLine(builder); } } FileInfo fi = new FileInfo(filename); ZipEntry newEntry = new ZipEntry("ticks level2.csv"); newEntry.Size = fi.Length; zipStream.PutNextEntry(newEntry); byte[] buffer = new byte[4096]; using (FileStream streamReader = File.OpenRead(filename)) { StreamUtils.Copy(streamReader, zipStream, buffer); } zipStream.CloseEntry(); File.Delete("ticks level2.csv"); currentTempFile = null; }