/// <summary>Ten spare frames ("5" then "/") followed by a closing 5-pin frame score 155.</summary>
// NOTE(review): the method name says 150 but the asserted total is 155 — confirm the intended name.
public void TestEquals150()
{
    FrameCollection frames = new FrameCollection();
    Repeat(() => frames.Add(new Frame("5", "/")), 10);
    frames.Add(new Frame(5));

    Assert.IsTrue(frames.Score() == 155);
}
/// <summary>Ten strike frames plus the two tenth-frame bonus throws produce a perfect 300 game.</summary>
public void TestEquals300()
{
    FrameCollection frames = new FrameCollection();
    Repeat(() => frames.Add(new Frame(10, 0)), 10);

    // Two bonus throws awarded for a strike in the tenth frame.
    frames.Add(new Frame(10, true));
    frames.Add(new Frame(10, true));

    Assert.IsTrue(frames.Score() == 300);
}
/// <summary>
/// Create new Synchronised Tempo Codes frame (SYTC).
/// </summary>
/// <param name="FrameID">4 Characters tag identifier</param>
/// <param name="Flags">2 Bytes flags identifier</param>
/// <param name="Length">Remaining byte length available for this frame's data</param>
/// <param name="FS">Stream containing the frame data</param>
public SynchronisedTempoFrame(string FrameID, FrameFlags Flags, int Length, Stream FS)
    : base(FrameID, Flags, FS)
{
    _TempoCodes = new FrameCollection<TempoCode>("Tempo Codes"); // fixed "Temnpo" typo
    TStream = new BreadPlayer.Tags.TagStreamUWP(FS);
    _TimeStamp = (TimeStamps)TStream.ReadByte(FS);
    // BUG FIX: the original returned when the timestamp format WAS valid, aborting every
    // well-formed frame. Sibling frames (SynchronisedText, EventTimingCodeFrame) use the
    // negated check; this now matches them.
    if (!IsValidEnumValue(_TimeStamp, ExceptionLevels.Error, FrameID))
    {
        return;
    }
    Length--; // BUG FIX: account for the timestamp byte just consumed (was missing)
    int Tempo;
    uint Time;
    // Each tempo-code entry: 1 tempo byte (plus 1 extension byte when 0xFF) + 4 timestamp bytes.
    while (Length > 4)
    {
        Tempo = TStream.ReadByte(FS);
        Length--;
        if (Tempo == 0xFF)
        {
            // Per the ID3v2 SYTC definition, $FF means "add the following byte" (values 255..510).
            Tempo += TStream.ReadByte(FS);
            Length--;
        }
        Time = TStream.ReadUInt(4);
        Length -= 4;
        _TempoCodes.Add(FrameID, new TempoCode(Tempo, Time));
    }
}
/// <summary>
/// Create new Equalisation frame
/// </summary>
/// <param name="FrameID">4 characters frame identifer of current Frame class</param>
/// <param name="Flags">Frame Flags</param>
/// <param name="Length">Maximum length to read frame</param>
/// <param name="FS">Stream to read frame data from</param>
public Equalisation(string FrameID, FrameFlags Flags, int Length, Stream FS)
    : base(FrameID, Flags, FS)
{
    _Frequensies = new FrameCollection<FrequencyAdjustmentFrame>("Frequency Adjustment");
    TStream = new BreadPlayer.Tags.TagStreamUWP(FS);
    _AdjustmentBits = TStream.ReadByte(FS);
    Length--;
    if (_AdjustmentBits == 0)
    {
        ExceptionOccured(new ID3Exception("Adjustment bit of Equalisation is zero. this frame is invalid", FrameID, ExceptionLevels.Error));
        return;
    }
    // Only whole-byte adjustment widths up to 32 bits are supported here.
    if (_AdjustmentBits % 8 != 0 || _AdjustmentBits > 32)
    {
        ExceptionOccured(new ID3Exception("AdjustmentBit of Equalisation Frame is out of supported range of this program", FrameID, ExceptionLevels.Error));
        return;
    }
    int AdLen = _AdjustmentBits / 8; // adjustment field width in bytes (1..4)
    int FreqBuf;
    uint AdjBuf;
    // Each entry consumes 2 frequency bytes + AdLen adjustment bytes.
    // BUG FIX: the loop previously ran while Length > 3 regardless of AdLen, which
    // over-reads past the frame boundary whenever AdLen > 2.
    while (Length >= 2 + AdLen)
    {
        FreqBuf = Convert.ToInt32(TStream.ReadUInt(2));
        AdjBuf = TStream.ReadUInt(AdLen);
        _Frequensies.Add(FrameID, new FrequencyAdjustmentFrame(FreqBuf, AdjBuf));
        Length -= 2 + AdLen;
    }
}
/// <summary>Loads a trace of type <typeparamref name="T"/> from disk and focuses its main frame.</summary>
/// <param name="path">Path to the trace file.</param>
/// <returns>true when the file existed and was loaded; false when it does not exist.</returns>
private bool OpenTrace<T>(String path) where T : ITrace, new()
{
    if (!File.Exists(path))
    {
        return false;
    }

    using (Stream stream = File.OpenRead(path))
    {
        T trace = new T();
        trace.Init(path, stream);
        frames.AddGroup(trace.MainGroup);
        frames.Add(trace.MainFrame);
        FocusOnFrame(trace.MainFrame);
    }
    return true;
}
/// <summary>Ten open frames of 9 pins ("9" then "-") with no bonuses total exactly 90.</summary>
public void TestEquals90()
{
    FrameCollection frames = new FrameCollection();
    Repeat(() => frames.Add(new Frame("9", "-")), 10);

    Assert.IsTrue(frames.Score() == 90);
}
/// <summary>
/// Interactive ten-pin bowling scorer: reads ten frames from the console,
/// handles tenth-frame bonus throws, then prints the final score.
/// </summary>
static void Main(string[] args)
{
    FrameCollection frames = new FrameCollection();
    int count = 0;
    // Collect the ten regular frames.
    do
    {
        count++;
        Console.WriteLine("In frame {0}, Please enter the pins knocked down...", count); // fixed "knowked" typo
        Console.Write("...on the first throw:");
        string first = Console.ReadLine();
        string second = "0";
        // A strike consumes the whole frame; only prompt for a second throw otherwise.
        if (first != "10" && first != "X" && first != "x")
        {
            Console.Write("...on the second throw:");
            second = Console.ReadLine();
        }
        frames.Add(new Frame(first, second));
    } while (count < 10);

    // A spare in the tenth frame earns one bonus throw; a strike earns two.
    if (frames[9].IsStrike || frames[9].IsSpare)
    {
        Console.WriteLine("Bonus frame #1!");
        Console.Write("Enter pins knocked down on 1st bonus frame:");
        string bonusScore1 = Console.ReadLine();
        Frame bonus1 = new Frame(bonusScore1, true);
        frames.Add(bonus1);
        // BUG FIX: a tenth-frame strike always grants TWO bonus throws, regardless of
        // whether the first bonus throw is itself a strike. The original additionally
        // required bonus1.IsStrike, which dropped the second throw after e.g. a 7.
        if (frames[9].IsStrike)
        {
            Console.WriteLine("Bonus frame #2!");
            Console.Write("Enter pins knocked down on 2nd bonus frame:");
            string bonusScore2 = Console.ReadLine();
            frames.Add(new Frame(bonusScore2, true));
        }
    }
    Console.WriteLine("The final score was: {0}", frames.Score());
    Console.ReadLine();
}
/// <summary>
/// Dispatches one profiler <see cref="DataResponse"/> to the UI.
/// </summary>
/// <param name="response">Message received from the profiler connection.</param>
/// <returns>false on a protocol version mismatch; true otherwise.</returns>
private bool ApplyResponse(DataResponse response)
{
    // Reject anything from a different protocol version up front.
    if (response.Version != NetworkProtocol.NETWORK_PROTOCOL_VERSION)
    {
        MessageBox.Show("Invalid NETWORK_PROTOCOL_VERSION");
        return false;
    }

    //SaveTestResponse(response);
    switch (response.ResponseType)
    {
        case DataResponse.Type.ReportProgress:
        {
            // Progress text is serialized as a length-prefixed char run.
            Int32 length = response.Reader.ReadInt32();
            StatusText.Text = new String(response.Reader.ReadChars(length));
            break;
        }

        case DataResponse.Type.NullFrame:
            lock (frames)
            {
                frames.Flush();
                if (frames.Count > 0)
                {
                    frameList.SelectedItem = frames[frames.Count - 1];
                    ScrollToEnd();
                }
            }
            break;

        case DataResponse.Type.Handshake:
        {
            ETWStatus status = (ETWStatus)response.Reader.ReadUInt32();
            KeyValuePair<string, string> warning;
            if (statusToError.TryGetValue(status, out warning))
            {
                ShowWarning(warning.Key, warning.Value);
            }
            break;
        }

        default:
            // Any other response type carries frame data.
            StatusText.Visibility = System.Windows.Visibility.Collapsed;
            lock (frames)
            {
                frames.Add(response.ResponseType, response.Reader);
                //ScrollToEnd();
            }
            break;
    }
    return true;
}
/// <summary>
/// Create new SynchronisedText frame (SYLT): an encoding byte, a 3-byte language code,
/// a timestamp-format byte, a content-type byte, a descriptor string, then a sequence
/// of (syllable text, 4-byte timestamp) pairs.
/// </summary>
/// <param name="FrameID">FrameID</param>
/// <param name="Flags">Frame Flag</param>
/// <param name="Length">Maximum available length for this frame</param>
/// <param name="FS">Stream containing current frame data</param>
public SynchronisedText(string FrameID, FrameFlags Flags, int Length, Stream FS) : base(FrameID, Flags, FS) {
    _Syllables = new FrameCollection <Syllable>("Syllables");
    TStream = new BreadPlayer.Tags.TagStreamUWP(FS);
    // Text encoding byte; an unrecognized value aborts the frame with an error.
    TextEncoding = (TextEncodings)TStream.ReadByte(FS);
    if (!IsValidEnumValue(TextEncoding, ExceptionLevels.Error, FrameID)) { return; }
    Length--;
    // 3-byte ISO-639-2 language code.
    Language = new Language(TStream.FS);
    Length -= 3;
    _TimeStamp = (TimeStamps)TStream.ReadByte(FS);
    if (!IsValidEnumValue(_TimeStamp, ExceptionLevels.Error, FrameID)) { return; }
    Length--;
    // Content type is non-fatal: an unknown value falls back to Other.
    _ContentType = (ContentTypes)TStream.ReadByte(FS);
    if (!IsValidEnumValue(_ContentType)) { _ContentType = ContentTypes.Other; }
    Length--;
    // use Text variable for descriptor property
    // NOTE(review): ReadText appears to decrement Length via the ref parameter as it
    // consumes bytes — confirm against TagStreamUWP before relying on it.
    Text = TStream.ReadText(Length, TextEncoding, ref Length, true);
    string tempText;
    uint tempTime;
    // Each entry: a terminated text run (Length adjusted via ref) + 4 timestamp bytes.
    while (Length > 5) {
        tempText = TStream.ReadText(Length, TextEncoding, ref Length, true);
        tempTime = TStream.ReadUInt(4);
        _Syllables.Add(FrameID, new Syllable(tempTime, tempText));
        Length -= 4;
    }
}
/// <summary>Exports the frame currently selected in the list via the archive factory.</summary>
private void OnMenuItemClick_Export(object sender, RoutedEventArgs e)
{
    // Wrap the single selected frame in a collection for the archive source.
    FrameCollection selection = new FrameCollection();
    selection.Add(frameList.SelectedItem as Frame);

    var exportOption = new ArchiveOption
    {
        Mode = ArchiveMode.Save,
        Sources = new List<IArchiveSource> { new FrameArchiveSource(selection) },
        ArchiveType = ArchiveSourceType.Frame
    };
    ArchiveFactory.Instance().Archive(ref exportOption);
}
/// <summary>
/// Handles a frame-arrived event: acquires the latest frame for the source kind
/// that raised the event and appends it to the frame collection.
/// More information:
/// https://docs.microsoft.com/en-us/windows/uwp/audio-video-camera/process-media-frames-with-mediaframereader#handle-the-frame-arrived-event
/// https://docs.microsoft.com/en-us/uwp/api/windows.media.capture.frames.multisourcemediaframereference
/// </summary>
private void FrameReader_FrameArrived(ExampleMediaFrameReader sender, ExampleMediaFrameArrivedEventArgs args)
{
    try
    {
        using (var latestFrame = sender.TryAcquireLatestFrameBySourceKind(args.SourceKind))
        {
            FrameCollection.Add(latestFrame);
        }
    }
    catch (ObjectDisposedException)
    {
        // The reader can be torn down while an event is still in flight; ignore.
    }
}
/// <summary>
/// Load a specific frame's information from another file's ID3v2 tag and merge it
/// into this tag's collection and single-frame stores.
/// </summary>
/// <param name="FrameID">FrameID to load</param>
/// <param name="FileAddress">FileAddress to read tag from</param>
private void LoadFrameFromFile(string FrameID, string FileAddress) {
    // Load only the requested frame from the linked file's tag.
    ID3v2 LinkedInfo = new ID3v2(false, TStream.FS);
    LinkedInfo.Filter.Add(FrameID);
    LinkedInfo.FilterType = FilterTypes.LoadFiltersOnly;
    LinkedInfo.Load();
    // Forward any errors from the linked tag, prefixed with the source file address.
    if (LinkedInfo.HaveError) {
        foreach (ID3Exception IE in LinkedInfo.Errors) {
            _Errors.Add(new ID3Exception("In Linked Info(" + FileAddress + "): " + IE.Message, IE.FrameID, IE.Level));
        }
    }
    // Merge collection frames, skipping the Link collection to avoid recursive linking.
    foreach (FrameCollectionBase Coll in LinkedInfo._CollectionFrames) {
        if (Coll.Name == CollectionIndex.Link.ToString()) { continue; }
        foreach (Frame Fr in Coll) {
            // NOTE(review): this lookup is loop-invariant and could be hoisted above the
            // inner loop — left in place to preserve behavior exactly.
            FrameCollection <Frame> Temp = (FrameCollection <Frame>)_CollectionFrames[
                Enum.Parse(typeof(CollectionIndex), Coll.Name)];
            Temp.Add(Fr.FrameID, Fr);
        }
    }
    // Merge single frames, replacing any existing entry with the same FrameID.
    // NOTE(review): casting .Values to Frame[] only works if _SingleFrames is a custom
    // collection; a standard Hashtable/Dictionary would throw here — confirm the type.
    foreach (Frame In in (Frame[])LinkedInfo._SingleFrames.Values) {
        // NOTE(review): Remove(In) passes the Frame object, and the indexer below uses
        // [In] — both look like they should key on In.FrameID instead; verify against
        // the _SingleFrames key type before changing.
        if (_SingleFrames.ContainsKey(In.FrameID)) { _SingleFrames.Remove(In); }
        _SingleFrames.Add(In.FrameID, LinkedInfo._SingleFrames[In]);
    }
}
/// <summary>
/// Blocking receive loop: continuously pulls profiler messages and applies them to the
/// shared frame list, sleeping between polls when nothing is pending.
/// </summary>
public static void RecieveMessage()
{
    while (true)
    {
        DataResponse response = ProfilerClient.Get().RecieveMessage();
        if (response == null)
        {
            // Nothing available yet — back off before polling again.
            Thread.Sleep(1000);
            continue;
        }

        // Silently drop messages from a mismatched protocol version.
        if (response.Version != NetworkProtocol.NETWORK_PROTOCOL_VERSION)
        {
            continue;
        }

        switch (response.ResponseType)
        {
            case DataResponse.Type.ReportProgress:
                Int32 length = response.Reader.ReadInt32();
                System.Console.WriteLine("Progress: " + new String(response.Reader.ReadChars(length)));
                break;
            case DataResponse.Type.NullFrame:
                lock (frames)
                {
                    frames.Flush();
                }
                break;
            case DataResponse.Type.Handshake:
                break;
            default:
                // Everything else carries frame payload data.
                lock (frames)
                {
                    frames.Add(response.ResponseType, response.Reader);
                }
                break;
        }
    }
}
/// <summary>
/// Create new EventTimingCodeFrame (ETCO): a timestamp-format byte followed by
/// a sequence of (event-type byte, 4-byte timestamp) entries.
/// </summary>
/// <param name="FrameID">FrameID</param>
/// <param name="Flags">Flags of frame</param>
/// <param name="Length">Maximum available length</param>
/// <param name="FS">Stream to read data from</param>
public EventTimingCodeFrame(string FrameID, FrameFlags Flags, int Length, Stream FS)
    : base(FrameID, Flags, FS)
{
    _Events = new FrameCollection<EventCode>("EventCode");
    TStream = new BreadPlayer.Tags.TagStreamUWP(FS);
    _TimeStamp = (TimeStamps)TStream.ReadByte(FS);
    if (!IsValidEnumValue(_TimeStamp, ExceptionLevels.Error, FrameID))
    {
        return;
    }
    Length--;
    // Each event entry consumes exactly 5 bytes: 1 type byte + 4 timestamp bytes.
    for (; Length >= 5; Length -= 5)
    {
        var eventType = TStream.ReadByte(FS);
        var eventTime = TStream.ReadUInt(4);
        _Events.Add(FrameID, new EventCode(eventType, eventTime));
    }
}
/// <summary>Processes raw data to populate the resource</summary>
/// <param name="raw">Raw byte data</param>
/// <param name="containsHeader">Whether or not <i>raw</i> contains the resource Header information</param>
/// <exception cref="ArgumentException">Header-defined <see cref="Type"/> is not <see cref="Resource.ResourceType.Anim"/></exception>
public override void DecodeResource(byte[] raw, bool containsHeader)
{
    _decodeResource(raw, containsHeader);
    if (_type != ResourceType.Anim)
    {
        throw new ArgumentException("Raw header is not for an Anim resource");
    }

    // The payload opens with a 16-bit frame count; pre-populate the collection.
    short frameCount = BitConverter.ToInt16(_rawData, 0);
    _frames = new FrameCollection(this);
    for (int i = 0; i < frameCount; i++)
    {
        _frames.Add(new Frame(this));
    }

    // Frames follow back-to-back: a 32-bit length prefix, then the DELT image data.
    int position = 2;
    for (int i = 0; i < NumberOfFrames; i++)
    {
        int deltLength = BitConverter.ToInt32(_rawData, position);
        byte[] deltData = new byte[deltLength];
        ArrayFunctions.TrimArray(_rawData, position + 4, deltData);
        _frames[i]._delt.DecodeResource(deltData, false);
        if (HasDefinedPalette)
        {
            _frames[i]._delt.Palette = _palette;
        }
        position += deltLength + 4;
    }
    _recalculateDimensions();
}
/// <summary>Creates a new Act image from bitmap</summary>
/// <remarks><see cref="FilePath"/> defaults to <b>"NewImage.act"</b>, <see cref="Frames"/> is initialized as a single <see cref="Frame"/> using <i>image</i>.<br/>
/// <see cref="Center"/> defaults to the center pixel of <i>image</i>.</remarks>
/// <param name="image">Initial <see cref="PixelFormat.Format8bppIndexed"/> image to be used</param>
/// <exception cref="Idmr.Common.BoundaryException"><i>image</i> exceeds allowable size</exception>
public ActImage(Bitmap image)
{
    _filePath = "NewImage.act";

    // Single frame initialized from the supplied bitmap.
    _frames = new FrameCollection(this);
    _frames.Add(new Frame(this, image));

    // Center on the middle pixel; the frame origin is offset so the center sits at (0,0).
    _center = new Point(Width / 2, Height / 2);
    _frames[0].Location = new Point(-Center.X, -Center.Y);
}
/// <summary>Processes raw data to populate the resource</summary>
/// <param name="raw">Raw byte data</param>
/// <param name="containsHeader">Whether or not <i>raw</i> contains the resource Header information</param>
/// <exception cref="ArgumentException">Header-defined <see cref="Type"/> is not <see cref="ResourceType.Anim"/></exception>
public override void DecodeResource(byte[] raw, bool containsHeader)
{
    _decodeResource(raw, containsHeader);
    if (_type != ResourceType.Anim)
    {
        throw new ArgumentException("Raw header is not for an Anim resource");
    }

    // First two bytes of the payload hold the frame count.
    short frameTotal = BitConverter.ToInt16(_rawData, 0);
    _frames = new FrameCollection(this);
    for (int i = 0; i < frameTotal; i++)
    {
        _frames.Add(new Frame(this));
    }

    // Walk the back-to-back frame records: 32-bit length prefix, then DELT data.
    int cursor = 2;
    for (int i = 0; i < NumberOfFrames; i++)
    {
        int recordLength = BitConverter.ToInt32(_rawData, cursor);
        byte[] deltBytes = new byte[recordLength];
        ArrayFunctions.TrimArray(_rawData, cursor + 4, deltBytes);
        _frames[i]._delt.DecodeResource(deltBytes, false);
        if (HasDefinedPalette)
        {
            _frames[i]._delt.Palette = _palette;
        }
        cursor += recordLength + 4;
    }
    _recalculateDimensions();
}
/// <summary>
/// Dispatches one profiler <see cref="DataResponse"/> to the UI, gated on the
/// connection's protocol version. Binary reads from response.Reader are strictly
/// order-dependent within each case.
/// </summary>
/// <param name="response">Message received from the profiler connection.</param>
/// <returns>false when the protocol version is below the supported minimum; true otherwise.</returns>
private bool ApplyResponse(DataResponse response) {
    if (response.Version >= NetworkProtocol.NETWORK_PROTOCOL_MIN_VERSION) {
        //SaveTestResponse(response);
        switch (response.ResponseType) {
            case DataResponse.Type.ReportProgress:
                // Progress text is serialized as a length-prefixed char run.
                Int32 length = response.Reader.ReadInt32();
                StatusText.Text = new String(response.Reader.ReadChars(length));
                break;
            case DataResponse.Type.NullFrame:
                // End-of-capture marker: cancel the connection and flush pending frames.
                RaiseEvent(new CancelConnectionEventArgs());
                StatusText.Visibility = System.Windows.Visibility.Collapsed;
                lock (frames) {
                    frames.Flush();
                    ScrollToEnd();
                }
                break;
            case DataResponse.Type.Handshake:
                TracerStatus status = (TracerStatus)response.Reader.ReadUInt32();
                KeyValuePair <string, string> warning;
                if (statusToError.TryGetValue(status, out warning)) {
                    RaiseEvent(new ShowWarningEventArgs(warning.Key, warning.Value));
                }
                // Protocol 23+ handshakes additionally carry connection target info
                // (two binary strings: target platform name, then connection name).
                if (response.Version >= NetworkProtocol.NETWORK_PROTOCOL_VERSION_23) {
                    Platform.Connection connection = new Platform.Connection() {
                        Address = response.Source.Address.ToString(), Port = response.Source.Port
                    };
                    Platform.Type target = Platform.Type.Unknown;
                    String targetName = Utils.ReadBinaryString(response.Reader);
                    // Unparseable names leave target as Unknown (TryParse result ignored).
                    Enum.TryParse(targetName, true, out target);
                    connection.Target = target;
                    connection.Name = Utils.ReadBinaryString(response.Reader);
                    RaiseEvent(new NewConnectionEventArgs(connection));
                }
                break;
            default:
                // Every other response type carries frame payload data.
                lock (frames) {
                    frames.Add(response);
                    //ScrollToEnd();
                }
                break;
        }
    } else {
        RaiseEvent(new ShowWarningEventArgs("Invalid NETWORK_PROTOCOL_VERSION", String.Empty));
        return(false);
    }
    return(true);
}
/// <summary>Populates the Act object from the raw byte data</summary>
/// <param name="raw">Entire contents of an *.ACT file</param>
/// <exception cref="ArgumentException">Data validation failure</exception>
public void DecodeFile(byte[] raw)
{
    ArrayFunctions.TrimArray(raw, 0, _header);
    // A mismatched header-length field means this isn't a valid ACT file.
    if (BitConverter.ToInt32(_header, 0x10) != _fileHeaderLength)
    {
        throw new ArgumentException(_validationErrorMessage, "raw");
    }
    _center = new Point(BitConverter.ToInt32(_header, 0x24), BitConverter.ToInt32(_header, 0x28));

    // The frame-offset table immediately follows the file header.
    int frameCount = BitConverter.ToInt32(_header, 0x18);
    int[] offsets = new int[frameCount];
    System.Diagnostics.Debug.WriteLine("Frames: " + frameCount);
    ArrayFunctions.TrimArray(raw, _fileHeaderLength, offsets);

    // Frames: each record starts with its own 32-bit total length.
    _frames = new FrameCollection(this);
    for (int f = 0; f < frameCount; f++)
    {
        byte[] frameBytes = new byte[BitConverter.ToInt32(raw, offsets[f])];
        ArrayFunctions.TrimArray(raw, offsets[f], frameBytes);
        _frames.Add(new Frame(this, frameBytes));
    }
    // EOF
}