Example #1
        /// <summary>
        /// Fills local buffer float[channel, point] called bigBuff with data from BDFEDFRecord[] called records;
        /// decimates by factor decimation; re-references data as specified in reference information;
        /// this is a specific local routine only; not for "public" consumption!
        /// </summary>
        /// <param name="start">BDFLoc to start filling from</param>
        /// <param name="end">BDFLoc to stop filling</param>
        /// <returns>true if bigBuff completely filled before end reached; false if not</returns>
        /// <remarks>also updates parameter start to indicate next point that will be read into bigBuff on next call</remarks>
        protected bool fillBuffer(ref BDFLoc start, BDFLoc end)
        {
            if (!start.IsInFile) return false; //start of record outside of file coverage; so skip it
            BDFLoc endPt = start + newRecordLengthPts * decimation; //calculate ending point
            if (endPt.greaterThanOrEqualTo(end) || !endPt.IsInFile) return false; //end of record outside of file coverage

            /***** Read correct portion of EDF+ file, decimate, and reference *****/
            for (int pt = 0; pt < newRecordLengthPts; pt++, start += decimation)
                for (int c = 0; c < edfPlus.NumberOfChannels - 1; c++)
                    bigBuff[c, pt] = (float)records[start.Rec].getConvertedPoint(c, start.Pt);
            calculateReferencedData();
            return true;
        }
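A minimal usage sketch for fillBuffer, assuming a driver that walks the whole EDF+ file record by record; processRecord is a hypothetical consumer of the filled buffer, while LocationFactory and EOF() are the same helpers used in the later examples:

        //Hypothetical driver loop: fillBuffer advances start itself, so the caller only tests the return value
        BDFLoc start = edfPlus.LocationFactory.New();            //beginning of file
        BDFLoc end = edfPlus.LocationFactory.New().EOF();        //end-of-file marker (as in Example #7)
        while (fillBuffer(ref start, end))
            processRecord(bigBuff);                              //hypothetical: consume the decimated, re-referenced data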
Example #2
        public void reDrawChannels()
        {
            this.Cursor = Cursors.Wait;
            List<ChannelCanvas> chans = currentChannelList;

            double currentLowSecs = newDisplayOffsetInSecs;
            double currentHighSecs = currentLowSecs + newDisplayWidthInSecs;
            double oldLowSecs = oldDisplayOffsetInSecs;
            double oldHighSecs = oldDisplayOffsetInSecs + oldDisplayWidthInSecs;
            oldDisplayWidthInSecs = newDisplayWidthInSecs;

            DW.Text = newDisplayWidthInSecs.ToString("0.000"); //update current display width text

            BDFEDFFileStream.BDFLoc lowBDFP = bdf.LocationFactory.New().FromSecs(currentLowSecs); //where we'll find the points for new display in BDF
            BDFEDFFileStream.BDFLoc highBDFP = bdf.LocationFactory.New().FromSecs(currentHighSecs);

            //calculate new decimation, depending on seconds displayed and viewer width

            double numberOfBDFPointsToRepresent = (double)(highBDFP - lowBDFP);
            ChannelCanvas.decimateNew = Convert.ToInt32(Math.Ceiling(2.5D * numberOfBDFPointsToRepresent / Viewer.ActualWidth)); //undersampling a bit for min/max approach
            if (ChannelCanvas.decimateNew == 2) ChannelCanvas.decimateNew = 1; //No advantage to decimating by 2

            bool completeRedraw = ChannelCanvas.decimateNew != ChannelCanvas.decimateOld; //complete redraw of all channels if ...
            // change in decimation or if completely new screen (no overlap of old and new)

            //determine if overlap of new display with old and direction of scroll to determine trimming
            bool scrollingRight = currentHighSecs > oldLowSecs && currentHighSecs < oldHighSecs; //implies scrolling to right; will add points on left (low end), remove from right (high end)
            bool scrollingLeft = currentLowSecs > oldLowSecs && currentLowSecs < oldHighSecs; //implies scrolling to left; will add points on right (high end), remove from left (low end)
            completeRedraw = completeRedraw || !(scrollingLeft || scrollingRight); //redraw if no overlap

            int removeLow = 0;
            int removeHigh = 0;
            if (!completeRedraw)
            {
                //calculate number of points to remove above and below current point set
                List<PointListPoint> s = chans[0].PointList; //finding cut points works in any channel PointList
                //now loop through each channel graph to remove unneeded points
                //Use this information to determine bounds of current display and to calculate size of
                //non-overlap lower and higher than current display
                if (s.Count > 0)
                {
                    if (scrollingRight)
                    {
                        removeHigh = -s.FindIndex(p => p.X >= currentHighSecs); //where to start removing from high end of data points
                        if (removeHigh <= 0)
                        {
                            removeHigh += s.Count;
                            if (ChannelCanvas.decimateOld != 1)
                                removeHigh = (removeHigh / 2) * 2 + 2;
                        }
                        else
                            removeHigh = 0;                       
                    }

                    if (scrollingLeft)
                    {
                        removeLow = s.FindIndex(p => p.X >= currentLowSecs); //how many to remove from low end of data points
                        if (removeLow >= 0)
                        {
                            if (ChannelCanvas.decimateOld != 1)
                                removeLow = (removeLow / 2) * 2;
                        }
                        else
                            removeLow = s.Count;
                    }
                }
#if DEBUG
                Console.WriteLine("Low=" + removeLow + " High=" + removeHigh + " Count=" + s.Count + " Dec=" + ChannelCanvas.decimateOld);
#endif
                completeRedraw = (removeHigh + removeLow) >= s.Count;
            }
            ChannelCanvas.decimateOld = ChannelCanvas.decimateNew;

            foreach (ChannelCanvas cc in chans) //now use this information to reprocess channels
            {
                if (completeRedraw) //shortcut, if complete redraw
                    cc.PointList.Clear();
                else //then this channel may only require partial redraw:
                {
                    if (removeLow > 0) //then must remove points below; same as scrolling left
                        cc.PointList.RemoveRange(0, removeLow);
                    if (removeHigh > 0) //then must remove points above
                        cc.PointList.RemoveRange(cc.PointList.Count - removeHigh, removeHigh);
                    completeRedraw = completeRedraw || cc.PointList.Count == 0; //update completeRedraw, just in case!
                }
            }

//********* now, update the point list as required; there are three choices:
            //NB: using parallel computation makes little difference and causes some other difficulties, probably due to disk contention
            //NB: we process by point, then channel to reduce amount of disk seeking; most channels will be in buffer after first seek
            if (completeRedraw)
            {
                //**** 1. Redraw everything
                for (BDFEDFFileStream.BDFLoc i = lowBDFP; i.lessThan(highBDFP) && i.IsInFile; i.Increment(ChannelCanvas.decimateNew))
                    foreach (ChannelCanvas cc in chans)
                        cc.createMinMaxPoints(i, true);
                lastBDFLocLow = lowBDFP;
                lastBDFLocHigh = highBDFP;
            }
            else
            {
                lastBDFLocLow = lastBDFLocLow + (ChannelCanvas.decimateOld == 1 ? removeLow : (removeLow / 2) * ChannelCanvas.decimateOld);
                lastBDFLocHigh = lastBDFLocHigh - (ChannelCanvas.decimateOld == 1 ? removeHigh : (removeHigh / 2) * ChannelCanvas.decimateOld);
                //**** 2. Add points as needed below current point list
                BDFEDFFileStream.BDFLoc i;
                for (i = lastBDFLocLow - ChannelCanvas.decimateNew;
                    lowBDFP.lessThan(i) && i.IsInFile; i.Decrement(ChannelCanvas.decimateNew)) //start at first point below current range
                {
                    foreach (ChannelCanvas cc in chans)
                        cc.createMinMaxPoints(i, false);
                    lastBDFLocLow = i;
                }
               
                //**** 3. Add points as needed above current point list
                for (i = lastBDFLocHigh + ChannelCanvas.decimateNew;
                                i.lessThan(highBDFP) && i.IsInFile; i.Increment(ChannelCanvas.decimateNew)) //start at first point above current range
                // and work up to highBDFP
                {
                    foreach (ChannelCanvas cc in chans)
                        cc.createMinMaxPoints(i, true);
                    lastBDFLocHigh = i;
                }
            }

            foreach(ChannelCanvas cc in currentChannelList)
            {
                double my = 0D;
                double mx = 0D;
                foreach (PointListPoint p in cc.PointList)
                {
                    my += p.rawY;
                    mx += p.X;
                }
                mx /= cc.PointList.Count;
                my /= cc.PointList.Count;
                double sxy = 0D;
                double sx2 = 0D;
                foreach (PointListPoint p in cc.PointList)
                {
                    sxy += (p.X - mx) * (p.rawY - my);
                    sx2 += Math.Pow(p.X - mx, 2);
                }
                cc.B = sxy/sx2;
                cc.A = my - cc.B * mx;
                cc.overallMax = double.MinValue;
                cc.overallMin = double.MaxValue;
                for (int i = 0; i < cc.PointList.Count; i++)
                {
                    PointListPoint p = cc.PointList[i];
                    p.Y = p.rawY - cc.A - cc.B * p.X;
                    cc.overallMax = Math.Max(p.Y, cc.overallMax);
                    cc.overallMin = Math.Min(p.Y, cc.overallMin);
                    cc.PointList[i] = p;
                }
            }
            //Now, we've got the points we need to plot each of the channels
            //Here is where we have to remove offset and trends
            //Keep track of:
            //  1. If any points are added or removed, must recalculate trend and offset
            //  2. If not #1, if vertical rescale multiply slope by scale change
            //  3. If not #1, if horizontal resizing, leave slope the same (assume added points from change in decimation won't make a difference)
            //Also keep track of new min and max values for each


            for (int graphNumber = 0; graphNumber < chans.Count;  graphNumber++)
            {
                ChannelCanvas cc = chans[graphNumber];
                //calculate and set appropriate stroke thickness
                cc.path.StrokeThickness = newDisplayWidthInSecs * 0.0006D;

                cc.rescalePoints(); //create new pointList
                //and install it in window
                ChannelCanvas.OldCanvasHeight = ChannelCanvas.nominalCanvasHeight; //reset
                StreamGeometryContext ctx = cc.geometry.Open();
                ctx.BeginFigure(cc.pointList[0], false, false);
                ctx.PolyLineTo(cc.pointList, true, true);
                ctx.Close();
                cc.Height = ChannelCanvas.nominalCanvasHeight / XScaleSecsToInches;
                Canvas.SetTop(cc, (double)graphNumber * ChannelCanvas.nominalCanvasHeight);
                if(showOOSMarks)
                    markChannelRegions(cc);
            }
            this.Cursor = Cursors.Arrow;
        } //End reDrawChannels
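The per-channel loop near the end of reDrawChannels is an ordinary least-squares fit: slope B = sxy/sx2 and intercept A = my - B*mx, with the fitted line then subtracted from each rawY. A stand-alone sketch of the same calculation (hypothetical helper, not part of the viewer code):

        //Least-squares detrend of paired samples; returns residuals y[i] - (A + B*x[i])
        static double[] DetrendLeastSquares(double[] x, double[] y)
        {
            int n = x.Length;
            double mx = 0D, my = 0D;
            for (int i = 0; i < n; i++) { mx += x[i]; my += y[i]; }
            mx /= n; my /= n;                                   //means of x and y
            double sxy = 0D, sx2 = 0D;
            for (int i = 0; i < n; i++)
            {
                sxy += (x[i] - mx) * (y[i] - my);
                sx2 += (x[i] - mx) * (x[i] - mx);
            }
            double B = sxy / sx2;                               //slope
            double A = my - B * mx;                             //intercept
            double[] residual = new double[n];
            for (int i = 0; i < n; i++) residual[i] = y[i] - A - B * x[i];
            return residual;
        }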
Example #3
        private void runBDFtoEvent(BDFLoc lastEventLocation, ref BDFLoc nextEventLocation, InputEvent evt)
        {
            nextEventLocation += decimation - 1; //correct location so we know where to stop; warning: it's tricky!
            nextEventLocation.Pt /= decimation; //location should be next after actual Event to keep decimation on track
            nextEventLocation.Pt *= decimation; //this also works because decimation must be a factor of the record length
            int pt = lastEventLocation.Pt / decimation;
            int j = lastEventLocation.Pt;
            int k;
            int p = 0;
            double[] buff = new double[BDF.NumberOfChannels-1];
            double[] references = null;
            if (referenceChannels != null) references = new double[referenceChannels.Count];
            for (int rec = lastEventLocation.Rec; rec <= nextEventLocation.Rec; rec++)
            {
                if (BDF.read(rec) == null) return; //should only happen on the last call, to fill out the record
                if (rec == nextEventLocation.Rec) k = nextEventLocation.Pt;
                else k = BDF.NSamp;
                for (p = j; p < k; p += decimation, pt++)
                {
                    for (int c = 0; c < BDF.NumberOfChannels - 1; c++)
                        buff[c] = BDF.getSample(c, p);
                    if (referenceChannels != null) // then some channels need reference correction
                    {
                        //First calculate all needed references for this point
                        for (int i1 = 0; i1 < referenceChannels.Count; i1++)
                        {
                            references[i1] = 0.0D; //zero them out
                            foreach (int chan in referenceChannels[i1]) references[i1] += buff[chan]; //add them up
                            references[i1] /= (double)referenceChannels[i1].Count; //divide to get average
                        }

                        //Then, subtract them from each channel in each channel group
                        float refer;
                        for (int i1 = 0; i1 < referenceGroups.Count; i1++)
                        {
                            refer = (float)references[i1];
                            for (int i2 = 0; i2 < referenceGroups[i1].Count; i2++) buff[referenceGroups[i1][i2]] -= refer;
                        }
                    }
                    for (int c = 0; c < BDFWriter.NumberOfChannels - 1; c++)
                        BDFWriter.putSample(c, pt, (float)(buff[channels[c]]));
                    newStatus[pt] = lastStatus;
                }
                if (rec != nextEventLocation.Rec)
                {
                    BDFWriter.putStatus(newStatus);
                    BDFWriter.write();
                }
                j = 0; // OK because decimation has to be integer divisor of the sampling rate
                pt = 0; // so that these two remain in lock-step => no offset to calculate
            }

            /***** Get group variable for this record *****/
            string s = evt.GVValue[EDE.GroupVars.FindIndex(n => n.Equals(GV0))]; //Find value for this GV
            if (GV0.GVValueDictionary != null)
                lastStatus = GV0.GVValueDictionary[s]; //Lookup in GV value dictionary to convert to integer
            else
                lastStatus = Convert.ToInt32(s); //Or not; the GV value is a number that represents itself
        }
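The referencing block above first averages the channels named in each referenceChannels entry and then subtracts that average from every channel in the matching referenceGroups entry. A stand-alone sketch of the same operation on a single sample vector (the helper itself is hypothetical; the group layout mirrors the fields used above):

        //Re-reference one sample vector in place: subtract each group's reference average from its channels
        static void ReReference(double[] sample, List<List<int>> referenceChannels, List<List<int>> referenceGroups)
        {
            for (int g = 0; g < referenceGroups.Count; g++)
            {
                double reference = 0D;
                foreach (int chan in referenceChannels[g]) reference += sample[chan]; //sum the reference channels
                reference /= referenceChannels[g].Count;                              //average them
                foreach (int chan in referenceGroups[g]) sample[chan] -= reference;   //apply to the group
            }
        }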
Example #4
        private void createBDFRecord(BDFLoc eventLocation, InputEvent evt)
        {
            BDFLoc startingPt = eventLocation + oldOffsetInPts; //calculate starting point
            if (startingPt.Rec < 0) return; //start of record outside of file coverage; so skip it
            BDFLoc endPt = startingPt + newRecordLength * decimation; //calculate ending point
            if (endPt.Rec >= BDF.NumberOfRecords) return; //end of record outside of file coverage

            /***** Read correct portion of BDF file and decimate *****/
            int pt = 0;
            int j;
            int k;
            int p = 0; //set to avoid compiler complaining about uninitialized variable!
            for (int rec = startingPt.Rec; rec <= endPt.Rec; rec++)
            {
                if (BDF.read(rec) == null) throw new Exception("Unable to read BDF record #" + rec.ToString("0"));
                if (rec == startingPt.Rec) j = startingPt.Pt;
                else j = p - BDF.NSamp; // calculate point offset at beginning of new record, taking into account left over from decimation
                if (rec == endPt.Rec) k = endPt.Pt;
                else k = BDF.NSamp;
                for (p = j; p < k; p += decimation, pt++)
                    for (int c = 0; c < BDF.NumberOfChannels - 1; c++)
                        bigBuff[c, pt] = (float)BDF.getSample(c, p);
            }

            /***** Get group variable for this record and set Status channel values *****/
            string s = evt.GVValue[EDE.GroupVars.FindIndex(n => n.Equals(GV0))]; //Find value for this GV
            newStatus[StatusMarkerType == 1 ? 0 : -newOffsetInPts] = GV0.ConvertGVValueStringToInteger(s);
            // then propagate throughout Status channel
            for (int i = (StatusMarkerType == 1 ? 1 : 1 - newOffsetInPts); i < newRecordLength; i++) newStatus[i] = newStatus[i - 1];
            BDFWriter.putStatus(newStatus);

            /***** Calculate referenced data *****/
            calculateReferencedData();

            /***** Write out record after loading appropriate data *****/
            for (int iChan = 0; iChan < BDFWriter.NumberOfChannels - 1; iChan++)
            {
                int channel = channels[iChan]; // translate channel numbers
                double ave = 0.0;
                double beta = 0.0;
                double fn = (double)newRecordLength;
                if (radinOffset) //calculate Radin offset for this channel, based on a segment of the data specified by radinLow and radinHigh
                {
                    for (int i = radinLow; i < radinHigh; i++) ave += bigBuff[channel, i];
                    ave = ave / (double)(radinHigh - radinLow);
                }
                if (removeOffsets || removeTrends) //calculate average for this channel
                {
                    for (int i = 0; i < newRecordLength; i++) ave += bigBuff[channel, i];
                    ave = ave / fn;
                }
                double t = 0D;
                if (removeTrends) //calculate linear trend for this channel; see Bloomfield p. 115
                //NOTE: this technique works only for "centered" data: if there are N points, covering NT seconds, it is assumed that
                // these points are located at (2i-N-1)T/2 seconds, for i = 1 to N; in other words, the samples are in the center of
                // each sample time and are symmetrically distributed about a central zero time in the record. Then one can separately
                // calculate the mean and the slope and apply them together to remove a linear trend. This doesn't work for quadratic
                // or higher order trend removal however.
                {
                    t = (fn - 1.0D) / 2.0D;
                    fn *= fn * fn - 1D;
                    for (int i = 0; i < newRecordLength; i++) beta += bigBuff[channel, i] * ((double)i - t);
                    beta = 12.0D * beta / fn;
                }
                for (int i = 0; i < newRecordLength; i++) BDFWriter.putSample(iChan, i,
                    bigBuff[channel, i] - (float)(ave + beta * ((double)i - t)));
            }
            BDFWriter.write();
        }
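The trend-removal arithmetic above is the centered regression from Bloomfield restated in code: with y_i = bigBuff[channel, i] and t = (N - 1)/2, the slope has the closed form below, which is why fn is turned into N(N^2 - 1) and beta is multiplied by 12.

\beta \;=\; \frac{\sum_{i=0}^{N-1} y_i\,(i - t)}{\sum_{i=0}^{N-1} (i - t)^2}
      \;=\; \frac{12\sum_{i=0}^{N-1} y_i\,(i - t)}{N(N^2 - 1)},
\qquad t = \frac{N-1}{2},
\qquad \sum_{i=0}^{N-1} (i - t)^2 = \frac{N(N^2-1)}{12}.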
Example #5
        private void createFILMANRecord(BDFLoc stp, InputEvent evt)
        {
            BDFLoc startingPt = stp + offsetInPts; //calculate starting point
            if (startingPt.Rec < 0) return; //start of record outside of file coverage; so skip it
            BDFLoc endPt = startingPt + Convert.ToInt32(length * samplingRate); //calculate ending point
            if (endPt.Rec >= BDF.NumberOfRecords) return; //end of record outside of file coverage

            /***** Read correct portion of BDF file and decimate *****/
            int pt = 0;
            int j;
            int k;
            int p = 0; //set to avoid compiler complaining about uninitialized variable!
            for (int rec = startingPt.Rec; rec <= endPt.Rec; rec++)
            {
                if (BDF.read(rec) == null) throw new Exception("Unable to read BDF record #" + rec.ToString("0"));
                if (rec == startingPt.Rec) j = startingPt.Pt;
                else j = p - BDF.NSamp; // calculate point offset at beginning of new record
                if (rec == endPt.Rec) k = endPt.Pt;
                else k = BDF.NSamp;
                for (p = j; p < k; p += decimation, pt++)
                    for (int c = 0; c < BDF.NumberOfChannels - 1; c++)
                        bigBuff[c, pt] = (float)BDF.getSample(c, p);
            }

            //NOTE: after this point bigBuff contains all channels in the BDF file,
            // including all BDF records that contribute to this output record,
            // but it has been decimated to include only those points that will actually be written out!
            // This is necessary because the reference channels may not actually be included in the recordSet.

            /***** Get group variable for this record *****/
            int GrVar = 2; //Load up group variables
            foreach (GVEntry gve in GV)
            {
                string s = evt.GVValue[EDE.GroupVars.FindIndex(n => n.Equals(gve))]; //Find value for this GV
                FMStream.record.GV[GrVar++] = gve.ConvertGVValueStringToInteger(s); //Lookup in dictionary
            }

            /***** Include any ancillary data *****/
            if (anc)
            {
                int w = 0;
                for (int i = 0; i < EDE.ancillarySize; i += 4)
                    FMStream.record.ancillary[w++] = (((evt.ancillary[i] << 8)
                        + evt.ancillary[i + 1] << 8)
                        + evt.ancillary[i + 2] << 8)
                        + evt.ancillary[i + 3]; //NOTE: does not change endian; works for little endian to little endian
            }

            /***** Update bigBuff to referenced data *****/
            calculateReferencedData();

            /***** Write out channel after loading appropriate data *****/
            for (int iChan = 0; iChan < FMStream.NC; iChan++)
            {
                int channel = channels[iChan]; // translate channel numbers
                double ave = 0.0;
                double beta = 0.0;
                double fn = (double)FMStream.ND;
                if (radinOffset) //calculate Radin offset for this channel, based on a segment of the data specified by radinLow and radinHigh
                {
                    for (int i = radinLow; i < radinHigh; i++) ave += bigBuff[channel, i];
                    ave = ave / (double)(radinHigh - radinLow);
                }
                if (removeOffsets || removeTrends) //calculate average for this channel; this will always be true if removeTrends true
                {
                    for (int i = 0; i < FMStream.ND; i++) ave += bigBuff[channel, i];
                    ave = ave / fn;
                }
                double t = 0D;
                if (removeTrends) //calculate linear trend for this channel; see Bloomfield p. 115
                //NOTE: this technique works only for "centered" data: if there are N points, covering NT seconds, it is assumed that
                // these points are located at (2i-N-1)T/2 seconds, for i = 1 to N; in other words, the samples are in the center of
                // each sample time and are symmetrically distributed about a central zero time in the record. Then one can separately
                // calculate the mean and the slope and apply them together to remove a linear trend. This doesn't work for quadratic
                // or higher order trend removal however.
                {
                    t = (fn - 1.0D) / 2.0D;
                    fn *= fn * fn - 1D;
                    for (int i = 0; i < FMStream.ND; i++) beta += bigBuff[channel, i] * ((double)i - t);
                    beta = 12.0D * beta / fn;
                }
                for (int i = 0; i < FMStream.ND; i++)
                    FMStream.record[i] = (double)bigBuff[channel, i] - (ave + beta * ((double)i - t));
                FMStream.write(); //Channel number group variable taken care of here
            }
        }
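A side note on the ancillary packing earlier in this example: because C# gives + higher precedence than <<, each partial sum is shifted left by 8 before the next byte is added, so the four bytes are combined most-significant first. A minimal explicit equivalent (hypothetical helper, bit-identical for byte inputs):

        //Explicit form of the ancillary packing: combine four bytes, most significant first (no byte-order swap)
        static int PackAncillary(byte b0, byte b1, byte b2, byte b3)
        {
            return (b0 << 24) | (b1 << 16) | (b2 << 8) | b3;
        }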
Example #6
        public void Execute(object sender, DoWorkEventArgs e)
        {
            bw = (BackgroundWorker)sender;

            bw.ReportProgress(0, "Starting BDFConverter");
            CCIUtilities.Log.writeToLog("Starting BDFConverter on records in " + Path.Combine(directory, FileName));

            /***** Open BDF file *****/
            Microsoft.Win32.SaveFileDialog dlg = new Microsoft.Win32.SaveFileDialog();
            dlg.Title = "Save as BDF file ...";
            dlg.AddExtension = true;
            dlg.DefaultExt = ".bdf"; // Default file extension
            dlg.Filter = "BDF Files (.bdf)|*.bdf"; // Filter files by extension
            dlg.FileName = FileName + "-converted";
            bool? result = dlg.ShowDialog();
            if (result == false)
            {
                e.Result = new int[] { 0, 0 };
                return;
            }
            newRecordLengthPts = oldRecordLengthPts / decimation;

            BDFWriter = new BDFEDFFileWriter(File.Open(dlg.FileName, FileMode.Create, FileAccess.ReadWrite),
                channels.Count + 1, /* Extra channel named Status will have group variable value in it */
                newRecordLengthSec, /* Record length in seconds */
                newRecordLengthPts, /* Record length in points */
                true); /* BDF format */

            log = new LogFile(dlg.FileName + ".log.xml", GVMapElements);
            bigBuff = new float[edfPlus.NumberOfChannels - 1, newRecordLengthPts];   //have to dimension to old channels rather than new,
                                                                                //in case we need them for reference calculations later
            /***** Create BDF header record *****/
            BDFWriter.LocalRecordingId = edfPlus.LocalRecordingId;
            BDFWriter.LocalSubjectId = edfPlus.LocalSubjectId;
            int chan;
            for (int i = 0; i < channels.Count; i++)
            {
                chan = channels[i];
                BDFWriter.channelLabel(i, edfPlus.channelLabel(chan));
                BDFWriter.transducer(i, edfPlus.transducer(chan));
                BDFWriter.dimension(i, edfPlus.dimension(chan));
                BDFWriter.pMax(i, edfPlus.pMax(chan));
                BDFWriter.pMin(i, edfPlus.pMin(chan));
                BDFWriter.dMax(i, edfPlus.dMax(chan));
                BDFWriter.dMin(i, edfPlus.dMin(chan));
                BDFWriter.prefilter(i, edfPlus.prefilter(chan));
            }
            chan = channels.Count;
            BDFWriter.channelLabel(chan, "Status"); //Make entries for Status channel
            BDFWriter.transducer(chan, "None");
            BDFWriter.dimension(chan, "");
            BDFWriter.pMax(chan, 32767);
            BDFWriter.pMin(chan, -32768);
            BDFWriter.dMax(chan, 32767);
            BDFWriter.dMin(chan, -32768);
            BDFWriter.prefilter(chan, "None");
            BDFWriter.writeHeader();

            log.registerHeader(this);

            BDFLoc stp = edfPlus.LocationFactory.New();
            BDFLoc lastEvent = edfPlus.LocationFactory.New();
            outLoc = BDFWriter.LocationFactory.New();
            lastStatus = 0;

            /***** MAIN LOOP *****/
            foreach (EventMark em in Events) //Loop through Event file
            {
                bw.ReportProgress(0, "Processing event " + em.Time.ToString("0.000")); //Report progress
                stp.FromSecs(em.Time + offset); //set stopping point, where Status transition should occur
                if (!runEDFtoMark(ref lastEvent, stp, lastStatus))
                    throw new Exception("Reached EOF before reaching event at " + em.Time.ToString("0.000") + "secs");
                if (GVMapElements.Contains(em.GV))
                    lastStatus = em.GV.Value;
                else if (deleteAsZero)
                    lastStatus = 0;

            }
            stp.EOF(); //copy out to end of file
            runEDFtoMark(ref lastEvent, stp, lastStatus);
            e.Result = new int[] { BDFWriter.NumberOfRecords, outLoc.Rec }; //both numbers should be the same
            BDFWriter.Close();
            log.Close();
        }
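Execute has the signature of a BackgroundWorker DoWork handler; a minimal sketch of how a UI might wire it up (converter and statusLine are hypothetical names; the BackgroundWorker calls themselves are standard System.ComponentModel API):

        //Hypothetical wiring of the converter's Execute method to a BackgroundWorker
        BackgroundWorker bw = new BackgroundWorker();
        bw.WorkerReportsProgress = true;
        bw.DoWork += converter.Execute;                                           //converter exposes the Execute above
        bw.ProgressChanged += (s, ev) => statusLine.Text = (string)ev.UserState;  //messages passed to ReportProgress
        bw.RunWorkerCompleted += (s, ev) => { var counts = (int[])ev.Result; };   //record counts set in e.Result above
        bw.RunWorkerAsync();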
Example #7
 //Runs BDF records with Status = GVValue from lastEventLocation to nextEventLocation
 private bool runEDFtoMark(ref BDFLoc lastEventLocation, BDFLoc nextEventLocation, int GVValue)
 {
     int nChan = BDFWriter.NumberOfChannels - 1;
     while (lastEventLocation.lessThan(nextEventLocation))
     {
         if (outLoc.Pt == 0) //need to refill buffer
             if (!fillBuffer(ref lastEventLocation, edfPlus.LocationFactory.New().EOF())) return false; //reached EOF
         for (int chan = 0; chan < nChan; chan++)
         {
             int c = channels[chan];
             BDFWriter.putSample(chan, outLoc.Pt, (double)bigBuff[c, outLoc.Pt]);
         }
         BDFWriter.putSample(nChan, outLoc.Pt, GVValue);
         if ((++outLoc).Pt == 0)
             BDFWriter.write();
     }
     return true;
 }
Example #8
 /// <summary>
 /// Finds the next Status channel mark of a certain value
 /// </summary>
 /// <param name="gc">GreyCode to search for</param>
 /// <param name="mask">Mask for status word</param>
 /// <param name="stp">Point to begin search</param>
 /// <returns> true if Event found, false otherwise</returns>
 bool findEvent(int gc, ref BDFLoc stp)
 {
     uint b = Utilities.GC2uint((uint)gc);
     int rec = stp.Rec;
     bool first = equalStatusOnly;
     do
     {
         BDFEDFRecord BDFrec = BDF.read(rec++);
         if (BDFrec == null) return false;
         status = BDF.getStatus();
         log.registerHiOrderStatus(status[0]); // check for any change
         if (first && Utilities.GC2uint((uint)(status[stp.Pt] & eventHeader.Mask)) == b) return false; //make sure there's a change, if equal search
         first = false;
         while (stp.Rec != rec)
         {
             uint s = Utilities.GC2uint((uint)(status[stp.Pt] & eventHeader.Mask));
             if (s == b) return true;
             if (!equalStatusOnly && Utilities.modComp(s, b, eventHeader.Status) >= 0) return true;
             stp++;
         }
     } while (true);
 }
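Utilities.GC2uint converts the grey-coded Status value back to an ordinary unsigned integer before comparing; its implementation is not shown in these examples, but the standard Gray-to-binary conversion it presumably performs looks like this:

 //Standard Gray-code to binary conversion (sketch; the project's Utilities.GC2uint may differ in detail)
 static uint GrayToBinary(uint gray)
 {
     uint value = gray;
     for (uint shift = value >> 1; shift != 0; shift >>= 1)
         value ^= shift;                   //XOR in gray>>1, gray>>2, ... to recover the binary value
     return value;
 }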
Example #9
 /// <summary>
 /// Finds the first "edge" in analog channel marking an extrinsic Event;
 /// search goes "backwards" for leading Event and "forwards" for lagging Event from given point in datastream;
 /// returns with resulting point in the parameter
 /// </summary>
 /// <param name="sp">Point to begin search</param>
 /// <param name="limit">Limit of number of points to search for signal</param>
 /// <returns>true if Event found, false otherwise</returns>
 protected bool findExtrinsicEvent(ref BDFLoc sp, int limit)
 {
     int rec = sp.Rec;
     int l = 0;
     do
     {
         while (sp.Rec == rec)
         {
             if (l++ > limit) return false;
             if (risingEdge == EDE.rise) //concordant edges -- edge in channel is directly related to Status event
             {
                 double samp = BDF.getSample(EDE.channel, sp.Pt);
                 if (risingEdge == EDE.location ? samp > threshold : samp < threshold) return true;
                 sp = sp + (EDE.location ? 1 : -1);
             }
             else //discordant edges
             {
                 //Not implemented
             }
         }
         if (BDF.read(sp.Rec) == null) return false;
         rec = sp.Rec;
     } while (true);
 }
Example #10
        protected bool findEvent(ref BDFLoc stp, InputEvent ie)
        {
            if (!setEpoch) //First Event of this type: calculate start time (epoch) of the first point in the BDF file
            {
                if (!findEvent(ie.GC, ref stp))
                {
                    log.registerError("No Status found for Event named " + EDE.Name, ie);
                    stp.Rec = 0; stp.Pt = 0; //reset
                    return false;
                }
                nominalT.Rec = actualT.Rec = stp.Rec;
                nominalT.Pt = actualT.Pt = stp.Pt;
                epoch = ie.Time - ((double)stp.Rec + (double)stp.Pt / (double)BDF.NSamp)
                    * (double)BDF.RecordDuration;
                log.registerEpochSet(epoch, ie);
                setEpoch = true;
            }
            else //calculate Status search starting point
            {
                double t = ie.Time - epoch; //Calculate seconds from starting epoch
                nominalT.Rec = (int)(t / (double)BDF.RecordDuration); //Record number
                nominalT.Pt = (int)((t - (double)(nominalT.Rec * BDF.RecordDuration)) * (double)samplingRate); //Sample number
                if (continuousSearch)
                {
                    stp.Rec = actualT.Rec; //start at last found Event
                    stp.Pt = actualT.Pt;
                }
                else // find next Event by jumping near to it
                {
                    stp.Rec = nominalT.Rec;
                    stp.Pt = nominalT.Pt;
                    stp -= samplingRate / 16 + 1; //start 1/16sec before estimated time of the event
                }
                if (!findEvent(ie.GC, ref stp)) // find the next Status event in BDF; returns with stp set to event location
                {
                    log.registerError("Unable to locate Status for Event " + EDE.Name, ie);
                    stp.Rec = actualT.Rec; //return to last previous found Event
                    stp.Pt = actualT.Pt;
                    return false;
                }
                actualT.Rec = stp.Rec;
                actualT.Pt = stp.Pt;
            }

            if (!EDE.intrinsic)
                if (!findExtrinsicEvent(ref stp, maxSearch))
                {
                    log.registerError("No extrinsic event found for Event " + EDE.Name, ie);
                    return false;
                }
                else
                    log.registerExtrinsicEvent(nominalT, actualT, stp, ie);
            else
                log.registerIntrinsicEvent(nominalT, actualT, ie);
            return true;
        }
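The epoch bookkeeping above is plain time arithmetic between the Event-file clock and the BDF file position; restated (notation follows the code, with RecordDuration in seconds and NSamp points per record):

\text{epoch} = t_{\text{Event}} - \Bigl(\text{Rec} + \tfrac{\text{Pt}}{\text{NSamp}}\Bigr)\,\text{RecordDuration},
\qquad t = t_{\text{Event}} - \text{epoch},
\qquad
\text{nominalT.Rec} = \Bigl\lfloor \tfrac{t}{\text{RecordDuration}} \Bigr\rfloor,
\qquad
\text{nominalT.Pt} = \bigl(t - \text{nominalT.Rec}\cdot\text{RecordDuration}\bigr)\cdot\text{samplingRate}.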
Example #11
 public void registerExtrinsicEvent(BDFLoc nominal, BDFLoc actual, BDFLoc ext, InputEvent ie)
 {
     logStream.WriteStartElement("Event");
     logStream.WriteAttributeString("Index", ie.Index.ToString("0"));
     double t0 = actual.ToSecs();
     double nominalOffset = nominal.ToSecs() - t0;
     logStream.WriteElementString("ActualStatus", t0.ToString("0.000000"));
     logStream.WriteElementString("EventFileDiff", (nominal.ToSecs() - t0).ToString("0.000000"));
     logStream.WriteElementString("ExtrinsicEventDiff", (ext.ToSecs() - t0).ToString("0.000000"));
     logStream.WriteEndElement(/*Event*/);
     gatherStats(t0, nominalOffset);
 }
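For reference, a single Event entry written by this method has the shape below (the numbers are purely illustrative):

     <Event Index="17">
       <ActualStatus>12.345678</ActualStatus>
       <EventFileDiff>0.001234</EventFileDiff>
       <ExtrinsicEventDiff>0.000521</ExtrinsicEventDiff>
     </Event>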
Example #12
        // Create one new FILMAN record starting at stp and ending before end; returns true if successful
        private bool createFILMANRecord(ref BDFLoc stp, BDFLoc end)
        {
            if (!fillBuffer(ref stp, end)) return false;

            /***** Set group variable for this record *****/
            FMStream.record.GV[2] = currentGVValue;
            for (int iChan = 0; iChan < FMStream.NC; iChan++)
            {
                int channel = channels[iChan]; // translate channel numbers
                double ave = 0.0;
                double beta = 0.0;
                double fn = (double)FMStream.ND;
                if (removeOffsets || removeTrends) //calculate average for this channel; this will always be true if removeTrends true
                {
                    for (int i = 0; i < FMStream.ND; i++) ave += bigBuff[channel, i];
                    ave = ave / fn;
                }
                double t = 0D;
                if (removeTrends) //calculate linear trend for this channel; see Bloomfield p. 115
                {
                    t = (fn - 1.0D) / 2.0D;
                    fn *= fn * fn - 1D;
                    for (int i = 0; i < FMStream.ND; i++) beta += (bigBuff[channel, i] - ave) * ((double)i - t);
                    beta = 12.0D * beta / fn;
                }
                for (int i = 0; i < FMStream.ND; i++)
                    FMStream.record[i] = (double)bigBuff[channel, i] - (ave + beta * ((double)i - t));
                FMStream.write(); //Channel number group variable taken care of here
            }
            return true;
        }