/// ------------------------------------------------------------------------------------
        private void InterleaveSegments(TimeRange segmentRange)
        {
            // Write a channel for the source recording segment
            var inputStream = _srcRecStreamProvider.GetStreamSubset(segmentRange);

            if (inputStream != null)
            {
                WriteAudioStreamToChannel(AnnotationChannel.Source, inputStream);
            }

            var pathToAnnotationsFolder = _srcRecordingTier.MediaFileName +
                                          Settings.Default.OralAnnotationsFolderSuffix;

            // Write a channel for the careful speech segment
            var filename = Path.Combine(pathToAnnotationsFolder, TimeTier.ComputeFileNameForCarefulSpeechSegment(segmentRange));

            using (var provider = GetWaveStreamForOralAnnotationSegment(filename, AudioRecordingType.Careful))
            {
                if (provider.Stream != null)
                {
                    WriteAudioStreamToChannel(AnnotationChannel.Careful, provider.Stream);
                }
            }

            // Write a channel for the oral translation segment
            filename = Path.Combine(pathToAnnotationsFolder, TimeTier.ComputeFileNameForOralTranslationSegment(segmentRange));
            using (var provider = GetWaveStreamForOralAnnotationSegment(filename, AudioRecordingType.Translation))
            {
                if (provider.Stream != null)
                {
                    WriteAudioStreamToChannel(AnnotationChannel.Translation, provider.Stream);
                }
            }
        }
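        /// ------------------------------------------------------------------------------------
        /// A minimal sketch, not part of the original source: one plausible way to drive
        /// InterleaveSegments over the segment ranges collected by the constructor below.
        /// The method name and its placement here are assumptions for illustration only.
        private void InterleaveAllSegments()
        {
            foreach (var segmentRange in _srcRecordingSegments)
            {
                InterleaveSegments(segmentRange);
            }
        }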
        /// ------------------------------------------------------------------------------------
        private OralAnnotationFileGenerator(TimeTier sourceTier, Func<int, bool> ignoreSegment,
                                            ISynchronizeInvoke synchInvoke)
        {
            _srcRecordingTier = sourceTier;
            _synchInvoke      = synchInvoke;

            bool fullySegmented = sourceTier.IsFullySegmented;

            _srcRecordingSegments = new List<TimeRange>();
            for (int i = 0; i < sourceTier.Segments.Count; i++)
            {
                // Per JohnH's request via e-mail (8-12-2012), exclude ignored segments
                if (!ignoreSegment(i))
                {
                    _srcRecordingSegments.Add(sourceTier.Segments[i].TimeRange);
                }
            }
            if (!fullySegmented)
            {
                _srcRecordingSegments.Add(new TimeRange(sourceTier.EndOfLastSegment, sourceTier.TotalTime));
            }

            _srcRecStreamProvider = WaveStreamProvider.Create(
                AudioUtils.GetDefaultWaveFormat(1), _srcRecordingTier.MediaFileName);

            var sourceFormat = _srcRecStreamProvider.Stream.WaveFormat;

            _outputAudioFormat = new WaveFormat(sourceFormat.SampleRate,
                                                sourceFormat.BitsPerSample, sourceFormat.Channels + 2);

            _output1ChannelAudioFormat = new WaveFormat(sourceFormat.SampleRate,
                                                        sourceFormat.BitsPerSample, 1);
        }
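        /// ------------------------------------------------------------------------------------
        /// Illustrative sketch, not part of the original source: the constructor above adds two
        /// channels to the source format (one for careful speech, one for oral translation), so
        /// a mono source yields a three-channel interleaved output. The sample values below are
        /// assumptions for illustration only.
        private static void IllustrateOutputFormat()
        {
            var sourceFormat = new WaveFormat(44100, 16, 1);   // hypothetical mono source recording
            var outputFormat = new WaveFormat(sourceFormat.SampleRate,
                                              sourceFormat.BitsPerSample, sourceFormat.Channels + 2);
            // outputFormat.Channels == 3: source + careful speech + oral translation
        }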
        /// ------------------------------------------------------------------------------------
        public string Run()
        {
            if (_file.GetAnnotationFile() != null)
            {
                return _file.GetAnnotationFile().FileName;   // REVIEW: This probably shouldn't happen. Maybe throw an exception.
            }
            WaitCursor.Show();
            var tiers = new TierCollection(_file.PathToAnnotatedFile);

            var timeTier = tiers.GetTimeTier();

            if (timeTier == null)
            {
                timeTier = new TimeTier(_file.PathToAnnotatedFile);
                tiers.Insert(0, timeTier);
            }

            foreach (var segment in GetNaturalBreaks())
            {
                timeTier.AppendSegment((float)segment.TotalSeconds);
            }

            StreamReader.Close();

            WaitCursor.Hide();

            return tiers.Save();
        }
        /// <summary>
        /// The current design doles out segments with empty time ranges, leaving their
        /// "TimeRange"/"Start"/"End" properties incorrect. Perhaps that design can be changed;
        /// in the meantime, call this if you need your text segments to carry actual time ranges.
        /// </summary>
        public void AddTimeRangeData(TimeTier timeTier)
        {
            int length = Segments.Count;

            for (int i = 0; i < length; i++)
            {
                Segments[i].TimeRange = timeTier.Segments[i].TimeRange;
            }
        }
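        /// ------------------------------------------------------------------------------------
        /// A minimal usage sketch, not part of the original source: copy time ranges from the
        /// time tier onto the matching text tier once both are loaded. The helper's name, the
        /// GetTranscriptionTier() accessor, the count check, and the assumption that
        /// AddTimeRangeData lives on the text tier type are all for illustration only.
        private static void IllustrateAddTimeRangeData(TierCollection tiers)
        {
            var timeTier = tiers.GetTimeTier();
            var textTier = tiers.GetTranscriptionTier();   // assumed accessor; adjust to the real API

            if (timeTier != null && textTier != null &&
                textTier.Segments.Count == timeTier.Segments.Count)
            {
                textTier.AddTimeRangeData(timeTier);       // segments now carry real Start/End values
            }
        }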
        /// ------------------------------------------------------------------------------------
        public TierCollection GetTierCollection()
        {
            var collection = new TierCollection();

            var timeSlots = GetTimeSlots();

            var transcriptionAnnotations = GetTranscriptionTierAnnotations();

            if (transcriptionAnnotations.Count == 0)
            {
                return collection;
            }

            var freeTransAnnotations = GetFreeTranslationTierAnnotations();

            EnsureMediaFileIsCorrect();

            var timeOrderTier     = new TimeTier(GetFullPathToMediaFile());
            var transcriptionTier = new TextTier(TextTier.ElanTranscriptionTierId);
            var freeTransTier     = new TextTier(TextTier.ElanTranslationTierId);

            foreach (var kvp in transcriptionAnnotations)
            {
                var start = timeSlots[kvp.Value.Attribute("TIME_SLOT_REF1").Value];
                var stop  = timeSlots[kvp.Value.Attribute("TIME_SLOT_REF2").Value];
                timeOrderTier.AddSegment(start, stop);
                transcriptionTier.AddSegment(kvp.Value.Value);

                string freeTransValue;
                freeTransTier.AddSegment(freeTransAnnotations.TryGetValue(kvp.Key, out freeTransValue)
                    ? freeTransValue : string.Empty);
            }

            // Add the time and transcription tiers to the collection.
            collection.Add(timeOrderTier);
            collection.Add(transcriptionTier);
            collection.Add(freeTransTier);

            timeOrderTier.ReadOnlyTimeRanges = GetDoesTranscriptionTierHaveDepedentTimeSubdivisionTier();

            return collection;
        }
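        /// ------------------------------------------------------------------------------------
        /// Hypothetical consumption sketch, not part of the original source: the collection
        /// returned above carries the time, transcription, and free-translation tiers parsed
        /// from the ELAN file. The helper's name and placement are assumptions for illustration.
        private static void IllustrateTierCollectionUse(TierCollection tiers)
        {
            var timeTier = tiers.GetTimeTier();
            if (timeTier != null)
            {
                // Each time segment lines up one-for-one with a transcription segment and a
                // free-translation segment added in the loop above.
                Console.WriteLine("Parsed {0} time segments from the EAF file.", timeTier.Segments.Count);
            }
        }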
        /// ------------------------------------------------------------------------------------
        private static bool CanGenerate(TimeTier sourceRecodingTier)
        {
            return sourceRecodingTier != null &&
                   Directory.Exists(sourceRecodingTier.MediaFileName + Settings.Default.OralAnnotationsFolderSuffix);
        }
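        /// ------------------------------------------------------------------------------------
        /// A minimal guard sketch, not part of the original source: check CanGenerate before
        /// constructing the generator, since the oral-annotations folder must already exist
        /// next to the source recording. The method and parameter names are illustrative.
        private static bool TryPrepareGeneration(TimeTier sourceRecordingTier)
        {
            if (!CanGenerate(sourceRecordingTier))
            {
                return false;   // no "<media file> + OralAnnotationsFolderSuffix" folder on disk
            }

            // ...safe to create and run the OralAnnotationFileGenerator here...
            return true;
        }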