/// <summary>
        /// Do all the initial time-consuming preparation, including building temp directories from
        /// any decks specified with the job, and scanning data prior to the start time to attempt to
        /// determine the presentation state.
        /// We require that Init complete before GetNextSample.
        /// </summary>
        public bool Init(ProgressTracker progressTracker)
        {
            if ((streamPlayers == null) || (streamPlayers.Length == 0))
            {
                // No data is not considered an error.
                return(true);
            }

            //preserve the end value to be restored when we're done.
            int oldEnd = progressTracker.EndValue;

            //build directories from any decks specified in the job
            imageGenerator = SlideImageGenerator.GetInstance(this.job, progressTracker, logMgr);

            //Refresh image export size and job in case they have changed.
            imageGenerator.SetImageExportSize(false, this.outputWidth, this.outputHeight);
            imageGenerator.Job = this.job;

            //Run process to build or refresh deck images.
            imageGenerator.Process();

            //Tell SlideImageMgr about the decks.
            slideImageMgr.SetSlideDirs(imageGenerator.OutputDirs);

            //scan data preceding the start time to establish all initial state
            progressTracker.CurrentValue  = 0;
            progressTracker.EndValue      = (int)lookBehindDuration;
            progressTracker.CustomMessage = "Initializing Presentation Data";
            BufferChunk bc;
            long        timestamp;

            while (!cancel)
            {
                long t; int index;
                if (getNextStreamPlayerFrameTime(out t, out index))
                {
                    if (t < start.Ticks)
                    {
                        if (streamPlayers[index].GetNextFrame(out bc, out timestamp))
                        {
                            slideImageMgr.ProcessFrame(bc);
                            progressTracker.CurrentValue = (int)(((TimeSpan)(new DateTime(timestamp) - streamPlayers[0].Start)).TotalSeconds);
                        }
                        else
                        {
                            break;
                        }
                    }
                    else
                    {
                        break;
                    }
                }
                else
                {
                    break;
                }
            }

            if (!cancel)
            {
                initialized = true;
            }

            progressTracker.CustomMessage = "";
            progressTracker.EndValue      = oldEnd;
            return(true);
        }
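        // Usage sketch (hypothetical; not part of the original source): the summary above requires
        // Init to complete before GetNextSample is used, so a caller runs it up front and checks the
        // 'initialized' flag set at the end of Init.  The method name below is an illustrative assumption.
        private void InitializeBeforeReadingSamples(ProgressTracker progressTracker)
        {
            // Init returns true even when there is no data; only cancellation leaves 'initialized' unset.
            if (Init(progressTracker) && initialized)
            {
                // Safe to begin calling GetNextSample from here.
            }
        }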
Example #2
        /// <summary>
        /// Write each stream from DBStreamPlayer to a WM file, then create FileStreamPlayers for each.
        /// It is necessary to do this before reading uncompressed samples, or using any of the
        /// methods that return uncompressed MediaTypes.
        /// This can be a long-running process.  It can be cancelled with the Stop method.
        /// </summary>
        /// <returns>False if we failed to configure the native profile</returns>
        public bool ToRawWMFile(ProgressTracker progressTracker)
        {
            if (cancel)
            {
                return(true);
            }

            String tmpfile = "";

            fileStreamPlayers = new FileStreamPlayer[streamPlayers.Length];
            for (int i = 0; i < streams.Length; i++)
            {
                streamProfileData = ProfileUtility.StreamIdToProfileData(streams[i], payload);
                if (payload == PayloadType.dynamicVideo)
                {
                    tmpfile = Utility.GetTempFilePath("wmv");
                    //nativeProfile = ProfileUtility.MakeNativeVideoProfile(streams[i]);
                }
                else
                {
                    tmpfile = Utility.GetTempFilePath("wma");
                    //nativeProfile = ProfileUtility.MakeNativeAudioProfile(streams[i]);
                }
                WMWriter wmWriter = new WMWriter();
                wmWriter.Init();
                //if (!wmWriter.ConfigProfile(nativeProfile,"",0))
                if (!wmWriter.ConfigProfile(StreamProfileData))
                {
                    return(false);
                }
                wmWriter.ConfigFile(tmpfile);
                wmWriter.ConfigNullProps();
                //wmWriter.SetCodecInfo(payload);
                wmWriter.Start();

                long        streamTime    = long.MaxValue;
                long        refTime       = 0;
                long        endTime       = 0;
                long        lastWriteTime = 0;
                BufferChunk frame;
                BufferChunk sample;
                bool        keyframe;
                bool        discontinuity;
                discontinuity = true;
                //Catch exceptions to work around the rare case of data corruption.
                //Oddly, in one case where this occurred, the corruption did not appear when the segments were short enough.
                while (streamPlayers[i].GetNextFrame(out frame, out streamTime))
                {
                    try {
                        sample = ProfileUtility.FrameToSample(frame, out keyframe);
                    }
                    catch {
                        DateTime dt = new DateTime(streamTime);
                        Console.WriteLine("Possible data corruption in stream: " + this.payload + ";" + this.cname +
                                          " at " + dt.ToString() + " (" + streamTime.ToString() + ")");
                        continue;
                    }
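                    //refTime anchors the output timeline: the first decodable sample is written at offset zero.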
                    if (refTime == 0)
                    {
                        refTime = streamTime;
                    }
                    lastWriteTime = streamTime - refTime;
                    try {
                        if (payload == PayloadType.dynamicVideo)
                        {
                            //Debug.WriteLine("Write video: " + (streamTime-refTime).ToString() + ";length=" + sample.Length.ToString());
                            wmWriter.WriteCompressedVideo((ulong)(streamTime - refTime), (uint)sample.Length, (byte[])sample, keyframe, discontinuity);
                        }
                        else
                        {
                            //Debug.WriteLine("Write audio: " + (streamTime-refTime).ToString() + ";length=" + sample.Length.ToString());
                            wmWriter.WriteCompressedAudio((ulong)(streamTime - refTime), (uint)sample.Length, (byte[])sample);
                        }
                    }
                    catch {
                        DateTime dt = new DateTime(streamTime);
                        Console.WriteLine("Failed to write.  Possible data corruption in stream: " + this.payload + ";" + this.cname +
                                          " at " + dt.ToString() + " (" + streamTime.ToString() + ")");
                    }

                    if (discontinuity)
                    {
                        discontinuity = false;
                    }
                    endTime = streamTime;
                    if (cancel)
                    {
                        break;
                    }

                    progressTracker.CurrentValue = (int)(lastWriteTime / Constants.TicksPerSec);
                    //Debug.WriteLine("StreamMgr.ToRawWMFile: ProgressTracker currentValue=" + progressTracker.CurrentValue.ToString() +
                    //    ";streamTime=" + streamTime.ToString());
                }

                wmWriter.Stop();
                wmWriter.Cleanup();
                wmWriter = null;

                fileStreamPlayers[i] = new FileStreamPlayer(tmpfile, refTime, endTime, false, streams[i]);
                if (cancel)
                {
                    break;
                }
            }
            return(true);
        }
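        // Usage sketch (hypothetical; not part of the original source): ToRawWMFile must finish before
        // uncompressed samples or uncompressed MediaTypes are requested, because it creates the
        // FileStreamPlayers that later reads go through.  The method name below is an illustrative assumption.
        private bool ExportToTempWMFiles(ProgressTracker progressTracker)
        {
            if (!ToRawWMFile(progressTracker))
            {
                // The native profile could not be configured for one of the streams.
                return false;
            }
            // fileStreamPlayers is now populated (up to any cancellation) and uncompressed reads can proceed.
            return true;
        }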
Example #3
        /// <summary>
        /// Recompress audio from the mixer into a temp file using the native profile.  This is used to implement mixing
        /// in the 'norecompression' scenario.
        /// </summary>
        /// <param name="progressTracker">Progress tracker updated as mixed samples are written</param>
        /// <returns>False if there are no audio streams or the profile could not be configured; otherwise true</returns>
        public bool Recompress(ProgressTracker progressTracker)
        {
            cancel = false;

            if (audioMgr.Length == 0)
            {
                return(false);
            }

            //String useProfile;
            ProfileData profileData = null;

            if (this.compatibleStreamID >= 0)
            {
                profileData = ProfileUtility.StreamIdToProfileData(compatibleStreamID, MSR.LST.Net.Rtp.PayloadType.dynamicAudio);
                //Debug.WriteLine("Mixer.Recompress: using audio profile from streamID: " + compatibleStreamID.ToString());
            }
            else
            {
                //Under what circumstances could we get here??
                profileData = audioMgr[0].StreamProfileData;
            }

            WMWriter wmWriter = new WMWriter();

            wmWriter.Init();

            if (!wmWriter.ConfigProfile(profileData))
            {
                return(false);
            }

            String tempFileName = Utility.GetTempFilePath("wma");

            wmWriter.ConfigFile(tempFileName);
            wmWriter.GetInputProps();
            wmWriter.ConfigAudio(audioMgr[0].GetUncompressedAudioMediaType());

            wmWriter.Start();

            //Write samples
            progressTracker.CurrentValue = 0;
            BufferChunk audioSample = null;
            long        audioTime = long.MaxValue;
            long        refTime = 0, endTime = 0;
            long        lastWriteTime = 0;

            while (!cancel)
            {
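                //audioSample is null once the previous sample has been written; endTime trails one sample
                //behind audioTime so it holds the timestamp of the last written sample when the loop exits.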
                if (audioSample == null)
                {
                    endTime = audioTime;
                    if (!GetNextSample(out audioSample, out audioTime))
                    {
                        break;
                    }
                }

                if (audioSample != null)
                {
                    //write audio
                    if (refTime == 0)
                    {
                        refTime = audioTime;
                    }
                    //Debug.WriteLine("mixer.Recompress write audio: " + (audioTime-refTime).ToString() + ";length=" + audioSample.Length.ToString());
                    lastWriteTime = audioTime - refTime;
                    wmWriter.WriteAudio((uint)audioSample.Length, audioSample, (ulong)(audioTime - refTime));
                    audioSample = null;
                }
                else
                {
                    break;
                }
                progressTracker.CurrentValue = (int)(lastWriteTime / (Constants.TicksPerSec));
            }

            wmWriter.Stop();
            wmWriter.Cleanup();
            wmWriter = null;

            //Prepare a filestreamPlayer to read back compressed samples.
            fileStreamPlayer = new FileStreamPlayer(tempFileName, refTime, endTime, true, -1);

            return(true);
        }
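        // Usage sketch (hypothetical; not part of the original source): in the 'norecompression' scenario
        // the mixed audio still has to be recompressed once, after which the compressed samples are read
        // back through 'fileStreamPlayer'.  The method name below is an illustrative assumption.
        private bool MixAndRecompress(ProgressTracker progressTracker)
        {
            if (!Recompress(progressTracker))
            {
                // Either there were no audio streams or the profile could not be configured.
                return false;
            }
            // fileStreamPlayer now wraps the temp WMA file containing the mixed audio.
            return true;
        }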
Example #4
 public PresentationFromVideoMgr(ArchiveTranscoderJobSegmentVideoDescriptor videoDescriptor,
                                 long start, long end, long offset, LogMgr errorLog, long previousSegmentEnd, ProgressTracker progressTracker)
 {
     this.videoDescriptor    = videoDescriptor;
     this.start              = start;
     this.end                = end;
     this.offset             = offset;
     this.errorLog           = errorLog;
     this.previousSegmentEnd = previousSegmentEnd;
     this.progressTracker    = progressTracker;
     this.tempdir            = null;
     this.videoStream        = null;
     this.deckGuid           = Guid.Empty;
 }
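 // Construction sketch (hypothetical; not part of the original source): the long time arguments are
 // presumably the same tick-based values used elsewhere in the transcoder, and the factory name and
 // local names below are illustrative assumptions.
 public static PresentationFromVideoMgr CreateForSegment(
     ArchiveTranscoderJobSegmentVideoDescriptor videoDescriptor, long startTicks, long endTicks,
     long offsetTicks, LogMgr errorLog, long previousSegmentEndTicks, ProgressTracker progressTracker)
 {
     return new PresentationFromVideoMgr(videoDescriptor, startTicks, endTicks, offsetTicks,
                                         errorLog, previousSegmentEndTicks, progressTracker);
 }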