Code Example #1
        /// <summary>
        /// Write audio, video and presentation data for this segment.  This can be a long-running process.
        /// It can be cancelled with the Stop method.
        /// </summary>
        /// <returns>A message string indicating a serious problem.  Null on normal termination.</returns>
        public String Encode()
        {
            if (cancel)
            {
                return(null);
            }

            if ((startTime == DateTime.MinValue) || (endTime == DateTime.MinValue))
            {
                return("Invalid timespan.");
            }

            if (useSlideStream && norecompression)
            {
                return("A slide stream cannot be processed in 'no recompression' mode.");
            }

            progressTracker.EndValue = (int)(endTime - startTime).TotalSeconds;
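            //(EndValue is presumably the progress denominator: the total seconds in this
            //segment, against which CurrentValue and percent complete are reported below.)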

            if (useSlideStream)
            {
                slideStream = new SlideStreamMgr(job, segment, logMgr, 29.97, writer.FrameWidth, writer.FrameHeight); //Using slides in place of the video
            }
            else
            {
                videoStream = new StreamMgr(segment.VideoDescriptor.VideoCname, segment.VideoDescriptor.VideoName, startTime, endTime, norecompression, PayloadType.dynamicVideo);
            }

            audiorecompression = !norecompression;
            //In this case we actually do need to recompress just the audio:
            if ((norecompression) && (segment.AudioDescriptor.Length != 1))
            {
                audiorecompression = true;
            }

            audioStreams = new StreamMgr[segment.AudioDescriptor.Length];
            for (int i = 0; i < segment.AudioDescriptor.Length; i++)
            {
                audioStreams[i] = new StreamMgr(segment.AudioDescriptor[i].AudioCname, segment.AudioDescriptor[i].AudioName, startTime, endTime, !audiorecompression, PayloadType.dynamicAudio);
            }

            if (cancel)
            {
                return(null);
            }

            //writeSegment records the actual AV write boundaries of this segment here
            //(the time of the first AV write and the segment's actual end; see the
            //presentation offset notes below).
            actualSegmentStart = 0;
            actualSegmentEnd   = 0;

            if (norecompression)
            {
                if (useSlideStream)
                {
                    //Unreachable: the slide stream + 'no recompression' combination was rejected above.
                }
                else
                {
                    // Warn and log an error if a problem is detected with the media type, but try to proceed anyway.
                    videoStream.ValidateCompressedMT(profileData.VideoMediaType, logMgr);
                    // Make the last MT available to the caller to pass to the next segment to facilitate the checking.
                    //this.compressedVideoMediaType = videoStream.GetFinalCompressedVideoMediaType();
                }

                if (audioStreams.Length == 1)
                {
                    //There is truly no recompression in this case.
                    //As above, do the same check with the audio MediaType.  Log a warning if the MT changed, but attempt to proceed.
                    audioStreams[0].ValidateCompressedMT(profileData.AudioMediaType, logMgr);
                    //this.compressedAudioMediaType = audioStreams[0].GetFinalCompressedAudioMediaType();
                    progressTracker.AVStatusMessage = "Writing Raw AV";
                }
                else
                {
                    //In this case we have to recompress audio in order to do the mixing, but that should be relatively quick.
                    //Note that the WMSDK docs say that SetInputProps must be set before BeginWriting.  This implies that
                    //alternating between writing compressed and uncompressed samples is not supported.  Therefore we will
                    //first recompress the mixed audio, then deliver compressed samples to the writer.

                    for (int i = 0; i < audioStreams.Length; i++)
                    {
                        progressTracker.AVStatusMessage = "Reading Audio (" + (i + 1).ToString() + " of " + audioStreams.Length.ToString() + ")";
                        if (cancel)
                        {
                            return(null);
                        }
                        if (!audioStreams[i].ToRawWMFile(progressTracker))
                        {
                            return("Failed to configure a raw audio profile.");
                        }
                    }

                    progressTracker.AVStatusMessage = "Mixing Audio";
                    mixer = new AudioMixer(audioStreams, this.logMgr);

                    //PRI3: We could tell the mixer to recompress with the previous segment's MT (if any).
                    //For now we just use the mixer's voting mechanism to choose the 'dominant' input
                    //(uncompressed) format, and make the profile from one of the streams that uses that format.

                    mixer.Recompress(progressTracker);
                    progressTracker.AVStatusMessage = "Writing Raw AV";
                }
            }
            else             // Recompress both audio and video
            {
                //In order to recompress, we first need to convert each stream to a raw wmv/wma
                //A slide stream starts life uncompressed, so just initialize decks.
                if (!useSlideStream)
                {
                    progressTracker.AVStatusMessage = "Reading Video";
                    if (!videoStream.ToRawWMFile(progressTracker))
                    {
                        return("Failed to configure the raw video profile.");
                    }
                }
                else
                {
                    if (!slideStream.Init(progressTracker))
                    {
                        return("Failed to prepare slide decks.");
                    }
                }
                for (int i = 0; i < audioStreams.Length; i++)
                {
                    progressTracker.AVStatusMessage = "Reading Audio (" + (i + 1).ToString() + " of " + audioStreams.Length.ToString() + ")";
                    if (cancel)
                    {
                        return(null);
                    }
                    if (!audioStreams[i].ToRawWMFile(progressTracker))
                    {
                        return("Failed to configure a raw audio profile.");
                    }
                }

                //Mix the raw audio streams down to a single stream for the writer.
                mixer = new AudioMixer(audioStreams, this.logMgr);

                writer.GetInputProps();
                //The SDK allows us to reconfigure the MediaTypes on the fly if we are writing uncompressed samples.
                //We do this at the beginning of every segment, even though most of the time it is probably the same MT.
                writer.ConfigAudio(mixer.UncompressedAudioMediaType);
                if (useSlideStream)
                {
                    writer.ConfigVideo(slideStream.UncompressedMediaType);
                    //writer.DebugPrintVideoProps();
                }
                else
                {
                    writer.ConfigVideo(videoStream.GetUncompressedVideoMediaType());
                }
                progressTracker.CurrentValue    = 0;
                progressTracker.AVStatusMessage = "Transcoding AV";
            }

            //Now all the config and prep is done, so write the segment.
            writeSegment();

            //If there is a Presentation stream, process it here unless the slides were used for the video stream.
            if ((!useSlideStream) && (this.segment.PresentationDescriptor != null) &&
                (this.segment.PresentationDescriptor.PresentationCname != null))
            {
                progressTracker.CurrentValue        = 0;
                progressTracker.ShowPercentComplete = false;
                progressTracker.AVStatusMessage     = "Writing Presentation";
                //The offset for PresentationMgr is a timespan in ticks to be subtracted from each absolute timestamp
                //to make a new absolute timestamp which has been adjusted for accumulated time skipped (or overlap) between
                //segments, or time during which there is missing AV data at the beginning of the first segment.
                //  It is calculated as: actualSegmentStart - jobStart - offset
                //  where:
                //    actualSegmentStart: time of the first AV write for the current segment
                //    jobStart: user-requested start time for the first segment in this job (Presentation data reference time)
                //    offset: calculated actual duration of all segments previous to this one.
                //
                //  Note that the presentation output will use the user-specified jobStart as the reference time.
                //  During playback, relative timings of presentation events will be calculated by subtracting the reference time.
                //  Also note that the reference time may not be the same as the actualSegmentStart for the first
                //  segment of the job when there is missing data at the beginning.  This almost always happens if we
                //  begin processing an archive from the beginning, since it takes several seconds to get the first
                //  AV bits into the database after the ArchiveService joins a venue.
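                //
                //  A worked example with hypothetical numbers (illustrative only): if jobStart
                //  is 10:00:00, the previous segments actually ran for 5:00 total (offset), and
                //  this segment's first AV write lands at 10:05:02 (actualSegmentStart), then
                //  the value passed to PresentationMgr is 10:05:02 - 10:00:00 - 5:00 = 2 seconds
                //  (in ticks), pulling each presentation timestamp back by the 2 seconds skipped
                //  between segments.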

                //long thisSegmentOffset = this.actualSegmentStart - startTime.Ticks;
                //long tmpoffset = this.actualSegmentStart-jobStart-offset;
                //Debug.WriteLine ("this segment offset. actualSegmentStart = " + this.actualSegmentStart.ToString() +
                //	" jobStart = " + jobStart.ToString() + " offset = " + offset.ToString() +
                //	" offset to presenterMgr = " + tmpoffset.ToString());
                long previousSegmentEndTime = 0;
                if (m_PreviousSegment != null)
                {
                    previousSegmentEndTime = m_PreviousSegment.actualSegmentEnd;
                }
                presentationMgr = new PresentationMgr(this.segment.PresentationDescriptor, this.actualSegmentStart, this.actualSegmentEnd,
                                                      this.actualSegmentStart - jobStart - offset, this.logMgr, previousSegmentEndTime, progressTracker);

                if (cancel)
                {
                    return(null);
                }
                String errmsg = presentationMgr.Process();
                if (errmsg != null)
                {
                    this.logMgr.WriteLine(errmsg);
                    this.logMgr.ErrorLevel = 5;
                }
                progressTracker.ShowPercentComplete = true;
            }

            if ((useSlideStream) && (slideStream != null))
            {
                slideStream.Dispose();
            }

            return(null);
        }
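
A minimal caller sketch (hedged: the encoder variable, its hypothetical SegmentEncoder type, and
the thread wiring are illustrative assumptions based on the summary comment above, not APIs
confirmed by this listing; only Encode and Stop are taken from it). Since Encode is long-running,
a caller would typically run it on a worker thread (System.Threading) and keep Stop available:

        //encoder: a hypothetical SegmentEncoder instance exposing the Encode method above.
        Thread worker = new Thread(() =>
        {
            String message = encoder.Encode();
            if (message != null)
            {
                //Non-null means a serious problem; null is normal termination or cancellation.
                Console.WriteLine("Encode failed: " + message);
            }
        });
        worker.Start();

        //...later, to cancel the in-progress encode (per the summary, Encode then returns null):
        //encoder.Stop();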