Code example #1
 /// <summary>
 /// To use: prepare the Windows Media Writer and other objects, construct, call the Encode method,
 /// use properties and other methods to retrieve results, then release and close the Windows Media Writer.
 /// </summary>
 /// <param name="job">The transcoding job that this segment belongs to.</param>
 /// <param name="segment">Batch params for this segment.  We assume all fields have already been validated.</param>
 /// <param name="jobStart">Absolute start time of the first segment of the job, in ticks.</param>
 /// <param name="offset">The total timespan, in ticks, of all previous segments in this job.</param>
 /// <param name="wmWriter">Windows Media Writer object to write to.  We assume it has been preconfigured and started.</param>
 /// <param name="progressTracker">Where to put UI status updates.</param>
 /// <param name="logMgr">Where to put log messages.</param>
 /// <param name="profileData">Compressed media types we are writing to in the no-recompression case.</param>
 /// <param name="norecompression">If true, write previously compressed samples instead of transcoding.</param>
 /// <param name="previousSegment">The preceding segment of this job, or null for the first segment.</param>
 public WMSegment(ArchiveTranscoderJob job, ArchiveTranscoderJobSegment segment,
                  long jobStart, long offset, WMWriter wmWriter,
                  ProgressTracker progressTracker, LogMgr logMgr, ProfileData profileData, bool norecompression,
                  WMSegment previousSegment)
 {
     cancel                 = false;
     this.logMgr            = logMgr;
     this.job               = job;
     this.segment           = segment;
     this.offset            = offset;
     this.jobStart          = jobStart;
     this.progressTracker   = progressTracker;
     this.profileData       = profileData;
     this.norecompression   = norecompression;
     this.m_PreviousSegment = previousSegment;
     videoStream            = null;
     slideStream            = null;
     audioStreams           = null;
     presentationMgr        = null;
     writer                 = wmWriter;
     startTime              = DateTime.Parse(segment.StartTime);
     endTime                = DateTime.Parse(segment.EndTime);
     useSlideStream         = Utility.SegmentFlagIsSet(segment, SegmentFlags.SlidesReplaceVideo);
 }
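
The summary comment above implies a per-segment driver loop. The following is a minimal sketch of that call sequence; CreateConfiguredWriter, job.Segment, jobStartTicks and writer.CleanUp are hypothetical names for illustration, not APIs shown in these examples.

 // A minimal sketch of the usage sequence from the summary comment above.
 // CreateConfiguredWriter, job.Segment, jobStartTicks and writer.CleanUp are
 // hypothetical; only WMSegment and Encode are shown in these examples.
 WMWriter writer = CreateConfiguredWriter();  // assumed preconfigured and started
 WMSegment previous = null;
 long offset = 0;  // total ticks of all previous segments
 foreach (ArchiveTranscoderJobSegment seg in job.Segment)
 {
     WMSegment wmSeg = new WMSegment(job, seg, jobStartTicks, offset, writer,
                                     progressTracker, logMgr, profileData,
                                     norecompression, previous);
     String err = wmSeg.Encode();  // null means normal termination
     if (err != null)
     {
         logMgr.WriteLine(err);
         break;
     }
     // offset would be advanced here by the actual duration of the segment just written.
     previous = wmSeg;
 }
 writer.CleanUp();  // hypothetical: release and close the Windows Media Writer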
Code example #2
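 /// <summary>
 /// Buffer samples from the given StreamMgr.  Tracks a running sample count and an
 /// 'exhausted' flag that signals the end of the source.
 /// </summary>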
 public SampleBuffer(StreamMgr streamMgr, uint ticksPerSample, ArrayList incompatibleGuids, int targetChannels)
 {
     this.streamMgr         = streamMgr;
     this.ticksPerSample    = ticksPerSample;
     this.incompatibleGuids = incompatibleGuids;
     this.targetChannels    = targetChannels;
     sampleCounter          = 0;
     exhausted              = false;
 }
Code example #3
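 /// <summary>
 /// Build presentation data from a video stream by capturing still images at intervals
 /// (see the Process method in code example #5).  The start, end, offset and
 /// previousSegmentEnd values are in ticks.
 /// </summary>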
 public PresentationFromVideoMgr(ArchiveTranscoderJobSegmentVideoDescriptor videoDescriptor,
                                 long start, long end, long offset, LogMgr errorLog, long previousSegmentEnd, ProgressTracker progressTracker)
 {
     this.videoDescriptor    = videoDescriptor;
     this.start              = start;
     this.end                = end;
     this.offset             = offset;
     this.errorLog           = errorLog;
     this.previousSegmentEnd = previousSegmentEnd;
     this.progressTracker    = progressTracker;
     this.tempdir            = null;
     this.videoStream        = null;
     this.deckGuid           = Guid.Empty;
 }
Code example #4
        /// <summary>
        /// Write audio, video and presentation data for this segment.  This can be a long-running process.
        /// It can be cancelled with the Stop method.
        /// </summary>
        /// <returns>A message string indicating a serious problem, or null for normal termination.</returns>
        public String Encode()
        {
            if (cancel)
            {
                return(null);
            }

            if ((startTime == DateTime.MinValue) || (endTime == DateTime.MinValue))
            {
                return("Invalid timespan.");
            }

            if (useSlideStream && norecompression)
            {
                return("A slide stream cannot be processed in 'no recompression' mode.");
            }

            progressTracker.EndValue = (int)(endTime - startTime).TotalSeconds;

            if (useSlideStream)
            {
                slideStream = new SlideStreamMgr(job, segment, logMgr, 29.97, writer.FrameWidth, writer.FrameHeight); //Using slides in place of the video
            }
            else
            {
                videoStream = new StreamMgr(segment.VideoDescriptor.VideoCname, segment.VideoDescriptor.VideoName, startTime, endTime, norecompression, PayloadType.dynamicVideo);
            }

            //Audio must be recompressed unless we are in no-recompression mode with exactly one
            //audio stream: mixing multiple audio streams requires uncompressed samples.
            audiorecompression = !norecompression || (segment.AudioDescriptor.Length != 1);

            audioStreams = new StreamMgr[segment.AudioDescriptor.Length];
            for (int i = 0; i < segment.AudioDescriptor.Length; i++)
            {
                audioStreams[i] = new StreamMgr(segment.AudioDescriptor[i].AudioCname, segment.AudioDescriptor[i].AudioName, startTime, endTime, !audiorecompression, PayloadType.dynamicAudio);
            }

            if (cancel)
            {
                return(null);
            }

            actualSegmentStart = 0;
            actualSegmentEnd   = 0;

            if (norecompression)
            {
                if (useSlideStream)
                {
                    //Not supported
                }
                else
                {
                    // Warn and log an error if a problem is detected with the media type, but try to proceed anyway.
                    videoStream.ValidateCompressedMT(profileData.VideoMediaType, logMgr);
                    // Make the last MT available to the caller to pass to the next segment to facilitate the checking.
                    //this.compressedVideoMediaType = videoStream.GetFinalCompressedVideoMediaType();
                }

                if (audioStreams.Length == 1)
                {
                    //There is truly no recompression in this case.
                    //As above, do the same check with the audio MediaType.  Log a warning if the MT changed, but attempt to proceed.
                    audioStreams[0].ValidateCompressedMT(profileData.AudioMediaType, logMgr);
                    //this.compressedAudioMediaType = audioStreams[0].GetFinalCompressedAudioMediaType();
                    progressTracker.AVStatusMessage = "Writing Raw AV";
                }
                else
                {
                    //In this case we have to recompress audio in order to do the mixing, but that should be relatively quick.
                    //Note that the WMSDK docs say that SetInputProps must be set before BeginWriting.  This implies that
                    //alternating between writing compressed and uncompressed samples is not supported.  Therefore we will
                    //first recompress the mixed audio, then deliver compressed samples to the writer.

                    for (int i = 0; i < audioStreams.Length; i++)
                    {
                        progressTracker.AVStatusMessage = "Reading Audio (" + (i + 1).ToString() + " of " + audioStreams.Length.ToString() + ")";
                        if (cancel)
                        {
                            return(null);
                        }
                        if (!audioStreams[i].ToRawWMFile(progressTracker))
                        {
                            return("Failed to configure a raw audio profile.");
                        }
                    }

                    progressTracker.AVStatusMessage = "Mixing Audio";
                    mixer = new AudioMixer(audioStreams, this.logMgr);

                    // PRI3: We could tell the mixer to recompress with the previous segment's MT (if any).
                    // For now we just use the mixer's voting mechanism to choose the 'dominant' input
                    // (uncompressed) format, and make the profile from one of the streams that uses that format.

                    mixer.Recompress(progressTracker);
                    progressTracker.AVStatusMessage = "Writing Raw AV";
                }
            }
            else             // Recompress both audio and video
            {
                //In order to recompress, we first need to convert each stream to a raw wmv/wma
                //A slide stream starts life uncompressed, so just initialize decks.
                if (!useSlideStream)
                {
                    progressTracker.AVStatusMessage = "Reading Video";
                    if (!videoStream.ToRawWMFile(progressTracker))
                    {
                        return("Failed to configure the raw video profile.");
                    }
                }
                else
                {
                    if (!slideStream.Init(progressTracker))
                    {
                        return("Failed to prepare slide decks.");
                    }
                }
                for (int i = 0; i < audioStreams.Length; i++)
                {
                    progressTracker.AVStatusMessage = "Reading Audio (" + (i + 1).ToString() + " of " + audioStreams.Length.ToString() + ")";
                    if (cancel)
                    {
                        return(null);
                    }
                    if (!audioStreams[i].ToRawWMFile(progressTracker))
                    {
                        return("Failed to configure a raw audio profile.");
                    }
                }

                mixer = new AudioMixer(audioStreams, this.logMgr);

                writer.GetInputProps();
                //The SDK allows us to reconfigure the MediaTypes on the fly if we are writing uncompressed samples.
                //We do this at the beginning of every segment, even though most of the time it is probably the same MT.
                writer.ConfigAudio(mixer.UncompressedAudioMediaType);
                if (useSlideStream)
                {
                    writer.ConfigVideo(slideStream.UncompressedMediaType);
                    //writer.DebugPrintVideoProps();
                }
                else
                {
                    writer.ConfigVideo(videoStream.GetUncompressedVideoMediaType());
                }
                progressTracker.CurrentValue    = 0;
                progressTracker.AVStatusMessage = "Transcoding AV";
            }

            //Now all the config and prep is done, so write the segment.
            writeSegment();

            //If there is a Presentation stream, process it here unless the slides were used for the video stream.
            if ((!useSlideStream) && (this.segment.PresentationDescriptor != null) &&
                (this.segment.PresentationDescriptor.PresentationCname != null))
            {
                progressTracker.CurrentValue        = 0;
                progressTracker.ShowPercentComplete = false;
                progressTracker.AVStatusMessage     = "Writing Presentation";
                // The offset for PresentationMgr is a timespan in ticks to be subtracted from each absolute timestamp
                // to make a new absolute timestamp which has been adjusted for accumulated time skipped (or overlap) between
                // segments, or time during which there is missing AV data at the beginning of the first segment.
                // It is calculated as: actualSegmentStart - jobStart - offset
                // where:
                //   actualSegmentStart: time of the first AV write for the current segment
                //   jobStart: user-requested start time for the first segment in this job (presentation data reference time)
                //   offset: calculated actual duration of all segments previous to this one.
                //
                // Note that the presentation output will use the user-specified jobStart as the reference time.
                // During playback, relative timings of presentation events will be calculated by subtracting the reference time.
                // Also note that the reference time may not be the same as the actualSegmentStart for the first segment of the
                // job in the case where there is missing data at the beginning.  This often (in practice, always) happens if we
                // begin processing an archive from the beginning, since it takes several seconds to get the first AV bits
                // into the database after the ArchiveService joins a venue.

                //long thisSegmentOffset = this.actualSegmentStart - startTime.Ticks;
                //long tmpoffset = this.actualSegmentStart-jobStart-offset;
                //Debug.WriteLine ("this segment offset. actualSegmentStart = " + this.actualSegmentStart.ToString() +
                //	" jobStart = " + jobStart.ToString() + " offset = " + offset.ToString() +
                //	" offset to presenterMgr = " + tmpoffset.ToString());
                long previousSegmentEndTime = 0;
                if (m_PreviousSegment != null)
                {
                    previousSegmentEndTime = m_PreviousSegment.actualSegmentEnd;
                }
                presentationMgr = new PresentationMgr(this.segment.PresentationDescriptor, this.actualSegmentStart, this.actualSegmentEnd,
                                                      this.actualSegmentStart - jobStart - offset, this.logMgr, previousSegmentEndTime, progressTracker);

                if (cancel)
                {
                    return(null);
                }
                String errmsg = presentationMgr.Process();
                if (errmsg != null)
                {
                    this.logMgr.WriteLine(errmsg);
                    this.logMgr.ErrorLevel = 5;
                }
                progressTracker.ShowPercentComplete = true;
            }

            if ((useSlideStream) && (slideStream != null))
            {
                slideStream.Dispose();
            }

            return(null);
        }
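
To make the offset arithmetic described in the long comment inside Encode concrete, here is a small worked sketch; all tick values are made up for illustration (10,000,000 ticks = 1 second).

        // Worked example of: presentationOffset = actualSegmentStart - jobStart - offset.
        // All values are illustrative; 10,000,000 ticks = 1 second.
        long jobStart           = 0;                   // user-requested job start (reference time)
        long offset             = 3000000000;          // previous segments totaled 300 seconds
        long actualSegmentStart = offset + 50000000;   // first AV write landed 5 seconds late

        long presentationOffset = actualSegmentStart - jobStart - offset;  // 50000000 ticks = 5 s

        // A presentation event stamped 10 seconds into this segment's AV:
        long eventTime = actualSegmentStart + 100000000;
        long adjusted  = eventTime - presentationOffset;  // = jobStart + offset + 100000000
        long relative  = adjusted - jobStart;             // plays back 310 seconds after jobStart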
Code example #5
        /// <summary>
        /// Capture stills from the video stream.  Create a temporary directory and save the images there.
        /// Compile a list of DataItem objects to indicate when the slide transitions should take place.
        /// </summary>
        /// <returns>Null on normal completion; problems are appended to the error log.</returns>
        public string Process()
        {
            ImageFilter.ImageFilter imgFilter = null;
            try {
                imgFilter = new ImageFilter.ImageFilter();
                imgFilter.DifferenceThreshold = this.differenceThreshold;
            }
            catch {
                this.errorLog.Append("Video capture images in the presentation will not be filtered, probably " +
                                     "because ImageMagick is not available in the configuration.\r\n");
            }

            this.differenceMetrics = new List <double>();
            metadata = new List <PresentationMgr.DataItem>();
            RTUpdate rtu = new RTUpdate();

            this.deckGuid = Guid.NewGuid();
            rtu.DeckGuid  = this.deckGuid;
            rtu.SlideSize = 1.0;
            rtu.DeckType  = (Int32)DeckTypeEnum.Presentation;

            this.videoStream = new StreamMgr(videoDescriptor.VideoCname, videoDescriptor.VideoName, new  DateTime(this.start), new DateTime(this.end), false, PayloadType.dynamicVideo);
            this.videoStream.ToRawWMFile(this.progressTracker);
            MediaTypeVideoInfo mtvi = videoStream.GetUncompressedVideoMediaType();

            this.tempdir = Utility.GetTempDir();
            Directory.CreateDirectory(this.tempdir);
            string filebase  = "slide";
            string extent    = ".jpg";
            int    fileindex = 1;

            BufferChunk bc;
            long        time;
            bool        newStream;
            string      previousFile = null;

            this.stopNow = false;
            while ((!stopNow) && (videoStream.GetNextSample(out bc, out time, out newStream)))
            {
                if ((time - lastFramegrab) >= (long)(this.frameGrabIntervalMs * Constants.TicksPerMs))
                {
                    DateTime dt = new DateTime(time);
                    Debug.WriteLine("time=" + dt.ToString() + ";length=" + bc.Length.ToString());
                    lastFramegrab = time;
                    string      filepath    = Path.Combine(tempdir, filebase + fileindex.ToString() + extent);
                    PixelFormat pixelFormat = subtypeToPixelFormat(mtvi.SubType);
                    Bitmap      bm          = new Bitmap(mtvi.VideoInfo.BitmapInfo.Width, mtvi.VideoInfo.BitmapInfo.Height, pixelFormat);
                    BitmapData  bd          = bm.LockBits(new Rectangle(0, 0, mtvi.VideoInfo.BitmapInfo.Width, mtvi.VideoInfo.BitmapInfo.Height), ImageLockMode.ReadWrite, pixelFormat);
                    Marshal.Copy(bc.Buffer, 0, bd.Scan0, bc.Length);
                    bm.UnlockBits(bd);
                    bm.RotateFlip(RotateFlipType.RotateNoneFlipY);
                    if ((SCALE) && (mtvi.VideoInfo.BitmapInfo.Width >= 1280))
                    {
                        int w = mtvi.VideoInfo.BitmapInfo.Width / 2;
                        int h = mtvi.VideoInfo.BitmapInfo.Height / 2;
                        //Dispose the scaled bitmap promptly; GDI+ handles leak quickly in a long loop.
                        using (Bitmap scaled = new Bitmap(bm, new Size(w, h)))
                        {
                            scaled.Save(filepath, ImageFormat.Jpeg);
                        }
                    }
                    else
                    {
                        bm.Save(filepath, ImageFormat.Jpeg);
                    }
                    bm.Dispose();
                    if (imgFilter != null)
                    {
                        string filterMsg;
                        bool   filterError;
                        double metric;
                        bool   differ = imgFilter.ImagesDiffer(filepath, previousFile, out filterMsg, out filterError, out metric);
                        if (filterError)
                        {
                            //this.errorLog.Append(filterMsg);
                            Console.WriteLine(filterMsg);
                        }
                        if (!differ)
                        {
                            continue;
                        }
                        this.differenceMetrics.Add(metric);
                        Console.WriteLine("Framegrab slide index: " + fileindex.ToString() +
                                          "; difference: " + metric.ToString() + "; time: " + dt.ToString());
                    }
                    rtu.SlideIndex = fileindex - 1; // This is a zero-based index
                    metadata.Add(new PresentationMgr.DataItem(time - this.offset, PresentationMgr.CopyRTUpdate(rtu)));
                    fileindex++;
                    previousFile = filepath;
                }
            }
            return(null);
        }
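
Putting code examples #3 and #5 together, a caller might drive the frame-grab pass roughly as follows. This is a hedged sketch: the variable names passed to the constructor are placeholders, and any result accessors beyond Process are assumptions not shown in the examples above.

        // Hypothetical sketch combining the constructor from example #3 with Process from #5.
        PresentationFromVideoMgr pfv = new PresentationFromVideoMgr(
            videoDescriptor, startTicks, endTicks, offsetTicks,
            logMgr, previousSegmentEndTicks, progressTracker);

        string err = pfv.Process();  // long-running; writes slide1.jpg, slide2.jpg, ... to a temp dir
        if (err == null)
        {
            // Slide transitions are now recorded as PresentationMgr.DataItem entries
            // (built inside Process above); how they are exposed to the caller is not
            // shown in these examples.
        }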