IEnumerator CaptureWebcamToFileCoroutine()
{
    // Open a webcam streamer. The url prefix for this is webcam://
    // Optionally a webcam device id can be added (to get a list, use WebCamTexture.devices)
    string webcamStreamUrl = "webcam://";
    Streamer streamer = Streamer.forUrl (webcamStreamUrl);
    streamer.Connect (webcamStreamUrl);

    // Set up a remux @ 15fps
    var vi = new VideoStreamInfo (streamer.videoStreamInfo);
    vi.framerate = 15; // must be lower than framerate with this approach!
    AviRemux remux = new AviRemux ();
    remux.Init (File.OpenWrite (outfile), vi, null);

    // Do a fixed time capture, 10 seconds (150 frames @ 15fps).
    // The webcam framerate can be lower or higher than this. If it is lower, then
    // a frame is written multiple times; if higher, then some frames are not written.
    float captureStartTime = Time.realtimeSinceStartup;
    int realFrameNr, lastRealFrameNr = -1;
    do {
        // Read a frame from the webcam. The streamer also tracks a frame number
        // (streamer.VideoPosition), but we're not using it here.
        byte[] buf;
        var frame = streamer.VideoPosition;
        int bytesCnt = streamer.ReadVideoFrame (out buf);

        // Calculate the video frame number that we should be writing.
        realFrameNr = Mathf.RoundToInt ((Time.realtimeSinceStartup - captureStartTime) * vi.framerate);

        // If the loop is being executed too seldom compared to vi.framerate, write a warning to the console.
        if (realFrameNr - lastRealFrameNr > 1) {
            Debug.LogWarning ("Output framerate too high, possibly just skipped " + (realFrameNr - lastRealFrameNr) + " frames");
        }

        // Write as many frames as we need. Normally this is 0 or 1, but it can be higher (see the warning above).
        while (lastRealFrameNr < realFrameNr) {
            remux.WriteNextVideoFrame (buf, bytesCnt);
            lastRealFrameNr ++;
        }

        // Give control back to Unity for one frame
        yield return 1;
    } while (realFrameNr < 150);

    // We're done. Close the remux and streamer
    remux.Shutdown ();
    streamer.Shutdown ();
    Debug.Log ("Done capturing");
}
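// A minimal usage sketch, assuming the coroutine above and an "outfile" field live in
// the same MonoBehaviour (the Start method below is illustrative, not plugin API):
void Start ()
{
    // StartCoroutine drives the capture loop; each "yield return 1" in the coroutine
    // hands control back to Unity for one rendered frame.
    StartCoroutine (CaptureWebcamToFileCoroutine ());
}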
/// <summary>
/// Factory method for instantiating the right decoder instance based on streamInfo.
/// </summary>
public static VideoDecoder CreateFor(VideoStreamInfo streamInfo)
{
    if (streamInfo == null) {
        throw new System.ArgumentException ("Can't choose VideoDecoder without streamInfo (with at least codecFourCC)");
    }

    // list of FourCC codes http://www.fourcc.org/codecs.php
    switch (streamInfo.codecFourCC) {
    case VideoDecoderMJPEG.FOURCC_MJPG:
    case VideoDecoderMJPEG.FOURCC_CJPG:
    case VideoDecoderMJPEG.FOURCC_ffds:
    case VideoDecoderMJPEG.FOURCC_jpeg:
        return new VideoDecoderMJPEG (streamInfo);

    case VideoDecoderMPNG.FOURCC_MPNG:
        return new VideoDecoderMPNG (streamInfo);

    case VideoDecoderRGB.FOURCC_DIB_:
    case VideoDecoderRGB.FOURCC_NULL:
        return new VideoDecoderRGB (streamInfo);
    }

    throw new MpException ("No decoder for video fourCC 0x" + streamInfo.codecFourCC.ToString ("X") +
        " (" + RiffParser.FromFourCC (streamInfo.codecFourCC) + ")");
}
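// A hedged usage sketch: picking a decoder for a stream that has already been opened
// with a Demux (the Demux calls mirror the DropHalfTheFramesRemux example below;
// "infile" is an assumed path, and any decoder setup beyond CreateFor is omitted
// because it depends on the decoder API).
Stream instream = File.OpenRead (infile);
Demux demux = Demux.forSource (instream);
demux.Init (instream);

// CreateFor throws an MpException for unsupported fourCC codes, so an unknown codec
// fails early instead of producing undecodable frames later.
VideoDecoder decoder = VideoDecoder.CreateFor (demux.videoStreamInfo);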
/// <summary>
/// Initializes the remux. For convenience, call base.Init(...) in your subclass.
///
/// Depending on the output format, videoStreamInfo and audioStreamInfo can be NULL
/// to indicate, for example, that the AVI won't have audio.
/// </summary>
public virtual void Init(Stream dstStream, VideoStreamInfo videoStreamInfo, AudioStreamInfo audioStreamInfo)
{
    this.dstStream = dstStream;
    this._videoStreamInfo = videoStreamInfo;
    this._audioStreamInfo = audioStreamInfo;
}
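// A minimal sketch of what a subclass override could look like, following the summary's
// advice to call base.Init(...). The hasAudio field is hypothetical; a real remux
// (e.g. AviRemux) would typically also prepare its container headers at this point.
private bool hasAudio; // hypothetical example state

public override void Init (Stream dstStream, VideoStreamInfo videoStreamInfo, AudioStreamInfo audioStreamInfo)
{
    // Let the base class store the destination stream and both stream infos first
    base.Init (dstStream, videoStreamInfo, audioStreamInfo);

    // Then do format specific setup; audioStreamInfo may be null for video-only output
    hasAudio = audioStreamInfo != null;
}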
public override void Init(Stream sourceStream, LoadOptions loadOptions = null)
{
    // skip the video if asked not to load it
    if (loadOptions != null && loadOptions.skipVideo)
        return;

    // check the arguments
    if (sourceStream == null) {
        throw new System.ArgumentException ("sourceStream is required");
    }

    // measure load time
    var watch = new System.Diagnostics.Stopwatch ();
    watch.Start ();

    reader = new AtomicBinaryReader (sourceStream);

    // for detecting the buffer size
    int maxRawJpgSize = 0;

    // The stream can't be seeked unless there is an index, so create one here.
    frameStartIndex.Clear ();
    frameSize.Clear ();

    long markerCount = 0;
    long startIndex = -1;
    bool markerStart = false;
    int bytesRead = -1;
    long i = 0;
    var buffer = new byte[FILE_READ_BUFFER_SIZE];

    // read the file in chunks (more than 30x faster than reading byte by byte)
    long p = 0;
    do {
        bytesRead = reader.Read (ref p, buffer, 0, FILE_READ_BUFFER_SIZE);
        for (int j = 0; j < bytesRead; j++) {
            byte b = buffer [j];
            // wait for a marker start
            if (b == 0xFF) {
                markerStart = true;
            } else if (markerStart) {
                // read the other marker byte and decide what to do
                switch (b) {
                case 0xD8: // Start of image
                    startIndex = i + j - 1;
                    break;
                case 0xD9: // End of image
                    frameStartIndex.Add (startIndex);
                    int size = (int)(i + j - startIndex + 1);
                    if (size > maxRawJpgSize)
                        maxRawJpgSize = size;
                    frameSize.Add (size);
                    //Debug.Log("Found frame OFFS: " + startIndex + " SIZE: " + size);
                    break;
                }
                markerStart = false;
                markerCount++;
            }
        }
        i += bytesRead;
    } while (bytesRead >= FILE_READ_BUFFER_SIZE);

    // create a buffer for holding raw jpg data when decoding a frame
    rawJpgBuffer = new byte[maxRawJpgSize];

    watch.Stop ();
    #if MP_DEBUG
    Debug.Log ("Recreated index for raw MJPEG stream in " + (watch.Elapsed.TotalMilliseconds * 0.001f) + " seconds. " +
        "Frames: " + frameStartIndex.Count + ". Max jpg size: " + maxRawJpgSize + ". Markers: " + markerCount);
    #endif

    // set all the info about the video stream that we know
    if (loadOptions != null && loadOptions.videoStreamInfo != null) {
        videoStreamInfo = loadOptions.videoStreamInfo;
    } else {
        videoStreamInfo = new VideoStreamInfo ();
        videoStreamInfo.codecFourCC = VideoDecoderMJPEG.FOURCC_MJPG;
    }
    videoStreamInfo.frameCount = frameSize.Count;
    videoStreamInfo.lengthBytes = reader.StreamLength;
}
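// A hedged usage sketch for the Init override above. "RawMjpegDemux" is a placeholder
// name for the class containing it, and constructing LoadOptions like this is an
// assumption; only the two fields the method actually reads are set (skipVideo,
// videoStreamInfo).
var options = new LoadOptions ();
options.skipVideo = false;

// A raw MJPEG stream has no container header, so the indexing code above can only fill
// in the frame count and stream length. If you pass your own videoStreamInfo, it is
// used as-is, so set at least codecFourCC (and framerate if known) yourself.
options.videoStreamInfo = new VideoStreamInfo ();
options.videoStreamInfo.codecFourCC = VideoDecoderMJPEG.FOURCC_MJPG;
options.videoStreamInfo.framerate = 30;

var demux = new RawMjpegDemux (); // placeholder class name
demux.Init (File.OpenRead ("capture.mjpeg"), options);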
void DropHalfTheFramesRemux()
{
    // In this example we're going one level deeper in the API and working directly
    // with the Demux class. We could use MoviePlayerUtil.Load too, but for remuxing
    // we don't need Decoders to be instantiated, because we're just copying encoded
    // frame bytes around.
    //
    // Since we're not using decoders, we're not referencing anything from the Unity API.
    // Therefore it's possible to run this in a separate thread.
    RunInBackgroundOrNot (delegate() {
        // Instantiate a demux for an input stream based on the stream type.
        Stream instream = File.OpenRead (infile);
        Demux demux = Demux.forSource (instream);
        demux.Init (instream);

        // Instantiate a remux for an output stream. Here we have to explicitly
        // instantiate the remux we want, in this case AviRemux, and set its
        // properties. Since we're not doing much here, we can use the same
        // videoStreamInfo and audioStreamInfo for the remux as for the demux.
        // For the video, however, we clone the stream info, because we want to
        // change it: since we're going to drop every other frame, we also need
        // to halve the video framerate.
        Stream outstream = File.OpenWrite (outfile);
        Remux remux = new AviRemux ();
        var remuxVideoStreamInfo = new VideoStreamInfo (demux.videoStreamInfo);
        remuxVideoStreamInfo.framerate /= 2;
        remux.Init (outstream, remuxVideoStreamInfo, demux.audioStreamInfo);

        // Just some buffers and variables needed later
        byte[] videoBuffer, audioBuffer;
        int videoBytesRead, audioBytesRead;

        // Loop until we've processed all the video frames. If we wanted to run this code
        // in the main Unity thread without blocking, then we could wrap it all in a coroutine
        // and do "yield return 1" inside the loop.
        do {
            // Here we're using sequential access to the video (and audio) stream. The same could
            // be achieved with random access, but then only demuxes that can seek in a file
            // can be used (no streaming from network or webcam).
            videoBytesRead = demux.ReadVideoFrame (out videoBuffer);
            if (videoBytesRead > 0) {
                // Read the exact number of audio samples that are to be played during this frame
                int samplesPerVideoFrame = (int)(demux.audioStreamInfo.sampleRate / demux.videoStreamInfo.framerate);
                audioBytesRead = demux.ReadAudioSamples (out audioBuffer, samplesPerVideoFrame);

                // Only write every second video frame, but all the audio samples. The total stream
                // lengths will still be the same, because we've set the framerate of the remuxed
                // stream to half of the original.
                if (demux.VideoPosition % 2 == 1) {
                    remux.WriteNextVideoFrame (videoBuffer, videoBytesRead);
                }
                remux.WriteNextAudioSamples (audioBuffer, audioBytesRead);
            }
        } while (videoBytesRead > 0);

        // Close the remux and demux. While it's possible to leave the demux hanging there unclosed,
        // possibly introducing a memory leak, we have to close the remux for the output to be playable.
        // The reason is that AviRemux needs to write all unwritten index chunks and update the avi
        // header after all frames have been written.
        remux.Shutdown ();
        demux.Shutdown ();
    });
}
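// RunInBackgroundOrNot isn't shown above. A minimal sketch of such a helper, assuming
// it simply takes a parameterless delegate and either spawns a background thread or
// runs the work synchronously (this is an illustration, not the actual helper):
void RunInBackgroundOrNot (System.Action work)
{
    bool useBackgroundThread = true; // e.g. disable on platforms without threading
    if (useBackgroundThread) {
        // Run the remux work off the main thread; safe here because the delegate
        // above doesn't touch the Unity API.
        new System.Threading.Thread (() => work ()).Start ();
    } else {
        work ();
    }
}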