Example #1
        //TODO: keep track of what types were added to map (a type added event) and store the types that implement IPartUpdatable
        //When that happens, ask map to include derived, and only store the base-most type that implements IPartUpdatable
        /// <param name="types_main">Types to call update on the main thread (can add the same type to both main and any)</param>
        /// <param name="types_any">Types to call update on any thread (can add the same type to both main and any)</param>
        public UpdateManager(Type[] types_main, Type[] types_any, Map map, int interval = 50, bool useTimer = true)
        {
            #region validate

            Type updateableType = typeof(IPartUpdatable);
            Type nonImplement   = UtilityCore.Iterate(types_main, types_any).
                                  Where(o => !o.GetInterfaces().Any(p => p.Equals(updateableType))).
                                  FirstOrDefault(); // one example is enough

            if (nonImplement != null)
            {
                throw new ArgumentException("Type passed in doesn't implement IPartUpdatable: " + nonImplement.ToString());
            }

            #endregion

            _typesMainUnchecked = types_main;
            _typesAnyUnchecked  = types_any;

            _map = map;

            if (useTimer)
            {
                _timerAnyThread           = new System.Timers.Timer();
                _timerAnyThread.Interval  = interval;
                _timerAnyThread.AutoReset = false;       // makes sure only one tick is firing at a time
                _timerAnyThread.Elapsed  += TimerAnyThread_Elapsed;
                _timerAnyThread.Start();
            }
            else
            {
                _clockAnyThread = new RealtimeClock();
            }
        }
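A minimal usage sketch for the constructor above, assuming a hypothetical ShipPart class that implements IPartUpdatable and an existing map instance (both names are illustrative, not from this example):

        // Hypothetical usage: ShipPart implements IPartUpdatable; map already exists.
        Type[] mainThreadTypes = { typeof(ShipPart) };  // updated on the main thread
        Type[] anyThreadTypes  = Type.EmptyTypes;       // nothing updated on worker threads

        var updateManager = new UpdateManager(mainThreadTypes, anyThreadTypes, map, interval: 50, useTimer: true);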
Example #2
        public void StartRecording()
        {
            // Start recording
            var clock = new RealtimeClock();

            recorder           = new MP4Recorder(webCamTexture.width, webCamTexture.height, 30);
            webCamTextureInput = new WebCamTextureInput(recorder, clock, webCamTexture);
        }
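A plausible stop counterpart for this example. The dispose-then-finish flow follows the NatSuite Recorders pattern, but treat the exact member names (Dispose, FinishWriting) as assumptions:

        public async void StopRecording()
        {
            // Stop committing webcam frames, then finalize the MP4 (assumed NatSuite API).
            webCamTextureInput.Dispose();
            var path = await recorder.FinishWriting();
            Debug.Log("Saved recording to: " + path);
        }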
Example #3
 void StartRecording()
 {
     Debug.Log("start recording method");
     // Start recording
     format = new VideoFormat(960, 540);
     NatCorder.StartRecording(Container.MP4, format, AudioFormat.None, OnReplay);
     // Create a camera recorder to record the main camera
     clock         = new RealtimeClock();
     videoRecorder = CameraRecorder.Create(mainCamera, clock);
     // audioRecorder = AudioRecorder.Create(sourceAudio);
 }
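A sketch of the stop path and the OnReplay callback referenced above. In the legacy NatCorder API the completion callback receives the path of the finished video; the member names here follow that API but should be treated as assumptions:

 void StopRecording()
 {
     videoRecorder.Dispose();    // stop feeding camera frames (assumed API)
     NatCorder.StopRecording();  // finalize the file; OnReplay receives its path
 }

 void OnReplay(string path)
 {
     Debug.Log("recording saved to: " + path);
 }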
Example #4
    public void StartRecording()
    {
        var sampleRate   = 44100;
        var channelCount = 1;

        // Start recording from the main camera
        recordingClock = new RealtimeClock();
        videoRecorder  = new MP4Recorder(videoWidth, videoHeight, 30, sampleRate, channelCount, OnRecording);
        cameraInput    = new CameraInput(videoRecorder, recordingClock, Camera.main);
        // Start the microphone
        audioDevice = AudioDevice.GetDevices()[0];
        audioDevice.StartRecording(sampleRate, channelCount, this);
    }
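Passing `this` to audioDevice.StartRecording implies this class receives microphone sample buffers and forwards them to the recorder. A sketch of that callback; the signature and the CommitSamples/Timestamp members are assumptions drawn from the NatSuite sample code, not confirmed by this example:

    // Assumed audio callback: forward microphone samples to the recorder,
    // stamped with the recording clock so audio stays aligned with camera frames.
    public void OnSampleBuffer(float[] sampleBuffer, long timestamp)
    {
        videoRecorder.CommitSamples(sampleBuffer, recordingClock.Timestamp);
    }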
Example #5
        /// <summary>
        /// Waits for the frame extractor to be ready for playback.
        /// Returns true if successful, false if it timed out.
        /// </summary>
        private bool WaitForPlaybackReadyState()
        {
            RealtimeClock.Pause();

            var renderTime = RealtimeClock.PositionSeconds;
            var startTime  = DateTime.UtcNow;
            var cycleCount = -1;
            FFmpegMediaFrame playbackFrame = null;

            while (IsCancellationPending == false)
            {
                if (DateTime.UtcNow.Subtract(startTime) > Constants.WaitForPlaybackReadyStateTimeout)
                {
                    ErrorOccurredCallback(this, new MediaPlaybackException(MediaPlaybackErrorSources.WaitForPlaybackReadyState,
                                                                           MediaPlaybackErrorCode.WaitForPlaybackTimedOut,
                                                                           string.Format("Waiting for playback ready state @ {0:0.000} timed out in {1} cycles", renderTime, cycleCount)));
                    return false;
                }

                cycleCount++;

                // Wait for a decoding cycle.
                MediaFramesExtractedDone.Wait(Constants.FrameExtractorWaitMs);
                renderTime    = RealtimeClock.PositionSeconds;
                playbackFrame = PrimaryFramesCache.GetFrame(renderTime, CheckFrameBounds);

                if (playbackFrame == null && PrimaryFramesCache.Count > 0)
                {
                    playbackFrame = PrimaryFramesCache.FirstFrame;
                    RealtimeClock.PositionSeconds = playbackFrame.StartTime;
                }

                if (playbackFrame != null)
                {
                    break;
                }
            }

            // Do some additional logging
            System.Diagnostics.Debug.WriteLineIf(
                cycleCount >= 0,
                string.Format("WaitForPlaybackReadyState @ {0:0.000} = {1} cycles. Leading Frames: {2}, Frame Index: {3}, Frame Start: {4}",
                              renderTime,
                              cycleCount,
                              PrimaryFramesCache.Count,
                              PrimaryFramesCache.IndexOf(playbackFrame),
                              (playbackFrame != null ?
                               playbackFrame.StartTime.ToString("0.000") : "NULL")));

            return true;
        }
Example #6
    public void StartRecording()
    {
        // Start the microphone
        var microphoneFormat = Format.Default;

        NatMic.StartRecording(microphoneFormat, OnSampleBuffer);
        // Start recording
        recordingClock = new RealtimeClock();
        var audioFormat = new AudioFormat(microphoneFormat.sampleRate, microphoneFormat.channelCount);

        NatCorder.StartRecording(Container.MP4, VideoFormat.Screen, audioFormat, OnRecording);
        // Create a camera recorder for the main cam
        cameraRecorder = CameraRecorder.Create(recordingCamera, recordingClock);
    }
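A sketch of the OnSampleBuffer handler wired to NatMic above. The signature and the CommitSamples call follow the legacy NatMic/NatCorder samples; treat both as assumptions:

    // Assumed legacy NatMic callback signature; forwards microphone samples into
    // the active NatCorder recording, timed against the recording clock.
    void OnSampleBuffer(AudioEvent audioEvent, float[] sampleBuffer, long timestamp, Format format)
    {
        NatCorder.CommitSamples(sampleBuffer, recordingClock.Timestamp);
    }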
Example #7
        public void StartRecording()
        {
            // Start recording
            var frameRate    = 30;
            var sampleRate   = recordMicrophone ? AudioSettings.outputSampleRate : 0;
            var channelCount = recordMicrophone ? (int)AudioSettings.speakerMode : 0;
            var clock        = new RealtimeClock();

            recorder = new MP4Recorder(videoWidth, videoHeight, frameRate, sampleRate, channelCount);
            // Create recording inputs
            cameraInput = new CameraInput(recorder, clock, Camera.main);
            audioInput  = recordMicrophone ? new AudioInput(recorder, clock, microphoneSource, true) : null;
            // Unmute microphone
            microphoneSource.mute = audioInput == null;
        }
Example #8
        /// <summary>
        /// Seeks to the specified target second.
        /// </summary>
        /// <param name="targetSecond">The target second.</param>
        public void Seek(decimal targetSecond)
        {
            if (IsLiveStream)
            {
                return;
            }

            if (AudioRenderer.HasInitialized)
            {
                AudioRenderer.Stop();
            }

            MediaFramesExtractedDone.Wait(Constants.FrameExtractorWaitMs);
            RealtimeClock.Seek(targetSecond);
            this.m_Position = targetSecond;
            NotifyPlayStateChanged();
        }
Example #9
        /// <summary>
        /// Rewinds and pauses media playback
        /// </summary>
        public void Stop()
        {
            if (IsLiveStream)
            {
                return;
            }

            if (AudioRenderer.HasInitialized)
            {
                AudioRenderer.Stop();
            }

            MediaFramesExtractedDone.Wait(Constants.FrameExtractorWaitMs);
            this.HasMediaEnded = false;
            if (Position > StartTime)
            {
                RealtimeClock.Seek(StartTime);
            }
        }
Example #10
        IEnumerator first(float seconds)
        {
            yield return new WaitForSeconds(seconds);

            // Start recording
            var frameRate    = 30;
            var sampleRate   = AudioSettings.outputSampleRate;
            var channelCount = (int)AudioSettings.speakerMode;
            var clock        = new RealtimeClock();

            recorder = new MP4Recorder(videoWidth, videoHeight, frameRate, sampleRate, channelCount);
            // Create recording inputs
            cameraInput = new CameraInput(recorder, clock, Camera.main);
            audioInput  = new AudioInput(recorder, clock, microphoneSource, true);
            // Unmute microphone
            microphoneSource.mute = audioInput == null;
        }
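The coroutine above only runs once something starts it; a typical (illustrative) call site:

        void Start()
        {
            // Begin recording 3 seconds after the scene loads.
            StartCoroutine(first(3f));
        }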
Example #11
        public void StartRecording()
        {
            // Compute the video width dynamically to match the screen's aspect ratio
            var videoHeight = (int)(videoWidth / videoCamera.aspect);

            videoHeight = videoHeight >> 1 << 1; // Ensure divisible by 2
            // Create recorder and camera input
            var clock = new RealtimeClock();

            recorder    = new MP4Recorder(videoWidth, videoHeight, 30);
            cameraInput = new CameraInput(recorder, clock, videoCamera);
            // Attach an optimized frame input to the camera input for better performance
            if (Application.platform == RuntimePlatform.Android)
            {
                cameraInput.frameInput = new GLESRenderTextureInput(recorder, multithreading: true);
            }
            else if (Application.platform == RuntimePlatform.IPhonePlayer)
            {
                cameraInput.frameInput = new MTLRenderTextureInput(recorder, multithreading: true);
            }
        }
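The shift pair in the height computation clears the lowest bit so both dimensions stay even, which MP4/H.264 encoders require. A worked example with illustrative numbers:

        // Illustrative arithmetic for the `videoHeight >> 1 << 1` trick:
        //   videoWidth = 720, aspect = 0.5625 (9:16)  ->  720 / 0.5625 = 1280 (even, unchanged)
        //   an odd intermediate such as 457           ->  457 >> 1 == 228, 228 << 1 == 456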
Example #12
        /// <summary>
        /// Initializes the internal transcoder -- This creates the input, processing, and output blocks that make
        /// up the video and audio decoding stream.
        /// </summary>
        /// <param name="filePath">The file path.</param>
        /// <param name="inputFormatName">Name of the input format. Leave null or empty to detect automatically</param>
        /// <param name="referer">The referer. Leave null or empty to skip setting it.</param>
        /// <param name="userAgent">The user agent. Leave null or empty to skip setting it.</param>
        /// <exception cref="FileFormatException"></exception>
        /// <exception cref="Exception">Could not find stream info
        /// or
        /// Media must contain at least a video or an audio stream</exception>
        /// <exception cref="System.Exception">Could not open file
        /// or
        /// Could not find stream info
        /// or
        /// Media must contain a video stream
        /// or
        /// Media must contain an audio stream
        /// or
        /// Unsupported codec
        /// or
        /// Could not initialize the output conversion context
        /// or
        /// Could not create output codec context from input
        /// or
        /// Could not open codec</exception>
        private void InitializeMedia(string filePath, string inputFormatName, string referer, string userAgent)
        {
            // Create the input format context by opening the file
            InputFormatContext = ffmpeg.avformat_alloc_context();

            AVDictionary *optionsDict = null;

            if (string.IsNullOrWhiteSpace(userAgent) == false)
            {
                ffmpeg.av_dict_set(&optionsDict, "user-agent", userAgent, 0);
            }

            if (string.IsNullOrWhiteSpace(referer) == false)
            {
                ffmpeg.av_dict_set(&optionsDict, "headers", $"Referer:{referer}", 0);
            }

            ffmpeg.av_dict_set_int(&optionsDict, "usetoc", 1, 0);

            { // for m3u8 (HLS) streaming
                // TODO: maybe detect here if it is streaming? I need to test if this negatively affects filesystem files or network files as opposed to RTSP streams and HLS streams
                ffmpeg.av_dict_set_int(&optionsDict, "multiple_requests", 1, 0);
                ffmpeg.av_dict_set_int(&optionsDict, "reconnect", 1, 0);
                ffmpeg.av_dict_set_int(&optionsDict, "reconnect_at_eof", 1, 0);
                ffmpeg.av_dict_set_int(&optionsDict, "reconnect_streamed", 1, 0);
                ffmpeg.av_dict_set_int(&optionsDict, "reconnect_delay_max", (int)Constants.WaitForPlaybackReadyStateTimeout.TotalMilliseconds, 0);
            }

            AVInputFormat *inputFormat = null;

            if (string.IsNullOrWhiteSpace(inputFormatName) == false)
            {
                inputFormat = ffmpeg.av_find_input_format(inputFormatName);
            }

            fixed(AVFormatContext **inputFormatContextRef = &InputFormatContext)
            {
                if (ffmpeg.avformat_open_input(inputFormatContextRef, filePath, inputFormat, &optionsDict) != 0)
                {
                    throw new FileFormatException(string.Format("Could not open stream or file '{0}'", filePath));
                }
            }

            InputFormatContext->iformat->flags |= ffmpeg.AVFMT_FLAG_NOBUFFER;
            InputFormatContext->iformat->flags |= ffmpeg.AVFMT_FLAG_NOFILLIN;

            ffmpeg.av_dict_free(&optionsDict);

            // Extract the stream info headers from the file
            if (ffmpeg.avformat_find_stream_info(InputFormatContext, null) != 0)
            {
                throw new Exception("Could not find stream info");
            }

            // search for the audio and video streams
            for (int i = 0; i < InputFormatContext->nb_streams; i++)
            {
                var codecType = InputFormatContext->streams[i]->codec->codec_type;

                if (codecType == AVMediaType.AVMEDIA_TYPE_VIDEO && InputVideoStream == null)
                {
                    InputVideoStream = InputFormatContext->streams[i];
                    continue;
                }

                if (codecType == AVMediaType.AVMEDIA_TYPE_AUDIO && InputAudioStream == null)
                {
                    InputAudioStream = InputFormatContext->streams[i];
                    continue;
                }
            }

            if (InputVideoStream != null)
            {
                this.InitializeVideo();
                this.HasVideo = VideoBitrate > 0 || VideoFrameRate > 0M || VideoFrameWidth > 0 || VideoFrameHeight > 0;
            }

            if (InputAudioStream != null)
            {
                this.InitializeAudio();
                this.HasAudio = AudioBytesPerSample > 0;
            }

            if (HasAudio == false && HasVideo == false)
            {
                throw new Exception("Media must contain at least a video or an audio stream");
            }
            else
            {
                // General Properties here

                NaturalDuration = Convert.ToDecimal(Convert.ToDouble(InputFormatContext->duration) / Convert.ToDouble(ffmpeg.AV_TIME_BASE));
                IsLiveStream    = Helper.IsNoPtsValue(InputFormatContext->duration);
                StartTime       = Convert.ToDecimal(Convert.ToDouble(InputFormatContext->start_time) / Convert.ToDouble(ffmpeg.AV_TIME_BASE));
                EndTime         = StartTime + NaturalDuration;

                RealtimeClock.Seek(StartTime);
            }
        }
Example #13
        /// <summary>
        /// Initializes the internal transcoder -- This creates the input, processing, and output blocks that make
        /// up the video and audio decoding stream.
        /// </summary>
        /// <param name="filePath">The file path.</param>
        /// <exception cref="System.Exception">
        /// Could not open file
        /// or
        /// Could not find stream info
        /// or
        /// Media must contain a video stream
        /// or
        /// Media must contain an audio stream
        /// or
        /// Unsupported codec
        /// or
        /// Could not initialize the output conversion context
        /// or
        /// Could not create output codec context from input
        /// or
        /// Could not open codec
        /// </exception>
        private void InitializeMedia(string filePath)
        {
            // Create the input format context by opening the file
            InputFormatContext = ffmpeg.avformat_alloc_context();

            AVDictionary *optionsDict = null;

            ffmpeg.av_dict_set_int(&optionsDict, "usetoc", 1, 0);

            fixed(AVFormatContext **inputFormatContextRef = &InputFormatContext)
            {
                if (ffmpeg.avformat_open_input(inputFormatContextRef, filePath, null, &optionsDict) != 0)
                {
                    throw new Exception(string.Format("Could not open file '{0}'", filePath));
                }
            }

            //InputFormatContext->iformat->flags = InputFormatContext->iformat->flags | FFmpegInvoke.AVFMT_SEEK_TO_PTS;
            ffmpeg.av_dict_free(&optionsDict);

            // Extract the stream info headers from the file
            if (ffmpeg.avformat_find_stream_info(InputFormatContext, null) != 0)
            {
                throw new Exception("Could not find stream info");
            }

            // search for the audio and video streams
            for (int i = 0; i < InputFormatContext->nb_streams; i++)
            {
                var codecType = InputFormatContext->streams[i]->codec->codec_type;

                if (codecType == AVMediaType.AVMEDIA_TYPE_VIDEO && InputVideoStream == null)
                {
                    InputVideoStream = InputFormatContext->streams[i];
                    continue;
                }

                if (codecType == AVMediaType.AVMEDIA_TYPE_AUDIO && InputAudioStream == null)
                {
                    InputAudioStream = InputFormatContext->streams[i];
                    continue;
                }
            }

            if (InputVideoStream != null)
            {
                this.InitializeVideo();
                this.HasVideo = VideoBitrate > 0 || VideoFrameRate > 0M || VideoFrameWidth > 0 || VideoFrameHeight > 0;
            }

            if (InputAudioStream != null)
            {
                this.InitializeAudio();
                this.HasAudio = AudioBytesPerSample > 0;
            }

            if (HasAudio == false && HasVideo == false)
            {
                throw new Exception("Media must contain at least a video or an audio stream");
            }
            else
            {
                // General Properties here

                NaturalDuration = Convert.ToDecimal(Convert.ToDouble(InputFormatContext->duration) / Convert.ToDouble(ffmpeg.AV_TIME_BASE));
                IsLiveStream    = Helper.IsNoPtsValue(InputFormatContext->duration);
                StartTime       = Convert.ToDecimal(Convert.ToDouble(InputFormatContext->start_time) / Convert.ToDouble(ffmpeg.AV_TIME_BASE));
                EndTime         = StartTime + NaturalDuration;

                RealtimeClock.Seek(StartTime);
            }
        }
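Both initializers classify the media as a live stream when the container duration is FFmpeg's AV_NOPTS_VALUE. The helper itself isn't shown in these examples; a minimal sketch of what it would check, assuming AV_NOPTS_VALUE's documented value of INT64_MIN:

        internal static class Helper
        {
            // FFmpeg defines AV_NOPTS_VALUE as 0x8000000000000000, i.e. long.MinValue.
            public static bool IsNoPtsValue(long value)
            {
                return value == long.MinValue;
            }
        }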