// Example 1 (score: 0)
    /// <summary>
    /// Precomputes wave "ping" arrival times over points of a triangular lattice,
    /// keeping only one lattice point per direction (deduplicated by the
    /// x : y/sqrt(3) ratio) within the allowed travel distance.
    /// </summary>
    private void InitPingTimes()
    {
        pingTimes = new List<float>();
        HashSet<MediaRational> fracs = new HashSet<MediaRational>();
        // NOTE(review): this is waveTimeToLive squared, but below it is compared
        // against a squared *distance* (and against the integer lattice level) —
        // units only line up if waveSpeed == 1; confirm against the wave simulation.
        float maxAllowedDistance = waveTimeToLive * waveTimeToLive;
        float sqrt3              = (float)Math.Sqrt(3); // loop-invariant, hoisted out of the loops

        for (int level = 0; ; ++level)
        {
            for (int i = 0; i <= level; ++i)
            {
                int   j = level - i;
                int   ys3 = i, x = 1 - i % 2 + j; // ys3 is y / sqrt(3)
                float yf = ys3 * sqrt3, xf = (float)x;
                float distance2 = yf * yf + xf * xf;
                if (distance2 > maxAllowedDistance)
                {
                    continue;
                }
                MediaRational rel = new MediaRational(x, ys3);
                // HashSet<T>.Add returns false when the element is already present,
                // so the previous Contains-then-Add double lookup is unnecessary.
                if (!fracs.Add(rel))
                {
                    continue;
                }
                pingTimes.Add(Mathf.Sqrt(distance2) / waveSpeed);
            }
            if (level > maxAllowedDistance)
            {
                break;
            }
        }
    }
        /// <summary>
        /// Prepares the movie encoder for a recording session: creates the output
        /// directory, validates the input and its resolution, checks the selected
        /// encoder's resolution/transparency support, then constructs the encoder
        /// with video, audio and preset attributes.
        /// </summary>
        /// <param name="session">The recording session being started.</param>
        /// <returns>True when the encoder was constructed successfully; false otherwise.</returns>
        protected internal override bool BeginRecording(RecordingSession session)
        {
            m_RecordingStartedProperly = false;
            if (!base.BeginRecording(session))
            {
                return false;
            }

            try
            {
                Settings.fileNameGenerator.CreateDirectory(session);
            }
            catch (Exception)
            {
                Debug.LogError(string.Format("Movie recorder output directory \"{0}\" could not be created.", Settings.fileNameGenerator.BuildAbsolutePath(session)));
                return false;
            }

            var input = m_Inputs[0] as BaseRenderTextureInput;

            if (input == null)
            {
                Debug.LogError("MediaRecorder could not find input.");
                return false;
            }
            int width  = input.OutputWidth;
            int height = input.OutputHeight;

            if (width <= 0 || height <= 0)
            {
                Debug.LogError(string.Format("MovieRecorder got invalid input resolution {0} x {1}.", width, height));
                return false;
            }

            var    currentEncoderReg = Settings.GetCurrentEncoder();
            string errorMessage; // renamed from typo'd "erroMessage"

            if (!currentEncoderReg.SupportsResolution(Settings, width, height, out errorMessage))
            {
                Debug.LogError(errorMessage);
                return false;
            }

            var imageInputSettings = m_Inputs[0].settings as ImageInputSettings;

            // Alpha is written only when the input both supports and requests transparency.
            var alphaWillBeInImage = imageInputSettings != null && imageInputSettings.SupportsTransparent && imageInputSettings.RecordTransparency;

            if (alphaWillBeInImage && !currentEncoderReg.SupportsTransparency(Settings, out errorMessage))
            {
                Debug.LogError(errorMessage);
                return false;
            }

            // In variable frame rate mode, we set the encoder to the frame rate of the current display.
            m_FrameRate = RationalFromDouble(
                session.settings.FrameRatePlayback == FrameRatePlayback.Variable
                    ? GameHarness.DisplayFPSTarget
                    : session.settings.FrameRate);

            var videoAttrs = new VideoTrackAttributes
            {
                width        = (uint)width,
                height       = (uint)height,
                frameRate    = m_FrameRate,
                includeAlpha = alphaWillBeInImage,
                bitRateMode  = Settings.VideoBitRateMode
            };

            Debug.Log($"(UnityRecorder/MovieRecorder) Encoding video " +
                      $"{width}x{height}@[{videoAttrs.frameRate.numerator}/{videoAttrs.frameRate.denominator}] fps into " +
                      $"{Settings.fileNameGenerator.BuildAbsolutePath(session)}");

            var audioInput     = (AudioInputBase)m_Inputs[1];
            var audioAttrsList = new List<AudioTrackAttributes>();

            if (audioInput.audioSettings.PreserveAudio)
            {
#if UNITY_EDITOR_OSX
                // Special case with WebM and audio on older Apple computers: deactivate async GPU readback because there
                // is a risk of not respecting the WebM standard and receiving audio frames out of sync (see "monotonically
                // increasing timestamps"). This happens only with Target Cameras.
                if (m_Inputs[0].settings is CameraInputSettings && Settings.OutputFormat == VideoRecorderOutputFormat.WebM)
                {
                    UseAsyncGPUReadback = false;
                }
#endif
                var audioAttrs = new AudioTrackAttributes
                {
                    sampleRate = new MediaRational
                    {
                        numerator   = audioInput.sampleRate,
                        denominator = 1
                    },
                    channelCount = audioInput.channelCount,
                    language     = ""
                };

                audioAttrsList.Add(audioAttrs);

                if (RecorderOptions.VerboseMode)
                {
                    Debug.Log(string.Format("MovieRecorder starting to write audio {0}ch @ {1}Hz", audioAttrs.channelCount, audioAttrs.sampleRate.numerator));
                }
            }
            else
            {
                if (RecorderOptions.VerboseMode)
                {
                    Debug.Log("MovieRecorder starting with no audio.");
                }
            }

            try
            {
                var path = Settings.fileNameGenerator.BuildAbsolutePath(session);

                // If an encoder already exist destroy it
                Settings.DestroyIfExists(m_EncoderHandle);

                // Get the currently selected encoder register and create an encoder
                m_EncoderHandle = currentEncoderReg.Register(Settings.m_EncoderManager);

                // Create the list of attributes for the encoder, Video, Audio and preset
                // TODO: Query the list of attributes from the encoder attributes
                var attr = new List<IMediaEncoderAttribute>();
                attr.Add(new VideoTrackMediaEncoderAttribute("VideoAttributes", videoAttrs));

                if (audioInput.audioSettings.PreserveAudio)
                {
                    if (audioAttrsList.Count > 0)
                    {
                        // Index the list directly instead of allocating an array copy (ToArray()[0]).
                        attr.Add(new AudioTrackMediaEncoderAttribute("AudioAttributes", audioAttrsList[0]));
                    }
                }

                attr.Add(new IntAttribute(AttributeLabels[MovieRecorderSettingsAttributes.CodecFormat], Settings.encoderPresetSelected));
                attr.Add(new IntAttribute(AttributeLabels[MovieRecorderSettingsAttributes.ColorDefinition], Settings.encoderColorDefinitionSelected));

                if (Settings.encoderPresetSelectedName == "Custom")
                {
                    // For custom
                    attr.Add(new StringAttribute(AttributeLabels[MovieRecorderSettingsAttributes.CustomOptions], Settings.encoderCustomOptions));
                }
                // Construct the encoder given the list of attributes
                Settings.m_EncoderManager.Construct(m_EncoderHandle, path, attr);

                s_ConcurrentCount++;

                m_RecordingStartedProperly = true;
                return true;
            }
            catch (Exception ex)
            {
                Debug.LogError("MovieRecorder unable to create MovieEncoder. " + ex.Message);
                return false;
            }
        }
// Example 3 (score: 0)
 /// <summary>
 /// Converts this packet's timing fields from the <paramref name="source"/>
 /// time base to the <paramref name="dest"/> time base by delegating to
 /// <see cref="av_packet_rescale_ts(AVPacket*, AVRational, AVRational)"/>.
 /// </summary>
 /// <param name="source">Time base the packet's timestamps are currently in.</param>
 /// <param name="dest">Time base to rescale the timestamps into.</param>
 public void RescaleTimestamp(MediaRational source, MediaRational dest)
 {
     av_packet_rescale_ts(this, source, dest);
 }