/// <summary>
/// Convenience overload: sets up the encoder for a video-only recording.
/// Wraps the video settings in an encoder attribute and delegates to the
/// list-based <c>Construct(string, List&lt;IMediaEncoderAttribute&gt;)</c> overload.
/// </summary>
/// <param name="path">Destination file path for the encoded movie.</param>
/// <param name="vAttr">Video track settings (resolution, frame rate, alpha, ...).</param>
public void Construct(string path, UnityEditor.Media.VideoTrackAttributes vAttr)
{
    var attributes = new List<IMediaEncoderAttribute>
    {
        new VideoTrackMediaEncoderAttribute("VideoAttributes", vAttr)
    };
    Construct(path, attributes);
}
/// <summary>
/// Sets up the encoder for a recording with both video and audio tracks.
/// Wraps the settings in encoder attributes and delegates to the list-based
/// <c>Construct(string, List&lt;IMediaEncoderAttribute&gt;)</c> overload.
/// </summary>
/// <param name="path">Destination file path for the encoded movie.</param>
/// <param name="vAttr">Video track settings (resolution, frame rate, alpha, ...).</param>
/// <param name="aAttr">One entry per audio track to include in the movie.</param>
public void Construct(string path, UnityEditor.Media.VideoTrackAttributes vAttr, NativeArray<UnityEditor.Media.AudioTrackAttributes> aAttr)
{
    var attributes = new List<IMediaEncoderAttribute>
    {
        new VideoTrackMediaEncoderAttribute("VideoAttributes", vAttr)
    };
    foreach (var audio in aAttr)
    {
        // BUGFIX: audio tracks were previously registered under the "VideoAttributes"
        // label, so StartEncoding's lookup of "AudioAttributes" never found them and
        // _hasAudio remained false. Use the label StartEncoding actually reads.
        attributes.Add(new AudioTrackMediaEncoderAttribute("AudioAttributes", audio));
    }
    Construct(path, attributes);
}
/// <summary>
/// Opens the ProRes output file via the native wrapper, using whatever settings
/// were registered in <c>m_Attributes</c> (codec format, color definition, image
/// size, filename, video/audio track attributes). On success <c>_encoderPtr</c>
/// holds the native encoder handle; on failure it is <c>IntPtr.Zero</c> and an
/// error is logged.
/// </summary>
public void StartEncoding()
{
    // Defaults used when the corresponding attribute was not registered.
    Vector2 imageSize = new Vector2(1920, 1080);
    float fps = 30.0f;
    float fAudioSamplingRate = 44100.0f;
    _hasAudio = false;
    _hasAlpha = false;
    ProResOut.ProResCodecFormat codecFormat = ProResOut.ProResCodecFormat.ProRes4444XQ;
    int colorDefinition = 0; // a value that matches ProResCommon::ColorDescription enum in C++ wrappers

    UnityEditor.Media.VideoTrackAttributes vAttr = new UnityEditor.Media.VideoTrackAttributes();

    IMediaEncoderAttribute attr;
    if (m_Attributes.TryGetValue(AttributeLabels[MovieRecorderSettingsAttributes.CodecFormat], out attr))
    {
        var presetSelectedAttr = (IntAttribute)(attr);
        codecFormat = (ProResOut.ProResCodecFormat)presetSelectedAttr.Value;
    }
    if (m_Attributes.TryGetValue(AttributeLabels[MovieRecorderSettingsAttributes.ColorDefinition], out attr))
    {
        var presetSelectedAttr = (IntAttribute)(attr);
        // Map UI value to wrapper enum values for the ProRes wrapper libraries.
        // We need to do this because the final value for the wrapper call most likely does not
        // match the UI dropdown index (depends what enum values are exposed).
        var enumValue = GetProResColorDefinitionFromExposedIndex(presetSelectedAttr.Value);
        colorDefinition = GetProResWrapperColorDefinition(enumValue);
    }
    if (m_Attributes.TryGetValue("ImageSize", out attr))
    {
        var imageSizeAttribute = (Vector2Attribute)(attr);
        imageSize = imageSizeAttribute.Value;
    }
    if (m_Attributes.TryGetValue("Filename", out attr))
    {
        var stringAttribute = (StringAttribute)(attr);
        _rawVideoFilename = stringAttribute.Value;
    }
    if (m_Attributes.TryGetValue("VideoAttributes", out attr))
    {
        // Video track attributes take precedence over any "ImageSize" entry.
        var vmAttr = (VideoTrackMediaEncoderAttribute)(attr);
        var vidAttr = vmAttr.Value;
        vAttr = vidAttr;
        imageSize.x = vAttr.width;
        imageSize.y = vAttr.height;
        fps = vAttr.frameRate.numerator / (float)vAttr.frameRate.denominator;
        _hasAlpha = vAttr.includeAlpha;
    }
    if (m_Attributes.TryGetValue("AudioAttributes", out attr))
    {
        var amAttr = (AudioTrackMediaEncoderAttribute)(attr);
        var audioAttr = amAttr.Value;
        fAudioSamplingRate = audioAttr.sampleRate.numerator / (float)audioAttr.sampleRate.denominator;
        // NOTE: the audio attributes are only used for the sampling rate here;
        // the previous unused local copy (aAttr) was removed.
        _hasAudio = true;
    }

    int nCodecFormat = (int)codecFormat;
#if UNITY_STANDALONE_OSX
    // Ensure that this codec format is supported, because on macOS we depend on AVFoundation in the OS
    System.Text.StringBuilder sb = new System.Text.StringBuilder(128);
    bool supported = ProResWrapperHelpers.SupportsCodecFormat(nCodecFormat, sb, sb.Capacity);
    if (!supported)
    {
        // sb holds the wrapper's explanation of why the format is unsupported.
        Debug.LogError(string.Format("Could not create file {0}: {1}", _rawVideoFilename, sb.ToString()));
        _encoderPtr = IntPtr.Zero;
        return;
    }
#endif

    // Prepare the file
    _encoderPtr = ProResWrapper.Create(UnityHelpers.PackageDescription, _rawVideoFilename, (int)imageSize.x, (int)imageSize.y, fps, _hasAudio, fAudioSamplingRate, nCodecFormat, _hasAlpha, colorDefinition);
    if (_encoderPtr == IntPtr.Zero)
    {
        Debug.LogError(string.Format("Could not create file {0}", _rawVideoFilename));
    }
}