/// <inheritdoc/>
public void Import(SpriteFontAsset options, List<char> characters)
{
    fontSource = options.FontSource.GetFontPath();
    if (string.IsNullOrEmpty(fontSource))
        return;

    // Get the msdfgen.exe location
    var msdfgen = ToolLocator.LocateTool("msdfgen.exe")
        ?? throw new AssetException("Failed to compile a font asset, msdfgen was not found.");

    msdfgenExe = msdfgen.FullPath;
    tempDir = $"{Environment.GetEnvironmentVariable("TEMP")}\\";

    var factory = new Factory();

    FontFace fontFace = options.FontSource.GetFontFace();
    var fontMetrics = fontFace.Metrics;

    // The size of the font to generate.
    var fontSize = options.FontType.Size;

    var glyphList = new List<Glyph>();

    // Remap the LineMap coming from the font with a user-defined remapping.
    //
    // We remap the lineMap so the LineGap can be shrunk and repositioned at the top and/or bottom of the
    // font instead of sitting only at the top.
    // According to http://stackoverflow.com/questions/13939264/how-to-determine-baseline-position-using-directwrite#comment27947684_14061348
    // (the response is from an MSFT employee), the BaseLine should be LineGap + Ascent, but this is not what
    // we observe when comparing with MSWord (LineGap + Ascent seems to offset too much).
    //
    // So we first apply a factor to the line gap:
    //     NewLineGap = LineGap * LineGapFactor
    var lineGap = fontMetrics.LineGap * options.LineGapFactor;

    // Store the font height.
    LineSpacing = (float)(lineGap + fontMetrics.Ascent + fontMetrics.Descent) / fontMetrics.DesignUnitsPerEm * fontSize;

    // The baseline is then adjusted so that the line gap can be distributed between the top and the
    // bottom of the font:
    //     BaseLine = NewLineGap * LineGapBaseLineFactor
    BaseLine = (float)(lineGap * options.LineGapBaseLineFactor + fontMetrics.Ascent) / fontMetrics.DesignUnitsPerEm * fontSize;

    // Generate SDF bitmaps for each character in turn.
    foreach (var character in characters)
        glyphList.Add(ImportGlyph(fontFace, character, fontMetrics, fontSize));

    Glyphs = glyphList;

    factory.Dispose();
}
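//=====================================================================================
// A minimal, self-contained sketch of the LineSpacing/BaseLine math used above.
// The design-unit values below are hypothetical (not taken from any real font);
// only the formulas mirror the importer, everything else is an assumption for
// demonstration purposes.
using System;

static class LineMetricsSketch
{
    static void Main()
    {
        const int designUnitsPerEm = 2048; // assumed em size
        const int ascent = 1638;           // assumed ascent, in design units
        const int descent = 410;           // assumed descent, in design units
        const int rawLineGap = 67;         // assumed line gap, in design units

        const float fontSize = 32.0f;
        const float lineGapFactor = 1.0f;         // NewLineGap = LineGap * LineGapFactor
        const float lineGapBaseLineFactor = 1.0f; // share of the gap kept above the baseline

        var lineGap = rawLineGap * lineGapFactor;

        // Design units -> pixels at the requested font size, exactly as in Import().
        var lineSpacing = (lineGap + ascent + descent) / designUnitsPerEm * fontSize;
        var baseLine = (lineGap * lineGapBaseLineFactor + ascent) / designUnitsPerEm * fontSize;

        Console.WriteLine($"LineSpacing = {lineSpacing:F2}px, BaseLine = {baseLine:F2}px");
    }
}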
/// <inheritdoc />
protected override async Task<ResultStatus> DoCommandOverride(ICommandContext commandContext)
{
    // Get path to ffmpeg
    var ffmpeg = ToolLocator.LocateTool("ffmpeg.exe")?.ToWindowsPath()
        ?? throw new AssetException("Failed to compile a sound asset, ffmpeg was not found.");

    // Get absolute path of asset source on disk
    var assetDirectory = Parameters.Source.GetParent();
    var assetSource = UPath.Combine(assetDirectory, Parameters.Source);

    // Execute ffmpeg to convert the source to PCM, then encode it with Celt
    var tempFile = Path.GetTempFileName();
    try
    {
        // Spatialized sounds are forced to mono
        var channels = Parameters.Spatialized ? 1 : 2;
        var commandLine = " -hide_banner -loglevel error" +                                           // hide most log output
                          $" -i \"{assetSource.ToWindowsPath()}\"" +                                  // input file
                          $" -f f32le -acodec pcm_f32le -ac {channels} -ar {Parameters.SampleRate}" + // codec
                          $" -map 0:{Parameters.Index}" +                                             // stream index
                          $" -y \"{tempFile}\"";                                                      // output file (always overwrite)
        var ret = await ShellHelper.RunProcessAndGetOutputAsync(ffmpeg, commandLine, commandContext.Logger);
        if (ret != 0 || commandContext.Logger.HasErrors)
            throw new AssetException($"Failed to compile a sound asset, ffmpeg failed to convert {assetSource}");

        var encoder = new Celt(Parameters.SampleRate, CompressedSoundSource.SamplesPerFrame, channels, false);

        // Compute the target packet size against an int16 PCM (CD quality) baseline,
        // even though the samples fed to the encoder are 32-bit floats.
        var uncompressed = CompressedSoundSource.SamplesPerFrame * channels * sizeof(short);
        var target = (int)Math.Floor(uncompressed / (float)Parameters.CompressionRatio);

        var dataUrl = Url + "_Data";
        var newSound = new Sound
        {
            CompressedDataUrl = dataUrl,
            Channels = channels,
            SampleRate = Parameters.SampleRate,
            StreamFromDisk = Parameters.StreamFromDisk,
            Spatialized = Parameters.Spatialized,
        };

        // Make sure we don't compress Celt data
        commandContext.AddTag(new ObjectUrl(UrlType.Content, dataUrl), Builder.DoNotCompressTag);

        var delay = encoder.GetDecoderSampleDelay();

        var frameSize = CompressedSoundSource.SamplesPerFrame * channels;
        using (var reader = new BinaryReader(new FileStream(tempFile, FileMode.Open, FileAccess.Read)))
        using (var outputStream = MicrothreadLocalDatabases.DatabaseFileProvider.OpenStream(dataUrl, VirtualFileMode.Create, VirtualFileAccess.Write, VirtualFileShare.Read, StreamFlags.Seekable))
        {
            var writer = new BinarySerializationWriter(outputStream);
            var outputBuffer = new byte[target];
            var buffer = new float[frameSize];
            var count = 0;
            var padding = sizeof(float) * channels * delay;
            var length = reader.BaseStream.Length; // Cache the length, because this getter is expensive to use
            for (var position = 0; position < length + padding; position += sizeof(float))
            {
                if (count == frameSize) // flush
                {
                    var len = encoder.Encode(buffer, outputBuffer);
                    writer.Write((short)len);
                    outputStream.Write(outputBuffer, 0, len);

                    newSound.Samples += count / channels;
                    newSound.NumberOfPackets++;
                    newSound.MaxPacketLength = Math.Max(newSound.MaxPacketLength, len);

                    count = 0;
                    Array.Clear(buffer, 0, frameSize);
                }

                // Pad with 0 once we reach the end of the stream (needed because of the encoder delay)
                buffer[count++] = (position < length) ? reader.ReadSingle() : 0.0f;
            }

            if (count > 0) // flush
            {
                var len = encoder.Encode(buffer, outputBuffer);
                writer.Write((short)len);
                outputStream.Write(outputBuffer, 0, len);

                newSound.Samples += count / channels;
                newSound.NumberOfPackets++;
                newSound.MaxPacketLength = Math.Max(newSound.MaxPacketLength, len);
            }
        }

        // Samples is the real sound sample count; remove the decoder delay at the end
        newSound.Samples -= delay;

        var assetManager = new ContentManager(MicrothreadLocalDatabases.ProviderService);
        assetManager.Save(Url, newSound);

        return ResultStatus.Successful;
    }
    finally
    {
        File.Delete(tempFile);
    }
}
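//=====================================================================================
// Hypothetical companion sketch (not part of the compiler): it reads back the
// packet layout the encoding loop above produces, i.e. a sequence of
// [int16 length prefix][Celt packet bytes] records. This assumes the length
// prefix round-trips through BinaryReader.ReadInt16, which holds if
// BinarySerializationWriter writes little-endian shorts.
using System.Collections.Generic;
using System.IO;

static class CompressedSoundPacketReader
{
    public static List<byte[]> ReadPackets(string path)
    {
        var packets = new List<byte[]>();
        using (var reader = new BinaryReader(File.OpenRead(path)))
        {
            while (reader.BaseStream.Position < reader.BaseStream.Length)
            {
                var len = reader.ReadInt16();       // packet length prefix
                packets.Add(reader.ReadBytes(len)); // one compressed Celt frame
            }
        }
        return packets;
    }
}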
/// <inheritdoc />
protected override async Task<ResultStatus> DoCommandOverride(ICommandContext commandContext)
{
    VideoAsset videoAsset = Parameters.Video;

    try
    {
        // Get path to ffmpeg
        var ffmpeg = ToolLocator.LocateTool("ffmpeg.exe")?.ToWindowsPath()
            ?? throw new AssetException("Failed to compile a video asset, ffmpeg was not found.");

        // Get absolute path of asset source on disk
        var assetDirectory = videoAsset.Source.GetParent();
        var assetSource = UPath.Combine(assetDirectory, videoAsset.Source);

        //=====================================================================================
        // Get the info from the video codec and check whether we need to re-encode the video
        var mustReEncodeVideo = false;
        var sidedataStripCommand = "";

        // Check that the video file format is supported
        if (Parameters.Platform == PlatformType.Windows && videoAsset.Source.GetFileExtension() != ".mp4")
            mustReEncodeVideo = true;

        // Use an FFmpegMedia object to inspect the streams
        VideoStream videoStream = null;
        AudioStream audioStream = null;
        FFmpegUtils.PreloadLibraries();
        FFmpegUtils.Initialize();
        using (var media = new FFmpegMedia())
        {
            media.Open(assetSource.ToWindowsPath());

            // Get the first video stream
            videoStream = media.Streams.OfType<VideoStream>().FirstOrDefault();
            if (videoStream == null)
                throw new AssetException("Failed to compile a video asset. Did not find the VideoStream from the media.");

            // On Windows, the MediaEngineEx player only decodes the first video if the video is detected as stereoscopic,
            // so we remove the tags inside the video to ensure the same behavior as on other platforms (side-by-side decoded texture).
            // Unfortunately it does not seem possible to disable this behavior from the MediaEngineEx API.
            if (Parameters.Platform == PlatformType.Windows && media.IsStereoscopicVideo(videoStream))
            {
                mustReEncodeVideo = true;
                sidedataStripCommand = "-vf sidedata=delete";
            }

            // Get the first audio stream
            audioStream = media.Streams.OfType<AudioStream>().FirstOrDefault();
        }
        Size2 videoSize = new Size2(videoStream.Width, videoStream.Height);

        // Check the codec
        if (ListSupportedCodecNames != null)
        {
            if (Array.IndexOf(ListSupportedCodecNames, videoStream.Codec) < 0)
                mustReEncodeVideo = true;
        }

        // Check whether the video needs to be trimmed
        var videoDuration = videoAsset.VideoDuration;
        if (videoDuration.Enabled && (videoDuration.StartTime != TimeSpan.Zero ||
                                      videoDuration.EndTime.TotalSeconds < videoStream.Duration.TotalSeconds - MathUtil.ZeroToleranceDouble))
            mustReEncodeVideo = true;

        // Check the target against the source resolution
        Size2 targetSize;
        if (videoAsset.IsSizeInPercentage)
            targetSize = new Size2((int)(videoSize.Width * videoAsset.Width / 100.0f), (int)(videoSize.Height * videoAsset.Height / 100.0f));
        else
            targetSize = new Size2((int)(videoAsset.Width), (int)(videoAsset.Height));

        // Ensure that the size is a multiple of 2 (ffmpeg cannot output odd dimensions, at least with this codec)
        if (targetSize.Width % 2 == 1)
            targetSize.Width += 1;
        if (targetSize.Height % 2 == 1)
            targetSize.Height += 1;

        if (targetSize.Width != videoSize.Width || targetSize.Height != videoSize.Height)
            mustReEncodeVideo = true;

        // Check the audio settings
        int audioChannelsTarget = audioStream == null ? 0 : audioStream.ChannelCount;
        bool mustReEncodeAudioChannels = false;
        if (videoAsset.IsAudioChannelMono)
        {
            audioChannelsTarget = 1;
            if (audioStream != null && audioStream.ChannelCount != audioChannelsTarget)
            {
                mustReEncodeAudioChannels = true;
                mustReEncodeVideo = true;
            }
        }

        // Execute ffmpeg to convert the source to H.264
        string tempFile = null;
        try
        {
            if (mustReEncodeVideo)
            {
                string targetCodecFormat = "h264"; // hardcoded for now
                commandContext.Logger.Info(string.Format("Video Asset Compiler: \"{0}\". Re-encoding the video. Format: {1}, Size: {2}x{3}, Audio channels: {4}",
                    videoAsset.Source.GetFileName(), targetCodecFormat, targetSize.Width, targetSize.Height, audioChannelsTarget));

                tempFile = Path.GetTempFileName();
                string channelFlag = "";
                if (mustReEncodeAudioChannels)
                    channelFlag = string.Format(" -ac {0}", audioChannelsTarget);

                var startTime = videoDuration.StartTime;
                var duration = videoDuration.EndTime - videoDuration.StartTime;
                var trimmingOptions = videoDuration.Enabled
                    ? $" -ss {startTime.Hours:D2}:{startTime.Minutes:D2}:{startTime.Seconds:D2}.{startTime.Milliseconds:D3}" +
                      $" -t {duration.Hours:D2}:{duration.Minutes:D2}:{duration.Seconds:D2}.{duration.Milliseconds:D3}"
                    : "";

                var commandLine = " -hide_banner -loglevel error" +            // hide most log output
                                  " -nostdin" +                                // no interaction (background process)
                                  $" -i \"{assetSource.ToWindowsPath()}\"" +   // input file
                                  $"{trimmingOptions}" +
                                  " -f mp4 -vcodec " + targetCodecFormat +     // codec
                                  channelFlag +                                // audio channels
                                  $" -vf scale={targetSize.Width}:{targetSize.Height} " + // adjust the resolution
                                  sidedataStripCommand +                       // strip the stereoscopic sidedata tag
                                  //" -an" +                                   // no audio
                                  //" -pix_fmt yuv422p" +                      // pixel format (planar YUV 4:2:2, 16bpp, 1 Cr & Cb sample per 2x1 Y samples)
                                  $" -y \"{tempFile}\"";                       // output file (always overwrite)

                var ret = await ShellHelper.RunProcessAndGetOutputAsync(ffmpeg, commandLine, commandContext.Logger);
                if (ret != 0 || commandContext.Logger.HasErrors)
                    throw new AssetException($"Failed to compile a video asset. ffmpeg failed to convert {assetSource}.");
            }
            else
            {
                commandContext.Logger.Info(string.Format("Video Asset Compiler: \"{0}\". No re-encoding necessary.", videoAsset.Source.GetFileName()));

                // No conversion needed: read directly from the source file
                tempFile = assetSource.ToWindowsPath();
            }

            var dataUrl = Url + "_Data";
            var video = new Video.Video
            {
                CompressedDataUrl = dataUrl,
            };

            // Make sure we don't compress H.264 data
            commandContext.AddTag(new ObjectUrl(UrlType.Content, dataUrl), Builder.DoNotCompressTag);

            // Write the data
            using (var reader = new BinaryReader(new FileStream(tempFile, FileMode.Open, FileAccess.Read)))
            using (var outputStream = MicrothreadLocalDatabases.DatabaseFileProvider.OpenStream(dataUrl, VirtualFileMode.Create, VirtualFileAccess.Write, VirtualFileShare.Read, StreamFlags.Seekable))
            {
                // For now write everything at once, 2 MB at a time
                var length = reader.BaseStream.Length;
                for (var position = 0L; position < length; position += 2 << 20)
                {
                    var buffer = reader.ReadBytes(2 << 20);
                    outputStream.Write(buffer, 0, buffer.Length);
                }
            }

            var assetManager = new ContentManager(MicrothreadLocalDatabases.ProviderService);
            assetManager.Save(Url, video);

            return ResultStatus.Successful;
        }
        finally
        {
            // Only delete the temporary file if we created it (i.e. we re-encoded)
            if (mustReEncodeVideo && tempFile != null)
                File.Delete(tempFile);
        }
    }
    catch (AssetException)
    {
        throw;
    }
    catch (Exception ex)
    {
        throw new AssetException("Failed to compile a video asset. Unexpected exception.", ex);
    }
}
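//=====================================================================================
// Standalone sketch of two small details from the compiler above: the
// HH:MM:SS.mmm formatting fed to ffmpeg's -ss/-t flags, and the rounding of the
// target size up to even dimensions. The names (ToFfmpegTimestamp, RoundUpToEven)
// are illustrative, not part of the actual compiler.
using System;

static class VideoEncodeArgsSketch
{
    static string ToFfmpegTimestamp(TimeSpan t) =>
        $"{t.Hours:D2}:{t.Minutes:D2}:{t.Seconds:D2}.{t.Milliseconds:D3}";

    // Adding (n & 1) bumps odd values up by one, leaving even values untouched.
    static (int Width, int Height) RoundUpToEven(int width, int height) =>
        (width + (width & 1), height + (height & 1));

    static void Main()
    {
        var start = TimeSpan.FromSeconds(1.5);
        var duration = TimeSpan.FromSeconds(10.25);
        Console.WriteLine($"-ss {ToFfmpegTimestamp(start)} -t {ToFfmpegTimestamp(duration)}");
        // prints: -ss 00:00:01.500 -t 00:00:10.250

        var (w, h) = RoundUpToEven(853, 480);
        Console.WriteLine($"scale={w}:{h}"); // prints: scale=854:480
    }
}
// Note that, like the compiler, ToFfmpegTimestamp ignores TimeSpan.Days, so a
// clip longer than 24 hours would need the days folded into the hours field.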