Example #1
        protected override Task<ResultStatus> DoCommandOverride(ICommandContext commandContext)
        {
            // This path for effects xml is now part of this tool, but it should be done in a separate exporter?
            using (var inputStream = File.OpenRead(SourcePath))
            using (var outputStream = ContentManager.FileProvider.OpenStream(Location, VirtualFileMode.Create, VirtualFileAccess.Write))
            {
                inputStream.CopyTo(outputStream);

                var objectURL = new ObjectUrl(UrlType.ContentLink, Location);

                if (DisableCompression)
                {
                    commandContext.AddTag(objectURL, disableCompressionSymbol);
                }
            }

            if (SaveSourcePath)
            {
                // store absolute path to source
                // TODO: the "/path" is hardcoded, used in EffectSystem and ShaderSourceManager. Find a place to share this correctly.
                var pathLocation = new UFile(Location.FullPath + "/path");
                using (var outputStreamPath = ContentManager.FileProvider.OpenStream(pathLocation, VirtualFileMode.Create, VirtualFileAccess.Write))
                {
                    using (var sw = new StreamWriter(outputStreamPath))
                    {
                        sw.Write(SourcePath.FullPath);
                    }
                }
            }

            return Task.FromResult(ResultStatus.Successful);
        }
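When SaveSourcePath is set, the command above stores the absolute source path under a companion "<location>/path" entry (the TODO notes that EffectSystem and ShaderSourceManager rely on the same hardcoded suffix). A minimal sketch of the reading side, assuming the same ContentManager.FileProvider is available and that VirtualFileMode.Open / VirtualFileAccess.Read are the matching open flags; the helper name is illustrative, not engine API:

        // Hypothetical helper (not part of the original command): reads back the absolute
        // source path that DoCommandOverride stores under "<location>/path".
        private static string TryReadSourcePath(UFile location)
        {
            var pathLocation = new UFile(location.FullPath + "/path");
            if (!ContentManager.FileProvider.FileExists(pathLocation))
                return null;

            using (var stream = ContentManager.FileProvider.OpenStream(pathLocation, VirtualFileMode.Open, VirtualFileAccess.Read))
            using (var reader = new StreamReader(stream))
            {
                return reader.ReadToEnd();
            }
        }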
Example #2
        protected override Task<ResultStatus> DoCommandOverride(ICommandContext commandContext)
        {
            // This path for effects xml is now part of this tool, but it should be done in a separate exporter?
            using (var inputStream = File.OpenRead(SourcePath))
            using (var outputStream = AssetManager.FileProvider.OpenStream(Location, VirtualFileMode.Create, VirtualFileAccess.Write))
            {
                inputStream.CopyTo(outputStream);

                var objectURL = new ObjectUrl(UrlType.ContentLink, Location);

                if (DisableCompression)
                    commandContext.AddTag(objectURL, DisableCompressionSymbol);
            }

            if (SaveSourcePath)
            {
                // store absolute path to source
                // TODO: the "/path" is hardcoded, used in EffectSystem and ShaderSourceManager. Find a place to share this correctly.
                var pathLocation = new UFile(Location.FullPath + "/path");
                using (var outputStreamPath = AssetManager.FileProvider.OpenStream(pathLocation, VirtualFileMode.Create, VirtualFileAccess.Write))
                {
                    using (var sw = new StreamWriter(outputStreamPath))
                    {
                        sw.Write(SourcePath.FullPath);
                    }
                }
            }

            return Task.FromResult(ResultStatus.Successful);
        }
Example #3
            protected override Task<ResultStatus> DoCommandOverride(ICommandContext commandContext)
            {
                var assetManager = new ContentManager();

                // Get absolute path of asset source on disk
                var assetDirectory = Parameters.Source.GetParent();
                var assetSource    = UPath.Combine(assetDirectory, Parameters.Source);

                var installationDir = DirectoryHelper.GetPackageDirectory("Xenko");
                var binDir          = UPath.Combine(installationDir, new UDirectory("Bin"));

                binDir = UPath.Combine(binDir, new UDirectory("Windows"));
                var ffmpeg = UPath.Combine(binDir, new UFile("ffmpeg.exe"));

                if (!File.Exists(ffmpeg))
                {
                    throw new AssetException("Failed to compile a sound asset, ffmpeg was not found.");
                }

                var channels    = Parameters.Spatialized ? 1 : 2;
                var tempPcmFile = Path.GetTempFileName();
                var ret         = RunProcessAndGetOutput(ffmpeg, $"-i \"{assetSource}\" -f f32le -acodec pcm_f32le -ac {channels} -ar {Parameters.SampleRate} -y \"{tempPcmFile}\"");

                if (ret != 0)
                {
                    File.Delete(tempPcmFile);
                    throw new AssetException($"Failed to compile a sound asset, ffmpeg failed to convert {assetSource}");
                }

                var encoder = new Celt(Parameters.SampleRate, CompressedSoundSource.SamplesPerFrame, channels, false);

                var uncompressed = CompressedSoundSource.SamplesPerFrame * channels * sizeof(short); //compare with int16 for CD quality comparison.. but remember we are dealing with 32 bit floats for encoding!!
                var target       = (int)Math.Floor(uncompressed / (float)Parameters.CompressionRatio);

                var dataUrl  = Url + "_Data";
                var newSound = new Sound
                {
                    CompressedDataUrl = dataUrl,
                    Channels          = channels,
                    SampleRate        = Parameters.SampleRate,
                    StreamFromDisk    = Parameters.StreamFromDisk,
                    Spatialized       = Parameters.Spatialized,
                };

                //make sure we don't compress celt data
                commandContext.AddTag(new ObjectUrl(UrlType.ContentLink, dataUrl), disableCompressionSymbol);

                var frameSize = CompressedSoundSource.SamplesPerFrame * channels;

                using (var reader = new BinaryReader(new FileStream(tempPcmFile, FileMode.Open, FileAccess.Read)))
                using (var outputStream = ContentManager.FileProvider.OpenStream(dataUrl, VirtualFileMode.Create, VirtualFileAccess.Write, VirtualFileShare.Read, StreamFlags.Seekable))
                {
                    var writer = new BinarySerializationWriter(outputStream);

                    var outputBuffer = new byte[target];
                    var buffer       = new float[frameSize];
                    var count        = 0;
                    var length       = reader.BaseStream.Length; // Cache the length, because this getter is expensive to use
                    for (var position = 0; position < length; position += sizeof(float))
                    {
                        if (count == frameSize) //flush
                        {
                            var len = encoder.Encode(buffer, outputBuffer);
                            writer.Write((short)len);
                            outputStream.Write(outputBuffer, 0, len);

                            count = 0;
                            Array.Clear(buffer, 0, frameSize);

                            newSound.NumberOfPackets++;
                            newSound.MaxPacketLength = Math.Max(newSound.MaxPacketLength, len);
                        }

                        buffer[count] = reader.ReadSingle();
                        count++;
                    }

                    if (count > 0) //flush
                    {
                        var len = encoder.Encode(buffer, outputBuffer);
                        writer.Write((short)len);
                        outputStream.Write(outputBuffer, 0, len);

                        newSound.NumberOfPackets++;
                        newSound.MaxPacketLength = Math.Max(newSound.MaxPacketLength, len);
                    }
                }

                File.Delete(tempPcmFile);

                assetManager.Save(Url, newSound);

                return Task.FromResult(ResultStatus.Successful);
            }
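The write loop above frames the compressed stream as a 16-bit packet length followed by that packet's Celt payload, which is what NumberOfPackets and MaxPacketLength describe. A minimal sketch of walking that framing back, assuming the short emitted by BinarySerializationWriter round-trips as a plain little-endian Int16; the helper is illustrative, not engine API:

            // Illustrative only: enumerate the packets produced by the command above.
            // Assumes each packet is a little-endian Int16 length followed by that many bytes of Celt data.
            private static IEnumerable<byte[]> ReadCeltPackets(Stream compressedData)
            {
                using (var reader = new BinaryReader(compressedData))
                {
                    while (reader.BaseStream.Position < reader.BaseStream.Length)
                    {
                        var packetLength = reader.ReadInt16();
                        yield return reader.ReadBytes(packetLength);
                    }
                }
            }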
Example #4
            /// <inheritdoc />
            protected override async Task<ResultStatus> DoCommandOverride(ICommandContext commandContext)
            {
                // Get path to ffmpeg
                var ffmpeg = ToolLocator.LocateTool("ffmpeg.exe")?.ToWindowsPath() ?? throw new AssetException("Failed to compile a sound asset, ffmpeg was not found.");

                // Get absolute path of asset source on disk
                var assetDirectory = Parameters.Source.GetParent();
                var assetSource    = UPath.Combine(assetDirectory, Parameters.Source);

                // Execute ffmpeg to convert source to PCM and then encode with Celt
                var tempFile = Path.GetTempFileName();

                try
                {
                    var channels    = Parameters.Spatialized ? 1 : 2;
                    var commandLine = "  -hide_banner -loglevel error" +                                          // hide most log output
                                      $" -i \"{assetSource.ToWindowsPath()}\"" +                                  // input file
                                      $" -f f32le -acodec pcm_f32le -ac {channels} -ar {Parameters.SampleRate}" + // codec
                                      $" -map 0:{Parameters.Index}" +                                             // stream index
                                      $" -y \"{tempFile}\"";                                                      // output file (always overwrite)
                    var ret = await ShellHelper.RunProcessAndGetOutputAsync(ffmpeg, commandLine, commandContext.Logger);

                    if (ret != 0 || commandContext.Logger.HasErrors)
                    {
                        throw new AssetException($"Failed to compile a sound asset, ffmpeg failed to convert {assetSource}");
                    }

                    var encoder = new Celt(Parameters.SampleRate, CompressedSoundSource.SamplesPerFrame, channels, false);

                    var uncompressed = CompressedSoundSource.SamplesPerFrame * channels * sizeof(short); //compare with int16 for CD quality comparison.. but remember we are dealing with 32 bit floats for encoding!!
                    var target       = (int)Math.Floor(uncompressed / (float)Parameters.CompressionRatio);

                    var dataUrl  = Url + "_Data";
                    var newSound = new Sound
                    {
                        CompressedDataUrl = dataUrl,
                        Channels          = channels,
                        SampleRate        = Parameters.SampleRate,
                        StreamFromDisk    = Parameters.StreamFromDisk,
                        Spatialized       = Parameters.Spatialized,
                    };

                    //make sure we don't compress celt data
                    commandContext.AddTag(new ObjectUrl(UrlType.Content, dataUrl), Builder.DoNotCompressTag);

                    var delay = encoder.GetDecoderSampleDelay();

                    var frameSize = CompressedSoundSource.SamplesPerFrame * channels;
                    using (var reader = new BinaryReader(new FileStream(tempFile, FileMode.Open, FileAccess.Read)))
                    using (var outputStream = MicrothreadLocalDatabases.DatabaseFileProvider.OpenStream(dataUrl, VirtualFileMode.Create, VirtualFileAccess.Write, VirtualFileShare.Read, StreamFlags.Seekable))
                    {
                        var writer = new BinarySerializationWriter(outputStream);

                        var outputBuffer = new byte[target];
                        var buffer       = new float[frameSize];
                        var count        = 0;
                        var padding      = sizeof(float) * channels * delay;
                        var length       = reader.BaseStream.Length; // Cache the length, because this getter is expensive to use
                        for (var position = 0; position < length + padding; position += sizeof(float))
                        {
                            if (count == frameSize) //flush
                            {
                                var len = encoder.Encode(buffer, outputBuffer);
                                writer.Write((short)len);
                                outputStream.Write(outputBuffer, 0, len);

                                newSound.Samples += count / channels;
                                newSound.NumberOfPackets++;
                                newSound.MaxPacketLength = Math.Max(newSound.MaxPacketLength, len);

                                count = 0;
                                Array.Clear(buffer, 0, frameSize);
                            }

                            // Pad with 0 once we reach end of stream (this is needed because of encoding delay)
                            buffer[count++] = (position < length)
                                ? reader.ReadSingle()
                                : 0.0f;
                        }

                        if (count > 0) //flush
                        {
                            var len = encoder.Encode(buffer, outputBuffer);
                            writer.Write((short)len);
                            outputStream.Write(outputBuffer, 0, len);

                            newSound.Samples += count / channels;
                            newSound.NumberOfPackets++;
                            newSound.MaxPacketLength = Math.Max(newSound.MaxPacketLength, len);
                        }
                    }

                    // Samples is the real sound sample count, remove the delay at the end
                    newSound.Samples -= delay;

                    var assetManager = new ContentManager(MicrothreadLocalDatabases.ProviderService);
                    assetManager.Save(Url, newSound);

                    return ResultStatus.Successful;
                }
                finally
                {
                    File.Delete(tempFile);
                }
            }
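Two details of the loop above are easy to miss. The per-packet output buffer is sized from a 16-bit reference frame divided by the compression ratio, and the extra padding of sizeof(float) * channels * delay bytes pushes delay additional zero samples per channel through the encoder to compensate for the Celt decoder delay, which is why delay is subtracted from Samples afterwards. A worked example of the buffer budget, with numbers that are assumptions for illustration rather than engine constants:

            // Illustrative packet-budget arithmetic mirroring 'uncompressed' and 'target' above.
            // The constants are assumed for the example, not values taken from the engine.
            private static int TargetPacketBufferSize()
            {
                const int samplesPerFrame = 512;    // stand-in for CompressedSoundSource.SamplesPerFrame
                const int channels = 2;
                const int compressionRatio = 10;

                var uncompressed = samplesPerFrame * channels * sizeof(short);      // 2048 bytes per frame (16-bit reference)
                return (int)Math.Floor(uncompressed / (float)compressionRatio);     // 204 bytes reserved per encoded packet
            }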
Example #5
            /// <inheritdoc />
            protected override async Task<ResultStatus> DoCommandOverride(ICommandContext commandContext)
            {
                VideoAsset videoAsset = Parameters.Video;

                try
                {
                    // Get path to ffmpeg
                    var ffmpeg = ToolLocator.LocateTool("ffmpeg.exe")?.ToWindowsPath() ?? throw new AssetException("Failed to compile a video asset, ffmpeg was not found.");

                    // Get absolute path of asset source on disk
                    var assetDirectory = videoAsset.Source.GetParent();
                    var assetSource    = UPath.Combine(assetDirectory, videoAsset.Source);

                    //=====================================================================================
                    //Get the info from the video codec

                    //Check if we need to reencode the video
                    var mustReEncodeVideo    = false;
                    var sidedataStripCommand = "";

                    // check that the video file format is supported
                    if (Parameters.Platform == PlatformType.Windows && videoAsset.Source.GetFileExtension() != ".mp4")
                    {
                        mustReEncodeVideo = true;
                    }

                    //Use FFmpegMedia object (need to check more details first before I can use it)
                    VideoStream videoStream = null;
                    AudioStream audioStream = null;
                    FFmpegUtils.PreloadLibraries();
                    FFmpegUtils.Initialize();
                    using (var media = new FFmpegMedia())
                    {
                        media.Open(assetSource.ToWindowsPath());

                        // Get the first video stream
                        videoStream = media.Streams.OfType<VideoStream>().FirstOrDefault();
                        if (videoStream == null)
                        {
                            throw new AssetException("Failed to compile a video asset. Did not find the VideoStream from the media.");
                        }

                        // On Windows, the MediaEngineEx player only decodes the first video if the video is detected as a stereoscopic video,
                        // so we remove the tags inside the video in order to ensure the same behavior as on other platforms (side-by-side decoded texture).
                        // Unfortunately it does not seem possible to disable this behavior from the MediaEngineEx API.
                        if (Parameters.Platform == PlatformType.Windows && media.IsStereoscopicVideo(videoStream))
                        {
                            mustReEncodeVideo    = true;
                            sidedataStripCommand = "-vf sidedata=delete";
                        }

                        // Get the first audio stream
                        audioStream = media.Streams.OfType<AudioStream>().FirstOrDefault();
                    }
                    Size2 videoSize = new Size2(videoStream.Width, videoStream.Height);

                    //check the format
                    if (ListSupportedCodecNames != null)
                    {
                        if (Array.IndexOf(ListSupportedCodecNames, videoStream.Codec) < 0)
                        {
                            mustReEncodeVideo = true;
                        }
                    }

                    // check if video need to be trimmed
                    var videoDuration = videoAsset.VideoDuration;
                    if (videoDuration.Enabled && (videoDuration.StartTime != TimeSpan.Zero ||
                                                  videoDuration.EndTime.TotalSeconds < videoStream.Duration.TotalSeconds - MathUtil.ZeroToleranceDouble))
                    {
                        mustReEncodeVideo = true;
                    }

                    //check the video target and source resolution
                    Size2 targetSize;
                    if (videoAsset.IsSizeInPercentage)
                    {
                        targetSize = new Size2((int)(videoSize.Width * videoAsset.Width / 100.0f), (int)(videoSize.Height * videoAsset.Height / 100.0f));
                    }
                    else
                    {
                        targetSize = new Size2((int)(videoAsset.Width), (int)(videoAsset.Height));
                    }

                    // ensure that the size is a multiple of 2 (ffmpeg cannot output video not multiple of 2, at least with this codec)
                    if (targetSize.Width % 2 == 1)
                    {
                        targetSize.Width += 1;
                    }
                    if (targetSize.Height % 2 == 1)
                    {
                        targetSize.Height += 1;
                    }

                    if (targetSize.Width != videoSize.Width || targetSize.Height != videoSize.Height)
                    {
                        mustReEncodeVideo = true;
                    }

                    //check the audio settings
                    int  audioChannelsTarget       = audioStream == null ? 0 : audioStream.ChannelCount;
                    bool mustReEncodeAudioChannels = false;
                    if (videoAsset.IsAudioChannelMono)
                    {
                        audioChannelsTarget = 1;
                        if (audioStream != null && audioStream.ChannelCount != audioChannelsTarget)
                        {
                            mustReEncodeAudioChannels = true;
                            mustReEncodeVideo         = true;
                        }
                    }

                    // Execute ffmpeg to convert source to H.264
                    string tempFile = null;
                    try
                    {
                        if (mustReEncodeVideo)
                        {
                            string targetCodecFormat = "h264";  //hardcodec for now
                            commandContext.Logger.Info(string.Format("Video Asset Compiler: \"{0}\". Re-encode the Video. Format:{1}, Size:{2}x{3}. Audio Channels:{4}",
                                                                     videoAsset.Source.GetFileName(), targetCodecFormat, targetSize.Width, targetSize.Height, audioChannelsTarget));

                            tempFile = Path.GetTempFileName();
                            string channelFlag = "";
                            if (mustReEncodeAudioChannels)
                            {
                                channelFlag = string.Format(" -ac {0}", audioChannelsTarget);
                            }

                            var startTime       = videoDuration.StartTime;
                            var duration        = videoDuration.EndTime - videoDuration.StartTime;
                            var trimmingOptions = videoDuration.Enabled ?
                                                  $" -ss {startTime.Hours:D2}:{startTime.Minutes:D2}:{startTime.Seconds:D2}.{startTime.Milliseconds:D3}" +
                                                  $" -t {duration.Hours:D2}:{duration.Minutes:D2}:{duration.Seconds:D2}.{duration.Milliseconds:D3}":
                                                  "";

                            var commandLine = "  -hide_banner -loglevel error" +                       // hide most log output
                                              "  -nostdin" +                                           // no interaction (background process)
                                              $" -i \"{assetSource.ToWindowsPath()}\"" +               // input file
                                              $"{trimmingOptions}" +
                                              "  -f mp4 -vcodec " + targetCodecFormat +                // codec
                                              channelFlag +                                            // audio channels
                                              $"  -vf scale={targetSize.Width}:{targetSize.Height} " + // adjust the resolution
                                              sidedataStripCommand +                                   // strip of stereoscopic sidedata tag
                                                                                                       //" -an" + // no audio
                                                                                                       //" -pix_fmt yuv422p" + // pixel format (planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples))
                                              $" -y \"{tempFile}\"";                                   // output file (always overwrite)
                            var ret = await ShellHelper.RunProcessAndGetOutputAsync(ffmpeg, commandLine, commandContext.Logger);

                            if (ret != 0 || commandContext.Logger.HasErrors)
                            {
                                throw new AssetException($"Failed to compile a video asset. ffmpeg failed to convert {assetSource}.");
                            }
                        }
                        else
                        {
                            commandContext.Logger.Info(string.Format("Video Asset Compiler: \"{0}\". No Re-encoding necessary",
                                                                     videoAsset.Source.GetFileName()));

                            // Use temporary file
                            tempFile = assetSource.ToWindowsPath();
                        }

                        var dataUrl = Url + "_Data";
                        var video   = new Video.Video
                        {
                            CompressedDataUrl = dataUrl,
                        };

                        // Make sure we don't compress h264 data
                        commandContext.AddTag(new ObjectUrl(UrlType.Content, dataUrl), Builder.DoNotCompressTag);

                        // Write the data
                        using (var reader = new BinaryReader(new FileStream(tempFile, FileMode.Open, FileAccess.Read)))
                        using (var outputStream = MicrothreadLocalDatabases.DatabaseFileProvider.OpenStream(dataUrl, VirtualFileMode.Create, VirtualFileAccess.Write, VirtualFileShare.Read, StreamFlags.Seekable))
                        {
                            // For now write everything at once, 2MB at a time
                            var length = reader.BaseStream.Length;
                            for (var position = 0L; position < length; position += 2 << 20)
                            {
                                var buffer = reader.ReadBytes(2 << 20);
                                outputStream.Write(buffer, 0, buffer.Length);
                            }
                        }

                        var assetManager = new ContentManager(MicrothreadLocalDatabases.ProviderService);
                        assetManager.Save(Url, video);

                        return ResultStatus.Successful;
                    }
                    finally
                    {
                        if (mustReEncodeVideo)
                        {
                            if (tempFile != null)
                            {
                                File.Delete(tempFile);
                            }
                        }
                    }
                }
                catch (AssetException)
                {
                    throw;
                }
                catch (Exception ex)
                {
                    throw new AssetException("Failed to compile a video asset. Unexpected exception.", ex);
                }
            }
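For a concrete picture of what the re-encoding branch above launches, here is roughly the argument string it assembles for a hypothetical case: a 1 s start / 5 s duration trim, a 1280x720 target, mono audio, and no stereoscopic sidedata stripping. The paths and sizes are made up for the illustration, and the extra whitespace produced by the concatenation (harmless to ffmpeg) is collapsed for readability:

                    // Hypothetical end result of the command-line building above (paths and values are illustrative only).
                    const string exampleCommandLine =
                        " -hide_banner -loglevel error -nostdin" +
                        " -i \"C:\\assets\\intro.mov\"" +
                        " -ss 00:00:01.000 -t 00:00:05.000" +
                        " -f mp4 -vcodec h264 -ac 1" +
                        " -vf scale=1280:720" +
                        " -y \"C:\\temp\\video.tmp\"";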
Example #6
        public static ResultStatus ImportStreamableTextureImage(ContentManager assetManager, TextureTool textureTool, TexImage texImage, TextureHelper.ImportParameters convertParameters, CancellationToken cancellationToken, ICommandContext commandContext)
        {
            // Perform normal texture importing (but don't save it to file now)
            var importResult = TextureHelper.ImportTextureImageRaw(textureTool, texImage, convertParameters, cancellationToken, commandContext.Logger);

            if (importResult != ResultStatus.Successful)
            {
                return importResult;
            }

            // Make sure we don't compress mips data
            var dataUrl = convertParameters.OutputUrl + "_Data";

            commandContext.AddTag(new ObjectUrl(UrlType.Content, dataUrl), Builder.DoNotCompressTag);

            using (var outputImage = textureTool.ConvertToStrideImage(texImage))
            {
                if (cancellationToken.IsCancellationRequested)
                {
                    return ResultStatus.Cancelled;
                }

                // Create texture mips data containers (storage all array slices for every mip in separate chunks)
                var          desc     = outputImage.Description;
                List<byte[]> mipsData = new List<byte[]>(desc.MipLevels);
                for (int mipIndex = 0; mipIndex < desc.MipLevels; mipIndex++)
                {
                    int totalSize = 0;
                    for (int arrayIndex = 0; arrayIndex < desc.ArraySize; arrayIndex++)
                    {
                        var pixelBuffer = outputImage.GetPixelBuffer(arrayIndex, 0, mipIndex);
                        totalSize += pixelBuffer.BufferStride;
                    }

                    var buf        = new byte[totalSize];
                    int startIndex = 0;
                    for (int arrayIndex = 0; arrayIndex < desc.ArraySize; arrayIndex++)
                    {
                        var pixelBuffer = outputImage.GetPixelBuffer(arrayIndex, 0, mipIndex);
                        int size        = pixelBuffer.BufferStride;

                        Marshal.Copy(pixelBuffer.DataPointer, buf, startIndex, size);
                        startIndex += size;
                    }
                    mipsData.Add(buf);
                }

                // Pack mip maps to the storage container
                ContentStorageHeader storageHeader;
                ContentStorage.Create(assetManager, dataUrl, mipsData, out storageHeader);

                if (cancellationToken.IsCancellationRequested)
                {
                    return ResultStatus.Cancelled;
                }

                // Serialize texture to file
                var outputTexture = new TextureSerializationData(outputImage, true, storageHeader);
                assetManager.Save(convertParameters.OutputUrl, outputTexture.ToSerializableVersion(), typeof(Texture));

                commandContext.Logger.Verbose($"Compression successful [{dataUrl}] to ({outputImage.Description.Width}x{outputImage.Description.Height},{outputImage.Description.Format})");
            }

            return ResultStatus.Successful;
        }
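Each chunk added to mipsData above concatenates the pixel buffers of every array slice for one mip level, in array-index order. A hypothetical reverse of that packing loop, assuming all slices of a given mip share the same BufferStride (which the packing code also relies on); the helper is illustrative and not part of the texture pipeline:

        // Illustrative only: split one mip chunk produced above back into its per-slice buffers.
        private static List<byte[]> SplitMipChunk(byte[] mipChunk, int arraySize)
        {
            var sliceSize = mipChunk.Length / arraySize;
            var slices = new List<byte[]>(arraySize);
            for (int arrayIndex = 0; arrayIndex < arraySize; arrayIndex++)
            {
                var slice = new byte[sliceSize];
                Buffer.BlockCopy(mipChunk, arrayIndex * sliceSize, slice, 0, sliceSize);
                slices.Add(slice);
            }
            return slices;
        }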