Example #1
        public unsafe bool WriteOutput(MpegConverterSettings settings)
        {
            AudioFileDataHeader audioFileDataHeader = new AudioFileDataHeader
            {
                CompressionType = AudioFileDataCompressionType.EALayer3_EL31,
                Channels        = settings.NumberOfChannels == 1 ? AudioFileDataChannels.Mono : AudioFileDataChannels.Stereo,
                SampleRate      = (short)settings.SampleRate
            };

            Endian.ByteSwapInt16((ushort *)&audioFileDataHeader.SampleRate);
            MpegFrameHeader frameHeader;

            while ((frameHeader = ReadFrameHeader()) != null)
            {
                BitStreamWriter writer = new BitStreamWriter();
                writer.Write(0, 8);
                writer.Write((byte)frameHeader.Version, 2);
                writer.Write(frameHeader.SampleRateIndex, 2);
                writer.Write((byte)frameHeader.ChannelMode, 2);
                writer.Write(frameHeader.ModeExtension, 2);
                // writer.Write(...); // TODO: write the granule index flag (is granule index == 1?)
                // if granule index == 1 && version == mv_1
                //   for each channel -> 4 bits scfsi
                // for each channel -> channel info: 12 + 32 + x bits
                // add up the data size in bits
                // data size += 8 - data size % 8 (round up to a whole byte)
                // data size /= 8 (bits -> bytes)
                // if data size > 0
                //   copy data
            }
            return(true);
        }
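
The TODO comments above outline the rest of the EALayer3 granule header. A minimal sketch of that bit accounting, assuming hypothetical members frameHeader.GranuleIndex and frameHeader.Scfsi and a hypothetical helper GetChannelInfoBits (none of which appear in the example; only NumberOfChannels and the Write(value, bitCount) call are shown elsewhere), might look like this:

// Sketch only: the side-info bit counts and the padding rule are taken from the TODO
// comments above; GranuleIndex, Scfsi, and GetChannelInfoBits are assumed names.
if (frameHeader.GranuleIndex == 1 && (byte)frameHeader.Version == 0x3) // 0x3 = MPEG-1 ("mv_1" in the comment)
{
    for (int channel = 0; channel < frameHeader.NumberOfChannels; channel++)
    {
        writer.Write(frameHeader.Scfsi[channel], 4); // 4 scfsi bits per channel
    }
}

int dataSizeInBits = 0;
for (int channel = 0; channel < frameHeader.NumberOfChannels; channel++)
{
    dataSizeInBits += 12 + 32 + GetChannelInfoBits(frameHeader, channel); // channel info: 12 + 32 + x bits
}

// Pad to a whole byte and convert to bytes, following the rule in the comments above.
int dataSizeInBytes = (dataSizeInBits + (8 - dataSizeInBits % 8)) / 8;
if (dataSizeInBytes > 0)
{
    // copy dataSizeInBytes bytes of frame data to the output here
}
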
Example #2
        public unsafe bool WriteOutput(BlockInfo info, MpegConverterSettings settings)
        {
            AudioFileDataHeader audioFileDataHeader = new AudioFileDataHeader
            {
                CompressionType = AudioFileDataCompressionType.EALayer3_EL31,
                Channels        = settings.NumberOfChannels == 1 ? AudioFileDataChannels.Mono : AudioFileDataChannels.Stereo,
                SampleRate      = (short)settings.SampleRate,
                NumberOfSamples = info.NumberOfSamples,
                Size            = info.Size
            };

            Endian.ByteSwapInt16((ushort *)&audioFileDataHeader.SampleRate);
            Endian.ByteSwapInt32((uint *)&audioFileDataHeader.NumberOfSamples);
            Endian.ByteSwapInt32((uint *)&audioFileDataHeader.Size);
            if (settings.IsStreamed)
            {
                byte[] buffer = new byte[2];
                _outputSnr.Write(buffer, 0, buffer.Length);
            }
            else
            {
                byte[] buffer = new byte[sizeof(AudioFileDataHeader)];
                fixed(byte *pBuffer = &buffer[0])
                {
                    MarshalUtil.CopyMemory((IntPtr)pBuffer, (IntPtr)(byte *)&audioFileDataHeader, sizeof(AudioFileDataHeader));
                }

                _outputSnr.Write(buffer, 0, buffer.Length);
            }
            MpegFrameHeader frameHeader;

            while ((frameHeader = ReadFrameHeader()) != null)
            {
                BitStreamWriter writer = new BitStreamWriter();
                writer.Write(0, 8);
                writer.Write((byte)frameHeader.Version, 2);
                writer.Write(frameHeader.SampleRateIndex, 2);
                writer.Write((byte)frameHeader.ChannelMode, 2);
                writer.Write(frameHeader.ModeExtension, 2);
                // read frame data
                // writer.Write(...); // TODO: write the granule index flag (is granule index == 1?)
                // if granule index == 1 && version == mv_1
                //   for each channel -> 4 bits scfsi
                // for each channel -> channel info: 12 + 32 + x bits
                // add up the data size in bits
                // data size += 8 - data size % 8 (round up to a whole byte)
                // data size /= 8 (bits -> bytes)
                // if data size > 0
                //   copy data
            }
            return(true);
        }
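
Example #2 serializes AudioFileDataHeader by taking sizeof() of the struct in an unsafe context, copying its raw bytes into a managed buffer, and byte-swapping the multi-byte fields to big-endian first. That only works if the struct is blittable with a fixed layout; a sketch of a layout consistent with the fields used above (the field order, packing, and enum underlying types are assumptions, not the library's actual declaration) could be:

using System.Runtime.InteropServices;

// Sketch only: chosen so that sizeof(AudioFileDataHeader) and the raw memory copy above are well defined.
[StructLayout(LayoutKind.Sequential, Pack = 1)]
public struct AudioFileDataHeader
{
    public AudioFileDataCompressionType CompressionType; // assumed 1-byte enum
    public AudioFileDataChannels        Channels;        // assumed 1-byte enum
    public short                        SampleRate;      // byte-swapped to big-endian before writing
    public int                          NumberOfSamples; // byte-swapped to big-endian before writing
    public int                          Size;            // byte-swapped to big-endian before writing
}

public enum AudioFileDataCompressionType : byte { EALayer3_EL31 } // placeholder member set
public enum AudioFileDataChannels : byte { Mono = 1, Stereo = 2 } // placeholder values
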
Example #3
        public MpegConverterSettings GetSettings()
        {
            MpegConverterSettings result = new MpegConverterSettings();

            // At the moment we ignore everything except IsStreamed and the channel count; we assume the channel count is the same for every MP3 frame header.
            byte[] header     = new byte[10];
            long   frameStart = _input.Position;

            _input.Read(header, 0, 10);
            _input.Seek(frameStart, SeekOrigin.Begin);
            if (header[0] == 'I' && header[1] == 'D' && header[2] == '3')
            {
                SkipID3Tag(header);
            }
            // Scan forward for the 0xFF sync byte; using do-while re-reads the first byte from the
            // stream, so frameStart and the stream position stay consistent even when the file
            // already starts with a frame header.
            do
            {
                frameStart = _input.Position;
                int value = _input.ReadByte();
                if (value == -1)
                {
                    throw new BinaryAssetBuilderException(ErrorCode.InvalidArgument, "Could not find MP3 frame header.");
                }
                header[0] = (byte)value;
            } while (header[0] != 0xFF);
            _input.Read(header, 1, 9);
            _input.Seek(frameStart, SeekOrigin.Begin);
            MpegFrameHeader frameHeader = new MpegFrameHeader();

            if (!ParseMpegFrameHeader(frameHeader, header))
            {
                throw new BinaryAssetBuilderException(ErrorCode.InvalidArgument, "Could not parse MP3 frame header.");
            }
            result.NumberOfChannels = frameHeader.NumberOfChannels;
            result.SampleRate       = frameHeader.SampleRate;
            return(result);
        }
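
GetSettings relies on SkipID3Tag to move _input past a leading ID3v2 tag before scanning for the 0xFF sync byte. An ID3v2 tag starts with a 10-byte header ("ID3", two version bytes, one flags byte, and a four-byte synchsafe size that excludes the header itself), so a sketch of that helper, assuming it sits in the same class and receives the 10 header bytes already read above, could be:

// Sketch only: assumes `header` holds the 10 ID3v2 header bytes and that _input is still
// positioned at the start of the tag (as after the Seek back to frameStart above).
private void SkipID3Tag(byte[] header)
{
    // The tag size is stored as four synchsafe bytes (7 significant bits each)
    // and does not include the 10-byte ID3v2 header itself.
    int tagSize = (header[6] << 21) | (header[7] << 14) | (header[8] << 7) | header[9];

    // Bit 4 of the flags byte signals an optional 10-byte footer (ID3v2.4).
    if ((header[5] & 0x10) != 0)
    {
        tagSize += 10;
    }

    _input.Seek(10 + tagSize, SeekOrigin.Current);
}
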
Example #4
        private unsafe bool EncodeEALayer3(InstanceDeclaration declaration, AudioFile audioFile, out AssetBuffer result)
        {
            if (_platform != TargetPlatform.Win32)
            {
                throw new InvalidOperationException("Critical: EALayer3 audio compiler should not be called on non Win32 platforms.");
            }
            if (audioFile.PCCompression != PCAudioCompressionSetting.EALAYER3)
            {
                throw new InvalidOperationException("Critical: EALayer3 audio compiler should not be called on non EALayer3 compression.");
            }
            AudioFileRuntime *audioFileRuntime;
            Tracker           tracker  = new Tracker(&audioFileRuntime, _platform == TargetPlatform.Xbox360);
            string            filePath = audioFile.File;

            if (!filePath.EndsWith(".mp3"))
            {
                throw new InvalidOperationException("Critical: EALayer3 audio compiler should not be called on non MP3 files.");
            }
            bool isPathEndingWithSound = Path.GetDirectoryName(filePath).ToLower().EndsWith("\\sounds"); // flag2
            bool?isStreamedBox         = audioFile.IsStreamedOnPC;                                       // flagPtr
            int? sampleRateBox         = audioFile.PCSampleRate;                                         // numPtr1
            int  quality = audioFile.PCQuality;                                                          // num3
            // compressionType, 1 == none, 29 == xas, 28 == xma (xbox, will actually set to none or same as xas depending on the isPathEndingWithSound flag)
            MpegConverterCompressionType compression;                                                    // num4

            switch (audioFile.PCCompression)
            {
            case PCAudioCompressionSetting.NONE:
                compression = !isPathEndingWithSound ? MpegConverterCompressionType.None : MpegConverterCompressionType.XAS;
                break;

            case PCAudioCompressionSetting.XAS:
                compression = MpegConverterCompressionType.XAS;
                break;

            case PCAudioCompressionSetting.EALAYER3:
                compression = MpegConverterCompressionType.EALayer3;
                break;

            default:
                throw new BinaryAssetBuilderException(ErrorCode.InternalError, "Internal error: xml compiler returned bad PC audio compression type of {0}.", audioFile.PCCompression);
            }
            bool isStreamed = isStreamedBox ?? !isPathEndingWithSound; // flag1

            if (audioFile.SubtitleStringName != null)
            {
                Relo.Marshaler.Marshal(audioFile.SubtitleStringName, &audioFileRuntime->SubtitleStringName, tracker);
            }
            else
            {
                Relo.Marshaler.Marshal($"DIALOGEVENT:{Path.GetFileNameWithoutExtension(filePath)}SubTitle", &audioFileRuntime->SubtitleStringName, tracker);
            }
            audioFileRuntime->NumberOfChannels = 0;
            audioFileRuntime->NumberOfSamples  = 0;
            audioFileRuntime->SampleRate       = 0;
            audioFileRuntime->HeaderData       = IntPtr.Zero;
            audioFileRuntime->HeaderDataSize   = 0;
            // identify format (SIMEX_id(streamPtr))            - we don't need that, though we might want to check for mp3
            // check format needs to be 0 or 1 aka WAVE or AIFF - we don't need that, though we might want to check for mp3
            // open audio file (SIMEX_open(streamPtr, &instancePtr, format)
            using (MpegConverter converter = new MpegConverter(filePath))
            {
                string tempFile = declaration.CustomDataPath + _tempFilenameSuffix; // str2
                _tracer.TraceNote("Creating temp file {0}", tempFile);
                using (AutoCleanUpTempFiles cleanUpTempFiles = new AutoCleanUpTempFiles(tempFile))
                {
                    // set temp filename SIMEX_setfilename(str2ptr)
                    converter.SetOutputFilePath(tempFile);
                    // create temp output file SIMEX_create(null, &instancePtr, 39)
                    try
                    {
                        converter.CreateOutputFiles();
                        MpegConverterSettings converterSettings = converter.GetSettings();
                        // start iterating through elements (num elements is returned by SIMEX_open)
                        // get info SIMEX_info(instancePtr, &infoPtr, idx)
                        // read element SIMEX_read(instancePtr, infoPtr, idx)
                        if (isStreamed)
                        {
                            // infoPtr + 526 = 0x1000;
                            converterSettings.IsStreamed = true;
                            _tracer.TraceNote("Setting play location to streamed.");
                        }
                        else
                        {
                            // infoPtr + 526 = 0x0800;
                            converterSettings.IsStreamed = false;
                            _tracer.TraceNote("Setting play location to RAM.");
                        }
                        // infoPtr + 509 = compression;
                        converterSettings.CompressionType = compression;
                        _tracer.TraceNote("Setting compression type to {0}.", compression); // SIMEX_getsamplerepname(compression)
                        if (compression == MpegConverterCompressionType.XMA || compression == MpegConverterCompressionType.EALayer3)
                        {
                            if (quality < 0 || quality > 100)
                            {
                                throw new BinaryAssetBuilderException(ErrorCode.InvalidArgument, "Audio file {0}: Quality parameter must be between 0 and 100.", declaration);
                            }
                            // infoPtr + 536 = quality
                            converterSettings.CompressionQuality = quality;
                            _tracer.TraceNote("Setting compression quality to {0}.", quality);
                        }
                        // if (sampleRateBox.HasValue && sampleRateBox.Value != *(int*)(*(infoPtr + 540)))
                        if (sampleRateBox.HasValue && sampleRateBox.Value != converterSettings.SampleRate)
                        {
                            int sampleRate = sampleRateBox.Value;
                            if (sampleRate < 400 || sampleRate > 96000)
                            {
                                throw new BinaryAssetBuilderException(ErrorCode.InvalidArgument, "Audio file {0}: Sample rate must be between 400 and 96000.", declaration);
                            }
                            _tracer.TraceNote("Resampling from {0} to {1}", converterSettings.SampleRate, sampleRate); // infoPtr + 540
                            // TODO: resampling
                            _tracer.TraceWarning("Warning: Resampling is currently not implemented.");
                        }
                        // if (*(int*)infoPtr != 0)
                        audioFileRuntime->NumberOfChannels = (byte)converterSettings.NumberOfChannels; // infoPtr + 510
                        audioFileRuntime->NumberOfSamples  = converterSettings.NumberOfSamples;        // infoPtr + 544
                        audioFileRuntime->SampleRate       = converterSettings.SampleRate;             // infoPtr + 540
                        if (audioFileRuntime->NumberOfChannels != 1 && audioFileRuntime->NumberOfChannels != 2 && audioFileRuntime->NumberOfChannels != 4 && audioFileRuntime->NumberOfChannels != 6)
                        {
                            _tracer.TraceWarning("Warning: Audio file {0} has {1} channels. The only supported channel counts are 1, 2, 4, and 6; sample will probably use only the first channel in the engine.", declaration, audioFileRuntime->NumberOfChannels);
                        }
                        // TODO if (!SIMEX_write(tmpFile, infoPtr, idx))
                        if (!converter.WriteOutput(converterSettings))
                        {
                            throw new BinaryAssetBuilderException(ErrorCode.InternalError, "Internal error writing element of \"{0}\".", tempFile);
                        }
                    }
                    finally
                    {
                        converter.CloseOutputFiles();
                    }
                    string tempFileSnr = tempFile + ".snr";
                    if (isStreamed)
                    {
                        string tempFileSns = tempFile + ".sns";
                        if (File.Exists(declaration.CustomDataPath))
                        {
                            File.Delete(declaration.CustomDataPath);
                        }
                        _tracer.TraceNote("Creating output file {0}\n", declaration.CustomDataPath);
                        File.Move(tempFileSns, declaration.CustomDataPath);
                        using (Stream headerStream = new FileStream(tempFileSnr, FileMode.Open, FileAccess.ReadWrite, FileShare.None))
                        {
                            int length = (int)headerStream.Length;
                            // Allocate HeaderData through the tracker (assumption: Push allocates `length`
                            // bytes and stores the pointer at the given location) and copy the .snr header into it.
                            tracker.Push((void **)&audioFileRuntime->HeaderData, 1, length);
                            byte[] headerBuffer = new byte[length];
                            headerStream.Read(headerBuffer, 0, length);
                            fixed(byte *pHeaderBuffer = &headerBuffer[0])
                            {
                                MarshalUtil.CopyMemory(audioFileRuntime->HeaderData, (IntPtr)pHeaderBuffer, length);
                            }
                            audioFileRuntime->HeaderDataSize = length;
                            tracker.Pop();
                        }
                    }
                    else
                    {
                        if (File.Exists(declaration.CustomDataPath))
                        {
                            File.Delete(declaration.CustomDataPath);
                        }
                        _tracer.TraceNote("Creating output file {0}\n", declaration.CustomDataPath);
                        File.Move(tempFileSnr, declaration.CustomDataPath);
                    }
                    if (tracker.IsBigEndian)
                    {
                        Tracker.ByteSwap32((uint *)&audioFileRuntime->NumberOfSamples);
                        Tracker.ByteSwap32((uint *)&audioFileRuntime->SampleRate);
                        Tracker.ByteSwap32((uint *)&audioFileRuntime->HeaderDataSize);
                    }
                    result = new AssetBuffer();
                    FinalizeTracker(tracker, result);
                }
            }
            return(true);
        }
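
Across the examples, frameHeader.Version, SampleRateIndex, ChannelMode, and ModeExtension all come from the first four bytes of a standard MPEG audio frame header, which is what ParseMpegFrameHeader in Example #3 has to decode. A sketch of that bit extraction (the real method presumably maps the raw values onto the MpegFrameHeader enums and derives NumberOfChannels and SampleRate from them) could be:

// Sketch only: returns the raw two-bit fields consumed by the examples above.
private static bool TryReadFrameHeaderBits(byte[] header,
    out int version, out int sampleRateIndex, out int channelMode, out int modeExtension)
{
    version = sampleRateIndex = channelMode = modeExtension = 0;

    // 11-bit frame sync: 0xFF followed by the top three bits of the second byte.
    if (header[0] != 0xFF || (header[1] & 0xE0) != 0xE0)
    {
        return false;
    }

    version         = (header[1] >> 3) & 0x3; // 00 = MPEG 2.5, 10 = MPEG 2, 11 = MPEG 1
    int layer       = (header[1] >> 1) & 0x3; // 01 = Layer III (MP3)
    sampleRateIndex = (header[2] >> 2) & 0x3; // index into the per-version sample rate table
    channelMode     = (header[3] >> 6) & 0x3; // 11 = mono, otherwise 2 channels
    modeExtension   = (header[3] >> 4) & 0x3; // only meaningful for joint stereo

    return layer == 0x1 && sampleRateIndex != 0x3; // Layer III with a valid sample rate index
}
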