Example #1
0
        /// <summary>
        ///   Creates a new instance of this class.
        /// </summary>
        /// <param name="task">Task associated with this recording session.</param>
        /// <param name="region">Effective region.</param>
        /// <exception cref="TaskException">
        ///   Thrown when the codec type cannot be resolved, instantiated or configured.
        /// </exception>
        internal RecordingSession(Task task, Rectangle region)
        {
            this.region = region;
            this.task   = task;

            try {
                // Resolve the codec type by name (throwOnError: true, ignoreCase: true).
                Type codecType = Type.GetType(task.Codec.CodecType, true, true) ??
                                 throw new InvalidOperationException("No such codec loaded.");

                // FIX: the previous `as IVideoCodec` cast silently produced null when the
                // resolved type did not implement IVideoCodec, deferring the failure to a
                // later NullReferenceException. Fail fast here instead; the exception is
                // converted into a TaskException by the catch block below, so callers see
                // the same exception type as for any other initialization failure.
                // (Note: Activator.CreateInstance runs the type's constructor — the object
                // is fully initialized, so Options is applied after construction.)
                this.codec = Activator.CreateInstance(codecType) as IVideoCodec ??
                             throw new InvalidOperationException(
                                 $"Codec type \"{task.Codec.CodecType}\" does not implement {nameof(IVideoCodec)}.");

                if (task.Codec.Options is Dictionary <string, object> userOptions &&
                    this.codec is IHasOptions configurableObject)
                {
                    // Copy the user-supplied options onto the codec, creating the target
                    // dictionary if the codec does not provide one.
                    configurableObject.Options = configurableObject.Options ?? new Dictionary <string, object>();

                    foreach (KeyValuePair <string, object> pair in userOptions)
                    {
                        configurableObject.Options[pair.Key] = pair.Value;
                    }
                }
            } catch (Exception exception) {
                Log.WriteLine(LogLevel.Error, $"error initializing codec {task.Codec.CodecType}: {exception}");
                throw new TaskException(Resources.TaskHelper_EncodingFailedCaption,
                                        Resources.TaskHelper_EncodingInitializationFailedContent,
                                        exception);
            }
        }
Example #2
0
        /// <summary>
        ///   Tears down the video codec state: stops the decoder thread, releases the
        ///   codec reference and frees the memory/EDRAM allocations, then delays the
        ///   calling kernel thread to emulate the real call's latency.
        /// </summary>
        /// <returns>Always 0 (success).</returns>
        public virtual int sceVideocodecDelete()
        {
            // Stop and discard the decoder thread, if one was started.
            if (videocodecDecoderThread != null)
            {
                videocodecDecoderThread.exit();
                videocodecDecoderThread = null;
            }

            // FIX: the original guarded this assignment with `if (videoCodec != null)`,
            // which is redundant — assigning null unconditionally has the same effect.
            videoCodec = null;

            // Release the two memory allocations made during decoding, if present.
            if (memoryInfo != null)
            {
                Modules.SysMemUserForUserModule.free(memoryInfo);
                memoryInfo = null;
            }

            if (edramInfo != null)
            {
                Modules.SysMemUserForUserModule.free(edramInfo);
                edramInfo = null;
            }

            // Emulate the duration of the real sceVideocodecDelete call.
            Modules.ThreadManForUserModule.hleKernelDelayThread(videocodecDeleteDelay, false);

            return(0);
        }
Example #3
0
        /// <summary>
        ///   Creates the media controller: wires the packet handlers onto the connection,
        ///   sets up audio/video codecs, jitter handling and statistics, starts the audio
        ///   and video transmit threads, and pre-allocates the buffer object pools.
        /// </summary>
        /// <param name="config">Media configuration; supplies the local SSRC id and the codec factory.</param>
        /// <param name="playedAudioFormat">Format of the audio played to the speaker; sizes the silent/decode buffers.</param>
        /// <param name="mediaStats">Statistics sink shared with the logger, codecs and jitter queue.</param>
        /// <param name="mediaEnvironment">Environment abstraction stored for later use.</param>
        /// <param name="mediaConnection">Connection whose audio/video packet handlers are set here.</param>
        /// <param name="videoQualityController">Quality controller; receives the remote-sessions map created here.</param>
        public MediaController(MediaConfig config, AudioFormat playedAudioFormat, MediaStatistics mediaStats, IMediaEnvironment mediaEnvironment, IMediaConnection mediaConnection, IVideoQualityController videoQualityController)
        {
            // Initialize the class variables.
            _mediaEnvironment                  = mediaEnvironment;
            MediaConfig                        = config;
            MediaStats                         = mediaStats;
            MediaConnection                    = mediaConnection;
            VideoQualityController             = videoQualityController;
            // Route incoming packets from the connection into this controller.
            MediaConnection.AudioPacketHandler = HandleAudioPacket;
            MediaConnection.VideoPacketHandler = HandleVideoPacket;

            Logger         = new MediaControllerLogger(VideoQualityController, MediaStats);
            _localSsrcId   = config.LocalSsrcId;
            // Shared map of remote sessions, keyed by SSRC id; the quality controller
            // observes the same instance.
            RemoteSessions = new Dictionary <ushort, VideoThreadData>();
            VideoQualityController.RemoteSessions = RemoteSessions;
            PlayedAudioFormat = playedAudioFormat;

            _silentBytes        = new byte[PlayedAudioFormat.BytesPerFrame];
            _decodedFrameBuffer = new short[PlayedAudioFormat.SamplesPerFrame * 10];             // Make room for 10 frames.

            _codecFactory = config.CodecFactory;
            _videoEncoder = _codecFactory.GetVideoEncoder(VideoQualityController, MediaStats);

            // Instantiate the audio jitter class
            _audioJitter = new AudioJitterQueue(_codecFactory, VideoQualityController, MediaStats);
            _audioJitter.CodecTypeChanged += audioJitter_CodecTypeChanged;

            _audioDecodeBuffer = new byte[VideoConstants.MaxPayloadSize];
            _audioSendBuffer   = new ByteStream(RtpPacketData.DataPacketMaxLength);

            // Spin up the various audio and video encoding threads.
            // On multiprocessor machines, these can spread the load, but even on single-processor machines it helps a great deal
            // if the various audio and video sinks can return immediately.
            _audioEncodeResetEvent  = new ManualResetEvent(false);
            _audioEncodeThread      = new Thread(TransmitAudio);
            _audioEncodeThread.Name = "MediaController.TransmitAudio";
            _audioEncodeThread.Start();
            _videoEncodeResetEvent    = new ManualResetEvent(false);
            _videoTransmitThread      = new Thread(TransmitVideo);
            _videoTransmitThread.Name = "MediaController.TransmitVideo";
            _videoTransmitThread.Start();

            // Create the object pools that will help us reduce time spent in garbage collection.
            _videoBufferPool  = new ObjectPool <ByteStream>(() => new ByteStream(VideoConstants.MaxPayloadSize * 2), bs => bs.Reset());
            _packetBufferPool = new ObjectPool <ByteStream>(() => new ByteStream(RtpPacketData.DataPacketMaxLength), bs => bs.Reset());
            _videoChunkPool   = new ObjectPool <Chunk>(() => new Chunk {
                Payload = new ByteStream(VideoConstants.MaxPayloadSize * 2)
            }, chunk => { chunk.SsrcId = 0; chunk.Payload.Reset(); });

            // Observable statistics for the three audio volume channels.
            AudioStats = new ObservableCollection <AudioStatistics>();

            _speakerStatistics    = new AudioStatistics("Volume:Sent to Speaker", MediaStats);
            _microphoneStatistics = new AudioStatistics("Volume:Received from Microphone", MediaStats);
            _cancelledStatistics  = new AudioStatistics("Volume:Echo Cancelled", MediaStats);

            AudioStats.Add(_speakerStatistics);
            AudioStats.Add(_microphoneStatistics);
            AudioStats.Add(_cancelledStatistics);
        }
Example #4
0
 /// <summary>
 ///   Creates the per-remote-stream state: the stream id, its decoder, the chunk
 ///   queue, the wakeup event and a fresh camera validator.
 /// </summary>
 /// <param name="ssrcId">SSRC id identifying the remote stream.</param>
 /// <param name="decoder">Video decoder dedicated to this stream.</param>
 public VideoThreadData(ushort ssrcId, IVideoCodec decoder)
 {
     // Identify the remote stream and attach its decoder.
     SsrcId  = ssrcId;
     Decoder = decoder;

     // Presize the queue to twice the per-stream block limit so it never has to resize.
     VideoChunkQueue = new Queue <Chunk>(VideoConstants.MaxQueuedBlocksPerStream * 2);
     ResetEvent      = new ManualResetEvent(false);
     Validator       = new RemoteCameraValidatorEntity();
 }
Example #5
0
 /// <summary>
 ///   Captures everything a codec test run needs: the quality controller, the codec
 ///   under test, the raw and processed frame sets and the shared chunk pool.
 /// </summary>
 public TestInstance(IVideoQualityController videoQualityController, IVideoCodec videoCodec, List <byte[]> rawFrames, List <byte[]> processedFrames, ObjectPool <ByteStream> videoChunkPool)
 {
     // Wire up the collaborators for this test run.
     VideoQualityController = videoQualityController;
     VideoCodec             = videoCodec;
     mVideoChunkPool        = videoChunkPool;

     // Capture the frame data. NOTE(review): RawSize assumes every raw frame has the
     // same length as the first — confirm against the callers that build rawFrames.
     RawFrames       = rawFrames;
     ProcessedFrames = processedFrames;
     RawSize         = rawFrames.Count * rawFrames[0].Length;
 }
Example #6
0
        /// <summary>
        ///   Releases the video state: drops the codec reference and closes the input
        ///   stream, ignoring any close failure.
        /// </summary>
        private void closeVideo()
        {
            videoCodec = null;

            // Nothing more to do when no input stream is open.
            if (@is == null)
            {
                return;
            }

            try
            {
                @is.Close();
            }
            catch (IOException)
            {
                // Closing is best-effort; a failure here is deliberately ignored.
            }
            @is = null;
        }
Example #7
0
        /// <summary>
        ///   Prepares video playback: parses the PSMF header, creates and initializes a
        ///   codec from the factory, and resets the frame counter and start timestamp.
        /// </summary>
        /// <returns>True on success; false when the PSMF header could not be read.</returns>
        private bool startVideo()
        {
            endOfVideo = false;

            // Bail out early when the PSMF header cannot be parsed.
            if (!readPsmfHeader())
            {
                return false;
            }

            // Fresh codec instance for the new stream.
            videoCodec = CodecFactory.VideoCodec;
            videoCodec.init(null);

            // Reset playback timing.
            frame     = 0;
            startTime = DateTimeHelper.CurrentUnixTimeMillis();

            return true;
        }
Example #8
0
        private void hleVideocodecDecoderStep(TPointer buffer, int type, int threadUid, long threadWakeupMicroTime)
        {
            if (buffer == null)
            {
                return;
            }

            int mp4Data = buffer.getValue32(36) | MemoryMap.START_RAM;
            int mp4Size = buffer.getValue32(40);

            if (log.TraceEnabled)
            {
                log.trace(string.Format("sceVideocodecDecode mp4Data:{0}", Utilities.getMemoryDump(mp4Data, mp4Size)));
            }

            if (videoCodec == null)
            {
                videoCodec = CodecFactory.VideoCodec;
                videoCodec.init(null);
            }

            int[]         mp4Buffer    = getIntBuffer(mp4Size);
            IMemoryReader memoryReader = MemoryReader.getMemoryReader(mp4Data, mp4Size, 1);

            for (int i = 0; i < mp4Size; i++)
            {
                mp4Buffer[i] = memoryReader.readNext();
            }

            int result = videoCodec.decode(mp4Buffer, 0, mp4Size);

            //if (log.DebugEnabled)
            {
                Console.WriteLine(string.Format("sceVideocodecDecode videoCodec returned 0x{0:X} from 0x{1:X} data bytes", result, mp4Size));
            }

            releaseIntBuffer(mp4Buffer);

            buffer.setValue32(8, 0);

            int frameWidth  = videoCodec.ImageWidth;
            int frameHeight = videoCodec.ImageHeight;

            if (log.TraceEnabled)
            {
                log.trace(string.Format("sceVideocodecDecode codec image size {0:D}x{1:D}, frame size {2:D}x{3:D}", videoCodec.ImageWidth, videoCodec.ImageHeight, frameWidth, frameHeight));
            }
            int frameBufferWidthY  = videoCodec.ImageWidth;
            int frameBufferWidthCr = frameBufferWidthY / 2;
            int frameBufferWidthCb = frameBufferWidthY / 2;

            Memory   mem     = buffer.Memory;
            TPointer buffer2 = buffer.getPointer(16);

            switch (type)
            {
            case 0:
                buffer2.setValue32(8, frameWidth);
                buffer2.setValue32(12, frameHeight);
                buffer2.setValue32(28, 1);
                buffer2.setValue32(32, videoCodec.hasImage());
                buffer2.setValue32(36, !videoCodec.hasImage());

                if (videoCodec.hasImage())
                {
                    if (memoryInfo == null)
                    {
                        int sizeY1  = alignUp(((frameWidth + 16) >> 5) * (frameHeight >> 1) * 16, 0x1FF);
                        int sizeY2  = alignUp((frameWidth >> 5) * (frameHeight >> 1) * 16, 0x1FF);
                        int sizeCr1 = alignUp(((frameWidth + 16) >> 5) * (frameHeight >> 1) * 8, 0x1FF);
                        int sizeCr2 = alignUp((frameWidth >> 5) * (frameHeight >> 1) * 8, 0x1FF);
                        int size    = 256 + (sizeY1 + sizeY2 + sizeCr1 + sizeCr2) * 2 * buffers.Length;

                        memoryInfo = Modules.SysMemUserForUserModule.malloc(SysMemUserForUser.KERNEL_PARTITION_ID, "sceVideocodecDecode", SysMemUserForUser.PSP_SMEM_Low, size, 0);

                        int @base = memoryInfo.addr;

                        bufferUnknown1 = @base;
                        mem.memset(bufferUnknown1, (sbyte)0, 36);

                        bufferUnknown2 = @base + 36;
                        mem.memset(bufferUnknown2, (sbyte)0, 32);

                        int yuvBuffersBase = @base + 256;                                 // Add 256 to keep aligned
                        int base1          = yuvBuffersBase & EDRAM_MEMORY_MASK;
                        int base2          = base1 + (sizeY1 + sizeY2) * buffers.Length;
                        int step           = (sizeY1 + sizeY2 + sizeCr1 + sizeCr2) * buffers.Length;
                        for (int i = 0; i < buffers.Length; i++)
                        {
                            buffers[i][0] = base1;
                            buffers[i][1] = buffers[i][0] + step;
                            buffers[i][2] = base1 + sizeY1;
                            buffers[i][3] = buffers[i][2] + step;
                            buffers[i][4] = base2;
                            buffers[i][5] = buffers[i][4] + step;
                            buffers[i][6] = base2 + sizeCr1;
                            buffers[i][7] = buffers[i][6] + step;

                            base1 += sizeY1 + sizeY2;
                            base2 += sizeCr1 + sizeCr2;
                        }
                    }

                    int buffersIndex = frameCount % 3;
                    int width        = videoCodec.ImageWidth;
                    int height       = videoCodec.ImageHeight;

                    int[] luma = getIntBuffer(width * height);
                    int[] cb   = getIntBuffer(width * height / 4);
                    int[] cr   = getIntBuffer(width * height / 4);
                    if (videoCodec.getImage(luma, cb, cr) == 0)
                    {
                        // The PSP is storing the YCbCr information in a non-linear format.
                        // By analyzing the output of sceMpegBaseYCrCbCopy on a real PSP,
                        // the following format for the YCbCr was found:
                        // the image is divided vertically into bands of 32 pixels.
                        // Each band is stored vertically into different buffers.
                        // The Y information is stored as 1 byte per pixel.
                        // The Cb information is stored as 1 byte for a square of four pixels (2x2).
                        // The Cr information is stored as 1 byte for a square of four pixels (2x2).
                        // For a square of four pixels, the one Cb byte is stored first,
                        // followed by the one Cr byte.
                        //
                        // - buffer0:
                        //     storing the Y information of the first block
                        //     of 16 pixels of a 32 pixels wide vertical band.
                        //     Starting at the image pixel (x=0,y=0),
                        //     16 horizontal pixels are stored sequentially in the buffer,
                        //     followed by 16 pixels of the next next image row (i.e. every 2nd row).
                        //     The rows are stored from the image top to the image bottom.
                        //     [x=0-15,y=0], [x=0-15,y=2], [x=0-15,y=4]...
                        //     [x=32-47,y=0], [x=32-47,y=2], [x=32-47,y=4]...
                        //     [x=64-79,y=0], [x=64-79,y=2], [x=64-79,y=4]...
                        // - buffer1:
                        //     storing the Y information of the second block
                        //     of 16 pixels of a 32 pixels wide vertical band.
                        //     Starting at the image pixel (x=16,y=0),
                        //     16 horizontal pixels are stored sequentially in the buffer,
                        //     followed by 16 pixels of the next next image row (i.e. every 2nd row).
                        //     The rows are stored from the image top to the image bottom.
                        //     [x=16-31,y=0], [x=16-31,y=2], [x=16-31,y=4]...
                        //     [x=48-63,y=0], [x=48-63,y=2], [x=48-63,y=4]...
                        //     [x=80-95,y=0], [x=80-95,y=2], [x=80-95,y=4]...
                        // - buffer2:
                        //     storing the Y information of the first block of 16 pixels
                        //     of a 32 pixels wide vertical band.
                        //     Starting at the image pixel (x=0,y=1),
                        //     16 horizontal pixels are stored sequentially in the buffer,
                        //     followed by 16 pixels of the next next image row (i.e. every 2nd row).
                        //     The rows are stored from the image top to the image bottom.
                        //     [x=0-15,y=1], [x=0-15,y=3], [x=0-15,y=5]...
                        //     [x=32-47,y=1], [x=32-47,y=3], [x=32-47,y=5]...
                        //     [x=64-79,y=1], [x=64-79,y=3], [x=64-79,y=5]...
                        // - buffer3:
                        //     storing the Y information of the second block of 16 pixels
                        //     of a 32 pixels wide vertical band.
                        //     Starting at the image pixel (x=16,y=1),
                        //     16 horizontal pixels are stored sequentially in the buffer,
                        //     followed by 16 pixels of the next next image row (i.e. every 2nd row).
                        //     The rows are stored from the image top to the image bottom.
                        //     [x=16-31,y=1], [x=16-31,y=3], [x=16-31,y=5]...
                        //     [x=48-63,y=1], [x=48-63,y=3], [x=48-63,y=5]...
                        //     [x=80-95,y=1], [x=80-95,y=3], [x=80-95,y=5]...
                        // - buffer4:
                        //     storing the Cb and Cr information of the first block
                        //     of 16 pixels of a 32 pixels wide vertical band.
                        //     Starting at the image pixel (x=0,y=0),
                        //     8 byte pairs of (Cb,Cr) are stored sequentially in the buffer
                        //     (representing 16 horizontal pixels),
                        //     then the next 3 rows are being skipped,
                        //     and then followed by 8 byte pairs of the next image row (i.e. every 4th row).
                        //     The rows are stored from the image top to the image bottom.
                        //     CbCr[x=0,y=0], CbCr[x=2,y=0], CbCr[x=4,y=0], CbCr[x=6,y=0], CbCr[x=8,y=0], CbCr[x=10,y=0], CbCr[x=12,y=0], CbCr[x=14,y=0]
                        //     CbCr[x=32,y=0], CbCr[x=34,y=0], CbCr[x=36,y=0], CbCr[x=38,y=0], CbCr[x=40,y=0], CbCr[x=42,y=0], CbCr[x=44,y=0], CbCr[x=46,y=0]
                        //     ...
                        //     CbCr[x=0,y=4], CbCr[x=2,y=4], CbCr[x=4,y=4], CbCr[x=6,y=4], CbCr[x=8,y=4], CbCr[x=10,y=4], CbCr[x=12,y=4], CbCr[x=14,y=4]
                        //     CbCr[x=32,y=4], CbCr[x=34,y=4], CbCr[x=36,y=4], CbCr[x=38,y=4], CbCr[x=40,y=4], CbCr[x=42,y=4], CbCr[x=44,y=4], CbCr[x=46,y=4]
                        //     ...
                        // - buffer5:
                        //     storing the Cb and Cr information of the first block
                        //     of 16 pixels of a 32 pixels wide vertical band.
                        //     Starting at the image pixel (x=0,y=2),
                        //     8 byte pairs of (Cb,Cr) are stored sequentially in the buffer
                        //     (representing 16 horizontal pixels),
                        //     then the next 3 rows are being skipped,
                        //     and then followed by 8 byte pairs of the next image row (i.e. every 4th row).
                        //     The rows are stored from the image top to the image bottom.
                        //     CbCr[x=0,y=2], CbCr[x=2,y=2], CbCr[x=4,y=2], CbCr[x=6,y=2], CbCr[x=8,y=2], CbCr[x=10,y=2], CbCr[x=12,y=2], CbCr[x=14,y=2]
                        //     CbCr[x=32,y=2], CbCr[x=34,y=2], CbCr[x=36,y=2], CbCr[x=38,y=2], CbCr[x=40,y=2], CbCr[x=42,y=2], CbCr[x=44,y=2], CbCr[x=46,y=2]
                        //     ...
                        //     CbCr[x=0,y=6], CbCr[x=2,y=6], CbCr[x=4,y=6], CbCr[x=6,y=6], CbCr[x=8,y=6], CbCr[x=10,y=6], CbCr[x=12,y=6], CbCr[x=14,y=6]
                        //     CbCr[x=32,y=6], CbCr[x=34,y=6], CbCr[x=36,y=6], CbCr[x=38,y=6], CbCr[x=40,y=6], CbCr[x=42,y=6], CbCr[x=44,y=6], CbCr[x=46,y=6]
                        //     ...
                        // - buffer6:
                        //     storing the Cb and Cr information of the second block
                        //     of 16 pixels of a 32 pixels wide vertical band.
                        //     Starting at the image pixel (x=16,y=0),
                        //     8 byte pairs of (Cb,Cr) are stored sequentially in the buffer
                        //     (representing 16 horizontal pixels),
                        //     then the next 3 rows are being skipped,
                        //     and then followed by 8 byte pairs of the next image row (i.e. every 4th row).
                        //     The rows are stored from the image top to the image bottom.
                        //     CbCr[x=16,y=0], CbCr[x=18,y=0], CbCr[x=20,y=0], CbCr[x=22,y=0], CbCr[x=24,y=0], CbCr[x=26,y=0], CbCr[x=28,y=0], CbCr[x=30,y=0]
                        //     CbCr[x=48,y=0], CbCr[x=50,y=0], CbCr[x=52,y=0], CbCr[x=54,y=0], CbCr[x=56,y=0], CbCr[x=58,y=0], CbCr[x=60,y=0], CbCr[x=62,y=0]
                        //     ...
                        //     CbCr[x=16,y=4], CbCr[x=18,y=4], CbCr[x=20,y=4], CbCr[x=22,y=4], CbCr[x=24,y=4], CbCr[x=26,y=4], CbCr[x=28,y=4], CbCr[x=30,y=4]
                        //     CbCr[x=48,y=4], CbCr[x=50,y=4], CbCr[x=52,y=4], CbCr[x=54,y=4], CbCr[x=56,y=4], CbCr[x=58,y=4], CbCr[x=60,y=4], CbCr[x=62,y=4]
                        //     ...
                        // - buffer7:
                        //     storing the Cb and Cr information of the second block
                        //     of 16 pixels of a 32 pixels wide vertical band.
                        //     Starting at the image pixel (x=16,y=2),
                        //     8 byte pairs of (Cb,Cr) are stored sequentially in the buffer
                        //     (representing 16 horizontal pixels),
                        //     then the next 3 rows are being skipped,
                        //     and then followed by 8 byte pairs of the next image row (i.e. every 4th row).
                        //     The rows are stored from the image top to the image bottom.
                        //     CbCr[x=16,y=2], CbCr[x=18,y=2], CbCr[x=20,y=2], CbCr[x=22,y=2], CbCr[x=24,y=2], CbCr[x=26,y=2], CbCr[x=28,y=2], CbCr[x=30,y=2]
                        //     CbCr[x=48,y=2], CbCr[x=50,y=2], CbCr[x=52,y=2], CbCr[x=54,y=2], CbCr[x=56,y=2], CbCr[x=58,y=2], CbCr[x=60,y=2], CbCr[x=62,y=2]
                        //     ...
                        //     CbCr[x=16,y=6], CbCr[x=18,y=6], CbCr[x=20,y=6], CbCr[x=22,y=6], CbCr[x=24,y=6], CbCr[x=26,y=6], CbCr[x=28,y=6], CbCr[x=30,y=6]
                        //     CbCr[x=48,y=6], CbCr[x=50,y=6], CbCr[x=52,y=6], CbCr[x=54,y=6], CbCr[x=56,y=6], CbCr[x=58,y=6], CbCr[x=60,y=6], CbCr[x=62,y=6]
                        //     ...
                        int width2    = width / 2;
                        int height2   = height / 2;
                        int sizeY1    = ((width + 16) >> 5) * (height >> 1) * 16;
                        int sizeY2    = (width >> 5) * (height >> 1) * 16;
                        int sizeCrCb1 = sizeY1 >> 1;
                        int sizeCrCb2 = sizeY1 >> 1;

                        int[] bufferY1 = getIntBuffer(sizeY1);
                        for (int x = 0, j = 0; x < width; x += 32)
                        {
                            for (int y = 0, i = x; y < height; y += 2, j += 16, i += 2 * width)
                            {
                                Array.Copy(luma, i, bufferY1, j, 16);
                            }
                        }
                        write(buffers[buffersIndex][0] | MemoryMap.START_RAM, sizeY1, bufferY1, 0);

                        int[] bufferY2 = getIntBuffer(sizeY2);
                        for (int x = 16, j = 0; x < width; x += 32)
                        {
                            for (int y = 0, i = x; y < height; y += 2, j += 16, i += 2 * width)
                            {
                                Array.Copy(luma, i, bufferY2, j, 16);
                            }
                        }
                        write(buffers[buffersIndex][1] | MemoryMap.START_RAM, sizeY2, bufferY2, 0);

                        int[] bufferCrCb1 = getIntBuffer(sizeCrCb1);
                        for (int x = 0, j = 0; x < width2; x += 16)
                        {
                            for (int y = 0; y < height2; y += 2)
                            {
                                for (int xx = 0, i = y * width2 + x; xx < 8; xx++, i++)
                                {
                                    bufferCrCb1[j++] = cb[i];
                                    bufferCrCb1[j++] = cr[i];
                                }
                            }
                        }
                        write(buffers[buffersIndex][4] | MemoryMap.START_RAM, sizeCrCb1, bufferCrCb1, 0);

                        int[] bufferCrCb2 = getIntBuffer(sizeCrCb2);
                        for (int x = 0, j = 0; x < width2; x += 16)
                        {
                            for (int y = 1; y < height2; y += 2)
                            {
                                for (int xx = 0, i = y * width2 + x; xx < 8; xx++, i++)
                                {
                                    bufferCrCb2[j++] = cb[i];
                                    bufferCrCb2[j++] = cr[i];
                                }
                            }
                        }
                        write(buffers[buffersIndex][5] | MemoryMap.START_RAM, sizeCrCb2, bufferCrCb2, 0);

                        for (int x = 0, j = 0; x < width; x += 32)
                        {
                            for (int y = 1, i = x + width; y < height; y += 2, j += 16, i += 2 * width)
                            {
                                Array.Copy(luma, i, bufferY1, j, 16);
                            }
                        }
                        write(buffers[buffersIndex][2] | MemoryMap.START_RAM, sizeY1, bufferY1, 0);
                        releaseIntBuffer(bufferY1);

                        for (int x = 16, j = 0; x < width; x += 32)
                        {
                            for (int y = 1, i = x + width; y < height; y += 2, j += 16, i += 2 * width)
                            {
                                Array.Copy(luma, i, bufferY2, j, 16);
                            }
                        }
                        write(buffers[buffersIndex][3] | MemoryMap.START_RAM, sizeY2, bufferY2, 0);
                        releaseIntBuffer(bufferY2);

                        for (int x = 8, j = 0; x < width2; x += 16)
                        {
                            for (int y = 0; y < height2; y += 2)
                            {
                                for (int xx = 0, i = y * width2 + x; xx < 8; xx++, i++)
                                {
                                    bufferCrCb1[j++] = cb[i];
                                    bufferCrCb1[j++] = cr[i];
                                }
                            }
                        }
                        write(buffers[buffersIndex][6] | MemoryMap.START_RAM, sizeCrCb1, bufferCrCb1, 0);
                        releaseIntBuffer(bufferCrCb1);

                        for (int x = 8, j = 0; x < width2; x += 16)
                        {
                            for (int y = 1; y < height2; y += 2)
                            {
                                for (int xx = 0, i = y * width2 + x; xx < 8; xx++, i++)
                                {
                                    bufferCrCb2[j++] = cb[i];
                                    bufferCrCb2[j++] = cr[i];
                                }
                            }
                        }
                        write(buffers[buffersIndex][7] | MemoryMap.START_RAM, sizeCrCb2, bufferCrCb2, 0);
                        releaseIntBuffer(bufferCrCb2);
                    }
                    releaseIntBuffer(luma);
                    releaseIntBuffer(cb);
                    releaseIntBuffer(cr);

                    TPointer mpegAvcYuvStruct = buffer.getPointer(44);
                    for (int i = 0; i < 8; i++)
                    {
                        mpegAvcYuvStruct.setValue32(i * 4, buffers[buffersIndex][i]);
                        if (log.TraceEnabled)
                        {
                            log.trace(string.Format("sceVideocodecDecode YUV buffer[{0:D}]=0x{1:X8}", i, buffers[buffersIndex][i]));
                        }
                    }

                    mpegAvcYuvStruct.setValue32(32, videoCodec.hasImage());                             // 0 or 1

                    mpegAvcYuvStruct.setValue32(36, bufferUnknown1);
                    mem.write8(bufferUnknown1 + 0, (sbyte)0x02);                              // 0x00 or 0x04
                    mem.write32(bufferUnknown1 + 8, sceMpeg.mpegTimestampPerSecond);
                    mem.write32(bufferUnknown1 + 16, sceMpeg.mpegTimestampPerSecond);
                    mem.write32(bufferUnknown1 + 24, frameCount * 2);
                    mem.write32(bufferUnknown1 + 28, 2);
                    mem.write8(bufferUnknown1 + 32, (sbyte)0x00);                              // 0x00 or 0x01 or 0x02
                    mem.write8(bufferUnknown1 + 33, (sbyte)0x01);

                    mpegAvcYuvStruct.setValue32(40, bufferUnknown2);
                    mem.write8(bufferUnknown2 + 0, (sbyte)0x00);                              // 0x00 or 0x04
                    mem.write32(bufferUnknown2 + 24, 0);
                    mem.write32(bufferUnknown2 + 28, 0);

                    TPointer buffer3 = buffer.getPointer(48);
                    buffer3.setValue8(0, (sbyte)0x01);
                    buffer3.setValue8(1, unchecked ((sbyte)0xFF));
                    buffer3.setValue32(4, 3);
                    buffer3.setValue32(8, 4);
                    buffer3.setValue32(12, 1);
                    buffer3.setValue8(16, (sbyte)0);
                    buffer3.setValue32(20, 0x10000);
                    buffer3.setValue32(32, 4004);                             // 4004 or 5005
                    buffer3.setValue32(36, 240000);

                    TPointer decodeSEI = buffer.getPointer(80);
                    decodeSEI.setValue8(0, (sbyte)0x02);
                    decodeSEI.setValue32(8, sceMpeg.mpegTimestampPerSecond);
                    decodeSEI.setValue32(16, sceMpeg.mpegTimestampPerSecond);
                    decodeSEI.setValue32(24, frameCount * 2);
                    decodeSEI.setValue32(28, 2);
                    decodeSEI.setValue8(32, (sbyte)0x00);
                    decodeSEI.setValue8(33, (sbyte)0x01);
                }
                break;

            case 1:
                if (videoCodec.hasImage())
                {
                    if (memoryInfo == null)
                    {
                        int sizeY  = frameBufferWidthY * frameHeight;
                        int sizeCr = frameBufferWidthCr * (frameHeight / 2);
                        int sizeCb = frameBufferWidthCr * (frameHeight / 2);
                        int size   = (sizeY + sizeCr + sizeCb) * 2;

                        memoryInfo = Modules.SysMemUserForUserModule.malloc(SysMemUserForUser.KERNEL_PARTITION_ID, "sceVideocodecDecode", SysMemUserForUser.PSP_SMEM_Low, size, 0);

                        bufferY1  = memoryInfo.addr & EDRAM_MEMORY_MASK;
                        bufferY2  = bufferY1 + sizeY;
                        bufferCr1 = bufferY1 + sizeY;
                        bufferCb1 = bufferCr1 + sizeCr;
                        bufferCr2 = bufferY2 + sizeY;
                        bufferCb2 = bufferCr2 + sizeCr;
                    }
                }

                bool buffer1  = (frameCount & 1) == 0;
                int  bufferY  = buffer1 ? bufferY1 : bufferY2;
                int  bufferCr = buffer1 ? bufferCr1 : bufferCr2;
                int  bufferCb = buffer1 ? bufferCb1 : bufferCb2;

                if (videoCodec.hasImage())
                {
                    mem.memset(bufferY | MemoryMap.START_RAM, unchecked ((sbyte)0x80), frameBufferWidthY * frameHeight);
                    mem.memset(bufferCr | MemoryMap.START_RAM, (sbyte)(buffer1 ? 0x50 : 0x80), frameBufferWidthCr * (frameHeight / 2));
                    mem.memset(bufferCb | MemoryMap.START_RAM, unchecked ((sbyte)0x80), frameBufferWidthCb * (frameHeight / 2));
                }

                buffer2.setValue32(0, mp4Data);
                buffer2.setValue32(4, mp4Size);
                buffer2.setValue32(8, buffer.getValue32(56));
                buffer2.setValue32(12, 0x40);
                buffer2.setValue32(16, 0);
                buffer2.setValue32(44, mp4Size);
                buffer2.setValue32(48, frameWidth);
                buffer2.setValue32(52, frameHeight);
                buffer2.setValue32(60, videoCodec.hasImage() ? 2 : 1);
                buffer2.setValue32(64, 1);
                buffer2.setValue32(72, -1);
                buffer2.setValue32(76, frameCount * 0x64);
                buffer2.setValue32(80, 2997);
                buffer2.setValue32(84, bufferY);
                buffer2.setValue32(88, bufferCr);
                buffer2.setValue32(92, bufferCb);
                buffer2.setValue32(96, frameBufferWidthY);
                buffer2.setValue32(100, frameBufferWidthCr);
                buffer2.setValue32(104, frameBufferWidthCb);
                break;

            default:
                Console.WriteLine(string.Format("sceVideocodecDecode unknown type=0x{0:X}", type));
                break;
            }

            if (videoCodec.hasImage())
            {
                frameCount++;
            }

            IAction action;
            long    delayMicros = threadWakeupMicroTime - Emulator.Clock.microTime();

            if (delayMicros > 0L)
            {
                //if (log.DebugEnabled)
                {
                    Console.WriteLine(string.Format("Further delaying thread=0x{0:X} by {1:D} microseconds", threadUid, delayMicros));
                }
                action = new DelayThreadAction(threadUid, (int)delayMicros, false, true);
            }
            else
            {
                //if (log.DebugEnabled)
                {
                    Console.WriteLine(string.Format("Unblocking thread=0x{0:X}", threadUid));
                }
                action = new UnblockThreadAction(threadUid);
            }
            // The action cannot be executed immediately as we are running
            // in a non-PSP thread. The action has to be executed by the scheduler
            // as soon as possible.
            Emulator.Scheduler.addAction(action);
        }
Example #9
0
        /// <summary>
        ///   Extracts media properties (duration, dimensions, image quality and
        ///   per-codec format descriptions) from a TagLib file and appends them
        ///   to the given metadata list.
        /// </summary>
        /// <param name="metadata">Destination list receiving the extracted items.</param>
        /// <param name="f">TagLib file to inspect; files without properties are ignored.</param>
        private static void AddProperties(List <MetadataItem> metadata, TagLib.File f)
        {
            if (f.Properties == null)
            {
                return;
            }

            if (f.Properties.MediaTypes != MediaTypes.None)
            {
                TimeSpan duration = f.Properties.Duration;

                if (duration.Ticks > 0)
                {
                    AddData(metadata, MetadataType.DURATION,
                            MetadataUtils.SecsToMetadataDuration(duration.TotalSeconds));
                }
            }

            // formats[0] = video, formats[1] = audio, formats[2] = photo
            // (indices line up with the sibling formatTypes labels used below)
            string[] formats = new string[3];
            int      fmtCount = 0;
            int      w = int.MinValue, h = int.MinValue;
            int      q = int.MinValue;

            foreach (ICodec codec in f.Properties.Codecs)
            {
                // Pattern matching replaces the previous unchecked "as" casts:
                // a codec advertising a media-type flag without implementing the
                // matching interface would have caused a NullReferenceException.
                if (((codec.MediaTypes & MediaTypes.Video) == MediaTypes.Video) &&
                    codec is IVideoCodec vcodec)
                {
                    w = vcodec.VideoWidth;
                    h = vcodec.VideoHeight;

                    if (HasValidData(vcodec.Description))
                    {
                        formats[0] = vcodec.Description;
                        fmtCount++;
                    }
                }

                if (((codec.MediaTypes & MediaTypes.Audio) == MediaTypes.Audio) &&
                    codec is IAudioCodec acodec &&
                    HasValidData(acodec.Description))
                {
                    StringBuilder fmt = new StringBuilder();

                    AppendFormatPart(fmt, acodec.Description);

                    if (acodec.AudioBitrate > 0)
                    {
                        AppendFormatPart(fmt, acodec.AudioBitrate.ToString() + " kb/s");
                    }

                    if (acodec.AudioChannels > 0)
                    {
                        AppendFormatPart(fmt, acodec.AudioChannels.ToString() + " channels");
                    }

                    if (acodec.AudioSampleRate > 0)
                    {
                        AppendFormatPart(fmt, acodec.AudioSampleRate.ToString() + " Hz");
                    }

                    if (fmt.Length > 0)
                    {
                        formats[1] = fmt.ToString();
                        fmtCount++;
                    }
                }

                if (((codec.MediaTypes & MediaTypes.Photo) == MediaTypes.Photo) &&
                    codec is IPhotoCodec pcodec)
                {
                    // don't overwrite video dimensions
                    if ((w == int.MinValue) && (h == int.MinValue))
                    {
                        w = pcodec.PhotoWidth;
                        h = pcodec.PhotoHeight;
                    }

                    q = pcodec.PhotoQuality;

                    if (HasValidData(pcodec.Description))
                    {
                        formats[2] = pcodec.Description;
                        fmtCount++;
                    }
                }
            }

            // size format of libextractor is NxN
            if ((w > int.MinValue) && (h > int.MinValue))
            {
                AddData(metadata, MetadataType.SIZE, string.Format("{0}x{1}", w, h));
            }

            if (q > int.MinValue)
            {
                AddData(metadata, MetadataType.IMAGE_QUALITY, q.ToString());
            }


            // build the combined format string; label each part with its media
            // kind only when more than one codec contributed a description
            StringBuilder sb = new StringBuilder();

            for (int i = 0; i < formats.Length; i++)
            {
                string s = formats[i];

                if (s == null)
                {
                    continue;
                }

                if (sb.Length > 0)
                {
                    sb.Append("; ");
                }

                if (fmtCount > 1)
                {
                    sb.AppendFormat("{0}: {1}", formatTypes[i], s);
                }
                else
                {
                    sb.Append(s);
                }
            }

            if (sb.Length > 0)
            {
                AddData(metadata, MetadataType.FORMAT, sb.ToString());
            }
        }

        /// <summary>
        ///   Appends <paramref name="part"/> to <paramref name="fmt"/>, preceded
        ///   by a ", " separator when the builder already has content.
        /// </summary>
        /// <param name="fmt">Builder accumulating the audio format description.</param>
        /// <param name="part">Fragment to append (e.g. "128 kb/s").</param>
        private static void AppendFormatPart(StringBuilder fmt, string part)
        {
            if (fmt.Length > 0)
            {
                fmt.Append(", ");
            }

            fmt.Append(part);
        }