///////////////////////////// executable code ////////////////////////////////


    // Read the median log2 values from the specified metadata structure, convert
    // them back to 32-bit unsigned values and store them. If the length is not
    // exactly correct then we flag and return an error.

    internal static int read_entropy_vars(WavpackStream wps, WavpackMetadata wpmd)
    {
        byte[]     byteptr = wpmd.data;
        int[]      b_array = new int[12];
        int        i       = 0;
        words_data w       = new words_data();

        for (i = 0; i < 6; i++)
        {
            b_array[i] = (int)(byteptr[i] & 0xff);
        }

        w.holding_one  = 0;
        w.holding_zero = 0;

        if (wpmd.byte_length != 12)
        {
            if ((wps.wphdr.flags & (Defines.MONO_FLAG | Defines.FALSE_STEREO)) == 0)
            {
                return(Defines.FALSE);
            }
        }

        w.c[0].median[0] = exp2s(b_array[0] + (b_array[1] << 8));
        w.c[0].median[1] = exp2s(b_array[2] + (b_array[3] << 8));
        w.c[0].median[2] = exp2s(b_array[4] + (b_array[5] << 8));

        if ((wps.wphdr.flags & (Defines.MONO_FLAG | Defines.FALSE_STEREO)) == 0)
        {
            for (i = 6; i < 12; i++)
            {
                b_array[i] = (int)(byteptr[i] & 0xff);
            }
            w.c[1].median[0] = exp2s(b_array[6] + (b_array[7] << 8));
            w.c[1].median[1] = exp2s(b_array[8] + (b_array[9] << 8));
            w.c[1].median[2] = exp2s(b_array[10] + (b_array[11] << 8));
        }

        wps.w = w;

        return(Defines.TRUE);
    }
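
    // A hedged sketch, not part of the decoder: it only illustrates the byte layout
    // that read_entropy_vars() above expects from an ID_ENTROPY_VARS block, namely
    // three little-endian 16-bit log2-encoded medians per channel (6 bytes for
    // mono/false-stereo streams, 12 bytes for true stereo). The name
    // BuildEntropyBlock and its parameters are illustrative only.
    internal static byte[] BuildEntropyBlock(ushort[] leftMedians, ushort[] rightMedians)
    {
        int    channels = (rightMedians == null) ? 1 : 2;
        byte[] block    = new byte[6 * channels];

        for (int m = 0; m < 3; m++)
        {
            block[2 * m]     = (byte)(leftMedians[m] & 0xff);         // low byte first
            block[2 * m + 1] = (byte)((leftMedians[m] >> 8) & 0xff);  // then high byte

            if (channels == 2)
            {
                block[6 + 2 * m]     = (byte)(rightMedians[m] & 0xff);
                block[6 + 2 * m + 1] = (byte)((rightMedians[m] >> 8) & 0xff);
            }
        }

        return(block);
    }

    // Shift the raw integer samples so they line up with the original float data
    // (using float_shift and the exponents stored by read_float_info below) and
    // clip the results to the signed 24-bit range [-8388608, 8388607].
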
    internal static int[] float_values(WavpackStream wps, int[] values, long num_values, int bufferStartPos)
    {
        int shift         = wps.float_max_exp - wps.float_norm_exp + wps.float_shift;
        int value_counter = bufferStartPos;

        if (shift > 32)
        {
            shift = 32;
        }
        else if (shift < -32)
        {
            shift = -32;
        }

        while (num_values > 0)
        {
            if (shift > 0)
            {
                values[value_counter] <<= shift;
            }
            else if (shift < 0)
            {
                values[value_counter] >>= -shift;
            }

            if (values[value_counter] > 8388607L)
            {
                values[value_counter] = (int)SupportClass.Identity(8388607L);
            }
            else if (values[value_counter] < -8388608L)
            {
                values[value_counter] = (int)SupportClass.Identity(-8388608L);
            }

            value_counter++;
            num_values--;
        }

        return(values);
    }
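
    // Read the float information from the specified metadata structure: one byte
    // each for float_flags, float_shift, float_max_exp and float_norm_exp. The
    // block must be exactly 4 bytes long or an error is returned.
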
    internal static int read_float_info(WavpackStream wps, WavpackMetadata wpmd)
    {
        int bytecnt = wpmd.byte_length;

        byte[] byteptr = wpmd.data;
        int    counter = 0;


        if (bytecnt != 4)
        {
            return(Defines.FALSE);
        }

        wps.float_flags = byteptr[counter];
        counter++;
        wps.float_shift = byteptr[counter];
        counter++;
        wps.float_max_exp = byteptr[counter];
        counter++;
        wps.float_norm_exp = byteptr[counter];

        return(Defines.TRUE);
    }
    // Find the WavPack block that contains the specified sample. If "headerPos"
    // is zero, then no information is assumed except the total number of samples
    // in the file and its size in bytes. If "headerPos" is non-zero then we
    // assume that it is the file position of the valid header image contained in
    // the first stream and we can limit our search to either the portion above
    // or below that point. If a .wvc file is being used, then this must be called
    // for that file also.
    private static void seek(WavpackContext wpc, System.IO.BinaryReader infile, long headerPos, long targetSample)
    {
        try
        {
            WavpackStream wps = wpc.stream;
            long          file_pos1 = 0;
            long          file_pos2 = wpc.infile.BaseStream.Length;
            long          sample_pos1 = 0, sample_pos2 = wpc.total_samples;
            double        ratio     = 0.96;
            int           file_skip = 0;
            if (targetSample >= wpc.total_samples)
            {
                return;
            }
            if (headerPos > 0 && wps.wphdr.block_samples > 0)
            {
                if (wps.wphdr.block_index > targetSample)
                {
                    sample_pos2 = wps.wphdr.block_index;
                    file_pos2   = headerPos;
                }
                else if (wps.wphdr.block_index + wps.wphdr.block_samples <= targetSample)
                {
                    sample_pos1 = wps.wphdr.block_index;
                    file_pos1   = headerPos;
                }
                else
                {
                    return;
                }
            }
            while (true)
            {
                double bytes_per_sample;
                long   seek_pos;
                bytes_per_sample  = file_pos2 - file_pos1;
                bytes_per_sample /= sample_pos2 - sample_pos1;
                seek_pos          = file_pos1 + (file_skip > 0 ? 32 : 0);
                seek_pos         += (long)(bytes_per_sample * (targetSample - sample_pos1) * ratio);
                infile.BaseStream.Seek(seek_pos, 0);

                long temppos = infile.BaseStream.Position;
                wps.wphdr = read_next_header(infile, wps.wphdr);

                if (wps.wphdr.status == 1 || seek_pos >= file_pos2)
                {
                    if (ratio > 0.0)
                    {
                        if ((ratio -= 0.24) < 0.0)
                        {
                            ratio = 0.0;
                        }
                    }
                    else
                    {
                        return;
                    }
                }
                else if (wps.wphdr.block_index > targetSample)
                {
                    sample_pos2 = wps.wphdr.block_index;
                    file_pos2   = seek_pos;
                }
                else if (wps.wphdr.block_index + wps.wphdr.block_samples <= targetSample)
                {
                    if (seek_pos == file_pos1)
                    {
                        file_skip = 1;
                    }
                    else
                    {
                        sample_pos1 = wps.wphdr.block_index;
                        file_pos1   = seek_pos;
                    }
                }
                else
                {
                    int index = (int)(targetSample - wps.wphdr.block_index);
                    infile.BaseStream.Seek(seek_pos, 0);
                    WavpackContext c = WavpackOpenFileInput(infile);
                    wpc.stream = c.stream;
                    int[] temp_buf = new int[Defines.SAMPLE_BUFFER_SIZE];
                    while (index > 0)
                    {
                        int toUnpack = Math.Min(index, Defines.SAMPLE_BUFFER_SIZE / WavpackGetReducedChannels(wpc));
                        WavpackUnpackSamples(wpc, temp_buf, toUnpack);
                        index = index - toUnpack;
                    }
                    return;
                }
            }
        }
        catch (System.IO.IOException)
        {
        }
    }
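
    // A hedged wrapper sketch showing how seek() above might be driven; the name
    // WavpackSeekSample is illustrative and not necessarily part of this port's
    // public API. Correction (.wvc) files are ignored here.
    internal static void WavpackSeekSample(WavpackContext wpc, long targetSample)
    {
        // With headerPos == 0, seek() assumes nothing beyond the total sample
        // count and the file size and bisects the file on its own.
        seek(wpc, wpc.infile, 0, targetSample);
    }
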
    // This function reads data from the specified stream in search of a valid
    // WavPack 4.0 audio block. If this fails within 1 megabyte (or an invalid or
    // unsupported WavPack block is encountered) then an appropriate message is
    // stored in the context's error_message field and its error flag is set;
    // otherwise a WavpackContext structure is returned (which is used to call all
    // other functions in this module). This can be initiated at the beginning of a
    // WavPack file, or anywhere inside a WavPack file. To determine the exact
    // position within the file use WavpackGetSampleIndex(). Also,
    // this function will not handle "correction" files, decodes only the first
    // two channels of multi-channel files, and is limited in resolution in some
    // large integer or floating point files (but always provides at least 24 bits
    // of resolution).

    public static WavpackContext WavpackOpenFileInput(System.IO.BinaryReader infile)
    {
        WavpackContext wpc = new WavpackContext();
        WavpackStream  wps = wpc.stream;

        wpc.infile        = infile;
        wpc.total_samples = -1;
        wpc.norm_offset   = 0;
        wpc.open_flags    = 0;


        // open the source file for reading and store the size

        while (wps.wphdr.block_samples == 0)
        {
            wps.wphdr = read_next_header(wpc.infile, wps.wphdr);

            if (wps.wphdr.status == 1)
            {
                wpc.error_message = "not compatible with this version of WavPack file!";
                wpc.error         = true;
                return(wpc);
            }

            if (wps.wphdr.block_samples > 0 && wps.wphdr.total_samples != -1)
            {
                wpc.total_samples = wps.wphdr.total_samples;
            }

            // let's put the stream back in the context

            wpc.stream = wps;

            if ((UnpackUtils.unpack_init(wpc)) == Defines.FALSE)
            {
                wpc.error = true;
                return(wpc);
            }
        }         // end of while

        wpc.config.flags = wpc.config.flags & ~0xff;
        wpc.config.flags = wpc.config.flags | (wps.wphdr.flags & 0xff);

        wpc.config.bytes_per_sample = (int)((wps.wphdr.flags & Defines.BYTES_STORED) + 1);
        wpc.config.float_norm_exp   = wps.float_norm_exp;

        wpc.config.bits_per_sample = (int)((wpc.config.bytes_per_sample * 8) - ((wps.wphdr.flags & Defines.SHIFT_MASK) >> Defines.SHIFT_LSB));

        if ((wpc.config.flags & Defines.FLOAT_DATA) > 0)
        {
            wpc.config.bytes_per_sample = 3;
            wpc.config.bits_per_sample  = 24;
        }

        if (wpc.config.sample_rate == 0)
        {
            if (wps.wphdr.block_samples == 0 || (wps.wphdr.flags & Defines.SRATE_MASK) == Defines.SRATE_MASK)
            {
                wpc.config.sample_rate = 44100;
            }
            else
            {
                wpc.config.sample_rate = sample_rates[(int)((wps.wphdr.flags & Defines.SRATE_MASK) >> Defines.SRATE_LSB)];
            }
        }

        if (wpc.config.num_channels == 0)
        {
            if ((wps.wphdr.flags & Defines.MONO_FLAG) > 0)
            {
                wpc.config.num_channels = 1;
            }
            else
            {
                wpc.config.num_channels = 2;
            }

            wpc.config.channel_mask = 0x5 - wpc.config.num_channels;
        }

        if ((wps.wphdr.flags & Defines.FINAL_BLOCK) == 0)
        {
            if ((wps.wphdr.flags & Defines.MONO_FLAG) != 0)
            {
                wpc.reduced_channels = 1;
            }
            else
            {
                wpc.reduced_channels = 2;
            }
        }

        return(wpc);
    }
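
    // A hedged usage sketch, not part of the decoder: open a .wv file and print the
    // basic stream parameters. PrintWavpackInfo and its local names are illustrative;
    // only fields set by WavpackOpenFileInput() above are read.
    public static void PrintWavpackInfo(string path)
    {
        using (System.IO.BinaryReader reader = new System.IO.BinaryReader(System.IO.File.OpenRead(path)))
        {
            WavpackContext wpc = WavpackOpenFileInput(reader);

            if (wpc.error)
            {
                System.Console.WriteLine("open failed: " + wpc.error_message);
                return;
            }

            System.Console.WriteLine("channels:        " + wpc.config.num_channels);
            System.Console.WriteLine("sample rate:     " + wpc.config.sample_rate);
            System.Console.WriteLine("bits per sample: " + wpc.config.bits_per_sample);
            System.Console.WriteLine("total samples:   " + wpc.total_samples);
        }
    }
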
    // Unpack the specified number of samples from the current file position.
    // Note that "samples" here refers to "complete" samples, which would be
    // 2 values for stereo files. The audio data is returned right-justified in
    // 32-bit integers in the endian mode native to the executing processor. So,
    // if the original data was 16-bit, then the values returned would be
    // +/-32k. Floating point data will be returned as 24-bit integers (and may
    // also be clipped). The actual number of samples unpacked is returned,
    // which should be equal to the number requested unless the end of file is
    // encountered or an error occurs.

    internal static long WavpackUnpackSamples(WavpackContext wpc, int[] buffer, long samples)
    {
        WavpackStream wps = wpc.stream;
        long          samples_unpacked = 0, samples_to_unpack;
        int           num_channels = wpc.config.num_channels;
        int           bcounter     = 0;

        int buf_idx        = 0;
        int bytes_returned = 0;

        while (samples > 0)
        {
            if (wps.wphdr.block_samples == 0 || (wps.wphdr.flags & Defines.INITIAL_BLOCK) == 0 || wps.sample_index >= wps.wphdr.block_index + wps.wphdr.block_samples)
            {
                wps.wphdr = read_next_header(wpc.infile, wps.wphdr);

                if (wps.wphdr.status == 1)
                {
                    break;
                }

                if (wps.wphdr.block_samples == 0 || wps.sample_index == wps.wphdr.block_index)
                {
                    if ((UnpackUtils.unpack_init(wpc)) == Defines.FALSE)
                    {
                        break;
                    }
                }
            }

            if (wps.wphdr.block_samples == 0 || (wps.wphdr.flags & Defines.INITIAL_BLOCK) == 0 || wps.sample_index >= wps.wphdr.block_index + wps.wphdr.block_samples)
            {
                continue;
            }

            if (wps.sample_index < wps.wphdr.block_index)
            {
                samples_to_unpack = wps.wphdr.block_index - wps.sample_index;

                if (samples_to_unpack > samples)
                {
                    samples_to_unpack = samples;
                }

                wps.sample_index += samples_to_unpack;
                samples_unpacked += samples_to_unpack;
                samples          -= samples_to_unpack;

                if (wpc.reduced_channels > 0)
                {
                    samples_to_unpack *= wpc.reduced_channels;
                }
                else
                {
                    samples_to_unpack *= num_channels;
                }

                bcounter = buf_idx;

                while (samples_to_unpack > 0)
                {
                    buffer[bcounter] = 0;
                    bcounter++;
                    samples_to_unpack--;
                }
                buf_idx = bcounter;

                continue;
            }

            samples_to_unpack = wps.wphdr.block_index + wps.wphdr.block_samples - wps.sample_index;

            if (samples_to_unpack > samples)
            {
                samples_to_unpack = samples;
            }

            UnpackUtils.unpack_samples(wpc, buffer, samples_to_unpack, buf_idx);

            if (wpc.reduced_channels > 0)
            {
                bytes_returned = (int)(samples_to_unpack * wpc.reduced_channels);
            }
            else
            {
                bytes_returned = (int)(samples_to_unpack * num_channels);
            }

            buf_idx += bytes_returned;

            samples_unpacked += samples_to_unpack;
            samples          -= samples_to_unpack;

            if (wps.sample_index == wps.wphdr.block_index + wps.wphdr.block_samples)
            {
                if (UnpackUtils.check_crc_error(wpc) > 0)
                {
                    wpc.crc_errors++;
                }
            }

            if (wps.sample_index == wpc.total_samples)
            {
                break;
            }
        }

        return(samples_unpacked);
    }
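
    // A hedged decode-loop sketch, not part of this port: it pulls interleaved samples
    // in chunks until WavpackUnpackSamples() reports nothing more to read. The name
    // CountDecodedSamples is illustrative only.
    internal static long CountDecodedSamples(WavpackContext wpc)
    {
        int   channels = (wpc.reduced_channels > 0) ? wpc.reduced_channels : wpc.config.num_channels;
        int[] buffer   = new int[Defines.SAMPLE_BUFFER_SIZE];
        long  total    = 0;

        while (true)
        {
            // request "complete" samples, so divide the buffer size by the channel count
            long unpacked = WavpackUnpackSamples(wpc, buffer, Defines.SAMPLE_BUFFER_SIZE / channels);

            if (unpacked == 0)
            {
                break;              // end of file or unreadable block
            }

            total += unpacked;      // buffer[0 .. unpacked * channels - 1] now holds audio
        }

        return(total);
    }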
    // Read the hybrid related values from the specified metadata structure, convert
    // them back to their internal formats and store them. The extended profile
    // stuff is not implemented yet, so return an error if we get more data than
    // we know what to do with.

    internal static int read_hybrid_profile(WavpackStream wps, WavpackMetadata wpmd)
    {
        byte[] byteptr         = wpmd.data;
        int    bytecnt         = wpmd.byte_length;
        int    buffer_counter  = 0;
        int    uns_buf         = 0;
        int    uns_buf_plusone = 0;

        if ((wps.wphdr.flags & Defines.HYBRID_BITRATE) != 0)
        {
            uns_buf         = (int)(byteptr[buffer_counter] & 0xff);
            uns_buf_plusone = (int)(byteptr[buffer_counter + 1] & 0xff);

            wps.w.c[0].slow_level = exp2s(uns_buf + (uns_buf_plusone << 8));
            buffer_counter        = buffer_counter + 2;

            if ((wps.wphdr.flags & (Defines.MONO_FLAG | Defines.FALSE_STEREO)) == 0)
            {
                uns_buf               = (int)(byteptr[buffer_counter] & 0xff);
                uns_buf_plusone       = (int)(byteptr[buffer_counter + 1] & 0xff);
                wps.w.c[1].slow_level = exp2s(uns_buf + (uns_buf_plusone << 8));
                buffer_counter        = buffer_counter + 2;
            }
        }

        uns_buf         = (int)(byteptr[buffer_counter] & 0xff);
        uns_buf_plusone = (int)(byteptr[buffer_counter + 1] & 0xff);

        wps.w.bitrate_acc[0] = (int)(uns_buf + (uns_buf_plusone << 8)) << 16;
        buffer_counter       = buffer_counter + 2;

        if ((wps.wphdr.flags & (Defines.MONO_FLAG | Defines.FALSE_STEREO)) == 0)
        {
            uns_buf         = (int)(byteptr[buffer_counter] & 0xff);
            uns_buf_plusone = (int)(byteptr[buffer_counter + 1] & 0xff);

            wps.w.bitrate_acc[1] = (int)(uns_buf + (uns_buf_plusone << 8)) << 16;
            buffer_counter       = buffer_counter + 2;
        }

        if (buffer_counter < bytecnt)
        {
            uns_buf         = (int)(byteptr[buffer_counter] & 0xff);
            uns_buf_plusone = (int)(byteptr[buffer_counter + 1] & 0xff);

            wps.w.bitrate_delta[0] = exp2s((short)(uns_buf + (uns_buf_plusone << 8)));
            buffer_counter         = buffer_counter + 2;

            if ((wps.wphdr.flags & (Defines.MONO_FLAG | Defines.FALSE_STEREO)) == 0)
            {
                uns_buf                = (int)(byteptr[buffer_counter] & 0xff);
                uns_buf_plusone        = (int)(byteptr[buffer_counter + 1] & 0xff);
                wps.w.bitrate_delta[1] = exp2s((short)(uns_buf + (uns_buf_plusone << 8)));
                buffer_counter         = buffer_counter + 2;
            }

            if (buffer_counter < bytecnt)
            {
                return(Defines.FALSE);
            }
        }
        else
        {
            wps.w.bitrate_delta[0] = wps.w.bitrate_delta[1] = 0;
        }

        return(Defines.TRUE);
    }
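
    // read_hybrid_profile() above repeats the same "low byte plus high byte << 8"
    // assembly several times; the helper below is only a hedged refactoring sketch
    // of that pattern and is not used by the decoder as written.
    private static int ReadUInt16LE(byte[] data, int offset)
    {
        // assemble a little-endian unsigned 16-bit value from two consecutive bytes
        return((data[offset] & 0xff) + ((data[offset + 1] & 0xff) << 8));
    }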
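
    // Dispatch a metadata block to the routine that knows how to parse it, based on
    // its id. Unknown blocks are accepted only if they are marked optional; otherwise
    // FALSE is returned.
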
    internal static int process_metadata(WavpackContext wpc, WavpackMetadata wpmd)
    {
        WavpackStream wps = wpc.stream;

        switch (wpmd.id)
        {
        case Defines.ID_DUMMY:
        {
            return(Defines.TRUE);
        }


        case Defines.ID_DECORR_TERMS:
        {
            return(UnpackUtils.read_decorr_terms(wps, wpmd));
        }


        case Defines.ID_DECORR_WEIGHTS:
        {
            return(UnpackUtils.read_decorr_weights(wps, wpmd));
        }


        case Defines.ID_DECORR_SAMPLES:
        {
            return(UnpackUtils.read_decorr_samples(wps, wpmd));
        }


        case Defines.ID_ENTROPY_VARS:
        {
            return(WordsUtils.read_entropy_vars(wps, wpmd));
        }


        case Defines.ID_HYBRID_PROFILE:
        {
            return(WordsUtils.read_hybrid_profile(wps, wpmd));
        }


        case Defines.ID_FLOAT_INFO:
        {
            return(FloatUtils.read_float_info(wps, wpmd));
        }


        case Defines.ID_INT32_INFO:
        {
            return(UnpackUtils.read_int32_info(wps, wpmd));
        }


        case Defines.ID_CHANNEL_INFO:
        {
            return(UnpackUtils.read_channel_info(wpc, wpmd));
        }


        case Defines.ID_SAMPLE_RATE:
        {
            return(UnpackUtils.read_sample_rate(wpc, wpmd));
        }


        case Defines.ID_CONFIG_BLOCK:
        {
            return(UnpackUtils.read_config_info(wpc, wpmd));
        }


        case Defines.ID_WV_BITSTREAM:
        {
            return(UnpackUtils.init_wv_bitstream(wpc, wpmd));
        }


        case Defines.ID_SHAPING_WEIGHTS:
        case Defines.ID_WVC_BITSTREAM:
        case Defines.ID_WVX_BITSTREAM:
        {
            return(Defines.TRUE);
        }


        default:
        {
            if ((wpmd.id & Defines.ID_OPTIONAL_DATA) != 0)
            {
                return(Defines.TRUE);
            }
            else
            {
                return(Defines.FALSE);
            }
        }
        }
    }