public virtual void mousePressed(MouseEvent evt) { this.xDragStart = java.lang.Math.max(0, evt.getX()); this.setSelectionStart(ByteCodeHelper.f2i((float)this.xDragStart / this.xScale)); this.setSelectionEnd(ByteCodeHelper.f2i((float)this.xDragStart / this.xScale)); this.repaint(); }
protected internal virtual void computeSpectrogram() { try { AudioDataInputStream audioDataInputStream = new AudioDataInputStream(this.audio); this.dataSource.setInputStream(audioDataInputStream); ArrayList arrayList = new ArrayList(); double num = double.Epsilon; Data data = this.frontEnd.getData(); int i; while (!(data is DataEndSignal)) { if (data is DoubleData) { double[] values = ((DoubleData)data).getValues(); double[] array = new double[values.Length]; for (i = 0; i < array.Length; i++) { array[i] = Math.max(Math.log(values[i]), (double)0f); if (array[i] > num) { num = array[i]; } } arrayList.add(array); } data = this.frontEnd.getData(); } audioDataInputStream.close(); int num2 = arrayList.size(); int num3 = ((double[])arrayList.get(0)).Length; i = num3 - 1; Dimension dimension = new Dimension(num2, num3); this.setMinimumSize(dimension); this.setMaximumSize(dimension); this.setPreferredSize(dimension); this.spectrogram = new BufferedImage(num2, num3, 1); double num4 = (255.0 + this.offsetFactor) / num; for (int j = 0; j < num2; j++) { double[] array2 = (double[])arrayList.get(j); for (int k = i; k >= 0; k--) { int num5 = ByteCodeHelper.d2i(array2[k] * num4 - this.offsetFactor); num5 = Math.max(num5, 0); num5 = 255 - num5; int num6 = (num5 << 16 & 16711680) | (num5 << 8 & 65280) | (num5 & 255); this.spectrogram.setRGB(j, i - k, num6); } } ReplicateScaleFilter replicateScaleFilter = new ReplicateScaleFilter(ByteCodeHelper.f2i(this.zoom * (float)num2), num3); this.scaledSpectrogram = this.createImage(new FilteredImageSource(this.spectrogram.getSource(), replicateScaleFilter)); Dimension size = this.getSize(); this.repaint(0L, 0, 0, size.width - 1, size.height - 1); } catch (System.Exception ex) { Throwable.instancehelper_printStackTrace(ex); } }
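// Illustrative sketch, not part of the original class: computeSpectrogram above rescales each
// log-energy value with scaleFactor = (255.0 + offsetFactor) / maxLogEnergy, floors it at 0,
// inverts it so louder frames render darker, and replicates the grey level into all three
// channels of a packed 0xRRGGBB pixel. The hypothetical helper below repeats that last step
// for a single already-scaled value.
private static int greyToRgb(int scaledIntensity)
{
    scaledIntensity = Math.max(scaledIntensity, 0); // same floor as the loop above
    int grey = 255 - scaledIntensity;               // invert: stronger energy becomes darker
    return (grey << 16 & 16711680) | (grey << 8 & 65280) | (grey & 255); // pack as 0xRRGGBB
}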
protected override void paintComponent(Graphics g) { base.paintComponent(g); Dimension size = this.getSize(); int num = size.height / 2; short[] audioData = this.audio.getAudioData(); JViewport viewport = this.getViewport(); int num2; int num3; if (viewport != null) { Rectangle viewRect = viewport.getViewRect(); num2 = ByteCodeHelper.d2i(viewRect.getX()); num3 = ByteCodeHelper.d2i(viewRect.getWidth()); } else { num2 = 0; num3 = ByteCodeHelper.f2i((float)audioData.Length * this.xScale); } g.setColor(Color.WHITE); g.fillRect(num2, 0, num3, size.height - 1); int num4 = java.lang.Math.max(0, this.getSelectionStart()); int num5 = ByteCodeHelper.f2i((float)num4 * this.xScale); num4 = this.getSelectionEnd(); if (num4 == -1) { num4 = audioData.Length - 1; } int num6 = ByteCodeHelper.f2i((float)num4 * this.xScale); g.setColor(Color.LIGHT_GRAY); g.fillRect(num5, 0, num6 - num5, size.height - 1); int[] array = new int[num3]; int[] array2 = new int[num3]; for (int i = 0; i < num3; i++) { array[i] = num2; num4 = ByteCodeHelper.f2i((float)num2 / this.xScale); if (num4 >= audioData.Length) { break; } array2[i] = num - ByteCodeHelper.f2i((float)audioData[num4] * this.yScale); num2++; } g.setColor(Color.RED); g.drawPolyline(array, array2, num3); for (int i = 0; i < this.labelTimes.Length; i++) { num2 = ByteCodeHelper.f2i(this.xScale * this.labelTimes[i] * this.audio.getAudioFormat().getSampleRate()); g.drawLine(num2, 0, num2, size.height - 1); g.drawString(this.labels[i], num2 + 5, size.height - 5); } }
public AudioInputStream nextElement() { AudioInputStream result = null; if (this.lastFile == null) { ConcatAudioFileDataSource.access_002(this.this_0, this.readNext()); } if (ConcatAudioFileDataSource.access_000(this.this_0) != null) { try { try { AudioInputStream audioInputStream = AudioSystem.getAudioInputStream(ConcatAudioFileDataSource.access_000(this.this_0)); AudioFormat format = audioInputStream.getFormat(); if (!ConcatAudioFileDataSource.access_100(this.this_0)) { ConcatAudioFileDataSource.access_102(this.this_0, true); this.this_0.bigEndian = format.isBigEndian(); this.this_0.sampleRate = ByteCodeHelper.f2i(format.getSampleRate()); this.this_0.signedData = format.getEncoding().equals(AudioFormat.Encoding.PCM_SIGNED); this.this_0.bytesPerValue = format.getSampleSizeInBits() / 8; } if (format.getSampleRate() != (float)this.this_0.sampleRate || format.getChannels() != 1 || format.isBigEndian() != this.this_0.bigEndian) { string text = "format mismatch for subsequent files"; throw new RuntimeException(text); } result = audioInputStream; ConcatAudioFileDataSource.access_200(this.this_0).finer(new StringBuilder().append("Starting processing of '").append(this.lastFile.getFile()).append('\'').toString()); Iterator iterator = this.this_0.__fileListeners.iterator(); while (iterator.hasNext()) { AudioFileProcessListener audioFileProcessListener = (AudioFileProcessListener)iterator.next(); audioFileProcessListener.audioFileProcStarted(new File(ConcatAudioFileDataSource.access_000(this.this_0).getFile())); } this.lastFile = ConcatAudioFileDataSource.access_000(this.this_0); ConcatAudioFileDataSource.access_002(this.this_0, null); } catch (IOException ex) { Throwable.instancehelper_printStackTrace(ex); throw new Error(new StringBuilder().append("Cannot convert ").append(ConcatAudioFileDataSource.access_000(this.this_0)).append(" to a FileInputStream").toString()); } } catch (UnsupportedAudioFileException ex3) { Throwable.instancehelper_printStackTrace(ex3); } return(result); } return(result); }
protected internal virtual void zoomSet(float zoom) { this.xScale = this.originalXScale * zoom; int num = ByteCodeHelper.f2i((float)this.audio.getAudioData().Length * this.xScale); int num2 = ByteCodeHelper.f2i(65536f * this.yScale); this.setPreferredSize(new Dimension(num, num2)); this.revalidate(); this.repaint(); }
public virtual void mouseDragged(MouseEvent evt) { this.xDragEnd = evt.getX(); if (this.xDragEnd < ByteCodeHelper.f2i((float)this.getSelectionStart() * this.xScale)) { this.setSelectionStart(ByteCodeHelper.f2i((float)this.xDragEnd / this.xScale)); } else { this.setSelectionEnd(ByteCodeHelper.f2i((float)this.xDragEnd / this.xScale)); } this.repaint(); }
public virtual void loadScaleKMeans(string nom) { int num = 0; try { BufferedReader bufferedReader = new BufferedReader(new FileReader(nom)); while (bufferedReader.readLine() != null) { num++; } this.ngauss = num / 2; bufferedReader.close(); bufferedReader = new BufferedReader(new FileReader(nom)); string text = bufferedReader.readLine(); string[] array = String.instancehelper_split(text, " "); this.ncoefs = array.Length - 1; bufferedReader.close(); bufferedReader = new BufferedReader(new FileReader(nom)); this.allocate(); this.nT = 0; for (int i = 0; i < this.ngauss; i++) { text = bufferedReader.readLine(); array = String.instancehelper_split(text, " "); this.weights[i] = Float.parseFloat(array[0]); this.nT = ByteCodeHelper.f2i((float)this.nT + this.weights[i]); for (int j = 0; j < this.ncoefs; j++) { this.setMean(i, j, Float.parseFloat(array[j + 1])); } text = bufferedReader.readLine(); array = String.instancehelper_split(text, " "); for (int j = 0; j < this.ncoefs; j++) { this.setVar(i, j, Float.parseFloat(array[j])); } } for (int i = 0; i < this.ngauss; i++) { this.setWeight(i, this.weights[i] / (float)this.nT); } bufferedReader.close(); this.precomputeDistance(); } catch (IOException ex) { Throwable.instancehelper_printStackTrace(ex); } }
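// File layout expected by loadScaleKMeans above, reconstructed from the parsing code (the file
// name is whatever is passed as nom): the file holds two text lines per Gaussian, so half the
// line count gives ngauss, and the token count of the first line minus one gives ncoefs.
//
//   weight_0 mean_0_0 mean_0_1 ... mean_0_(ncoefs-1)
//   var_0_0 var_0_1 ... var_0_(ncoefs-1)
//   weight_1 mean_1_0 ... mean_1_(ncoefs-1)
//   var_1_0 ... var_1_(ncoefs-1)
//   ...
//
// After the read loop, each weight is divided by the (integer-truncated) accumulated total nT,
// so the stored weights form a normalized distribution before precomputeDistance() is called.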
public void stateChanged(ChangeEvent changeEvent) { int num = ByteCodeHelper.f2i((float)AudioPanel.access_000(this.this_0).getAudioData().Length * AudioPanel.access_100(this.this_0)); int num2 = ByteCodeHelper.f2i(65536f * AudioPanel.access_200(this.this_0)); AudioPanel.access_302(this.this_0, new float[0]); AudioPanel.access_402(this.this_0, new string[0]); this.this_0.setSelectionStart(-1); this.this_0.setSelectionEnd(-1); this.this_0.setPreferredSize(new Dimension(num, num2)); Dimension size = this.this_0.getSize(); this.this_0.revalidate(); this.this_0.repaint(0L, 0, 0, size.width, size.height); }
protected internal virtual void zoomSet(float zoom) { this.zoom = zoom; if (this.spectrogram != null) { int width = this.spectrogram.getWidth(); int height = this.spectrogram.getHeight(); ReplicateScaleFilter replicateScaleFilter = new ReplicateScaleFilter(ByteCodeHelper.f2i(zoom * (float)width), height); this.scaledSpectrogram = this.createImage(new FilteredImageSource(this.spectrogram.getSource(), replicateScaleFilter)); Dimension dimension = new Dimension(ByteCodeHelper.f2i((float)width * zoom), height); this.setMinimumSize(dimension); this.setMaximumSize(dimension); this.setPreferredSize(dimension); this.repaint(); } }
public AudioPanel(AudioData audioData, float scaleX, float scaleY) { this.selectionStart = -1; this.selectionEnd = -1; this.audio = audioData; this.labelTimes = new float[0]; this.labels = new string[0]; this.xScale = scaleX; this.yScale = scaleY; this.originalXScale = this.xScale; int num = ByteCodeHelper.f2i((float)this.audio.getAudioData().Length * this.xScale); int num2 = ByteCodeHelper.f2i(65536f * this.yScale); this.setPreferredSize(new Dimension(num, num2)); this.setBackground(Color.white); this.audio.addChangeListener(new AudioPanel_1(this)); this.addMouseMotionListener(this); this.addMouseListener(this); this.setFocusable(true); this.requestFocus(); }
public virtual void setInputStream(AudioInputStream inputStream, string streamName) { this.dataStream = inputStream; this.streamEndReached = false; this.utteranceEndSent = false; this.utteranceStarted = false; AudioFormat format = inputStream.getFormat(); this.sampleRate = ByteCodeHelper.f2i(format.getSampleRate()); this.bigEndian = format.isBigEndian(); string text = format.toString(); this.logger.finer(new StringBuilder().append("input format is ").append(text).toString()); if (format.getSampleSizeInBits() % 8 != 0) { string text2 = "StreamDataSource: bits per sample must be a multiple of 8."; throw new Error(text2); } this.bytesPerValue = format.getSampleSizeInBits() / 8; AudioFormat.Encoding encoding = format.getEncoding(); if (encoding.equals(AudioFormat.Encoding.PCM_SIGNED)) { this.signedData = true; } else { if (!encoding.equals(AudioFormat.Encoding.PCM_UNSIGNED)) { string text3 = "used file encoding is not supported"; throw new RuntimeException(text3); } this.signedData = false; } this.totalValuesRead = 0L; }
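// Minimal usage sketch, not from the original sources: the error message above suggests the
// surrounding class is Sphinx-4's StreamDataSource, so the hypothetical helper below feeds a PCM
// WAV file into such an instance via the same Java Sound API the method already relies on. The
// file must be PCM with a sample size that is a multiple of 8 bits for the checks above to pass.
private static void feedWavFile(StreamDataSource source, File wavFile)
{
    // getAudioInputStream parses the WAV header and exposes the PCM payload as an AudioInputStream.
    AudioInputStream ais = AudioSystem.getAudioInputStream(wavFile);
    // The second argument is a descriptive name for the stream.
    source.setInputStream(ais, wavFile.getName());
}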
protected internal virtual void computeCepstrum() { try { AudioDataInputStream audioDataInputStream = new AudioDataInputStream(this.audio); this.dataSource.setInputStream(audioDataInputStream); ArrayList arrayList = new ArrayList(); float[] array = new float[100]; Arrays.fill(array, float.Epsilon); Data data = this.frontEnd.getData(); int i; while (!(data is DataEndSignal)) { if (data is FloatData) { float[] values = ((FloatData)data).getValues(); float[] array2 = new float[values.Length]; for (i = 0; i < array2.Length; i++) { array2[i] = values[i]; if (Math.abs(array2[i]) > array[i]) { array[i] = Math.abs(array2[i]); } } arrayList.add(array2); } if (data is DoubleData) { double[] values2 = ((DoubleData)data).getValues(); float[] array2 = new float[values2.Length]; for (i = 0; i < array2.Length; i++) { array2[i] = (float)values2[i]; if (Math.abs(array2[i]) > array[i]) { array[i] = Math.abs(array2[i]); } } arrayList.add(array2); } data = this.frontEnd.getData(); } audioDataInputStream.close(); int num = arrayList.size(); int num2 = ((float[])arrayList.get(0)).Length; i = num2 * 10; Dimension dimension = new Dimension(num, i); this.setMinimumSize(dimension); this.setMaximumSize(dimension); this.setPreferredSize(dimension); this.spectrogram = new BufferedImage(num, i, 1); for (int j = 0; j < num; j++) { float[] array3 = (float[])arrayList.get(j); for (int k = num2 - 1; k >= 0; k--) { int num3 = 127 - ByteCodeHelper.f2i(array3[k] / array[k] * 127f); int num4 = (num3 << 16 & 16711680) | (num3 << 8 & 65280) | (num3 & 255); for (int l = 0; l < 10; l++) { this.spectrogram.setRGB(j, i - 1 - k * 10 - l, num4); } } } ReplicateScaleFilter replicateScaleFilter = new ReplicateScaleFilter(ByteCodeHelper.f2i(this.zoom * (float)num), i); this.scaledSpectrogram = this.createImage(new FilteredImageSource(this.spectrogram.getSource(), replicateScaleFilter)); Dimension size = this.getSize(); this.repaint(0L, 0, 0, size.width - 1, size.height - 1); } catch (System.Exception ex) { Throwable.instancehelper_printStackTrace(ex); } }
public int compare(PrunableMixtureComponent prunableMixtureComponent, PrunableMixtureComponent prunableMixtureComponent2) { return(ByteCodeHelper.f2i(prunableMixtureComponent.getStoredScore() - prunableMixtureComponent2.getStoredScore())); }
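// Note on the comparator above: ByteCodeHelper.f2i truncates toward zero, so two components whose
// stored scores differ by less than 1.0 compare as equal and their relative order is left to the
// sorting algorithm. A sketch of an order-preserving alternative (an assumption, not the original
// code) would delegate to Float.compare instead:
// return(Float.compare(prunableMixtureComponent.getStoredScore(), prunableMixtureComponent2.getStoredScore()));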
public static int getSamplesPerShift(int sampleRate, float windowShiftInMs) { return(ByteCodeHelper.f2i((float)sampleRate * windowShiftInMs / 1000f)); }
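// Worked example for getSamplesPerShift above, with illustrative values the method itself does
// not mandate: at a 16000 Hz sample rate and a 10 ms window shift, 16000 * 10 / 1000 = 160, so
// consecutive analysis frames start 160 samples apart.
// int samplesPerShift = getSamplesPerShift(16000, 10f); // 160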
private Data readData(Utterance utterance) { byte[] array = new byte[Microphone.access_1400(this.this_0)]; int channels = Microphone.access_500(this.this_0).getFormat().getChannels(); long firstSampleNumber = this.totalSamplesRead / (long)channels; int num3 = Microphone.access_500(this.this_0).read(array, 0, array.Length); if (!this.started) { lock (this) { this.started = true; System.Threading.Thread.MemoryBarrier(); Object.instancehelper_notifyAll(this); } } if (Microphone.access_1500(this.this_0).isLoggable(Level.FINE)) { Microphone.access_1600(this.this_0).info(new StringBuilder().append("Read ").append(num3).append(" bytes from audio stream.").toString()); } if (num3 <= 0) { return(null); } int num4 = Microphone.access_500(this.this_0).getFormat().getSampleSizeInBits() / 8; this.totalSamplesRead += (long)(num3 / num4); if (num3 != Microphone.access_1400(this.this_0)) { if (num3 % num4 != 0) { string text = "Incomplete sample read."; throw new Error(text); } array = Arrays.copyOf(array, num3); } if (Microphone.access_300(this.this_0)) { utterance.add(array); } double[] array2; if (Microphone.access_1700(this.this_0)) { array2 = DataUtil.bytesToValues(array, 0, array.Length, num4, Microphone.access_1800(this.this_0)); } else { array2 = DataUtil.littleEndianBytesToValues(array, 0, array.Length, num4, Microphone.access_1800(this.this_0)); } if (channels > 1) { array2 = Microphone.access_1900(this.this_0, array2, channels); } return(new DoubleData(array2, ByteCodeHelper.f2i(Microphone.access_500(this.this_0).getFormat().getSampleRate()), firstSampleNumber)); }
public virtual short[] stop() { lock (this.@lock) { if (this.recorder == null) { return(new short[0]); } ByteArrayOutputStream byteArrayOutputStream = this.recorder.stopRecording(); this.microphone.close(); this.recorder = null; byte[] array = byteArrayOutputStream.toByteArray(); ByteArrayInputStream audioStream = new ByteArrayInputStream(array); try { short[] array2 = RawReader.readAudioData(audioStream, this.inFormat); if (this.downsample) { array2 = Downsampler.downsample(array2, ByteCodeHelper.f2i(this.inFormat.getSampleRate() / 1000f), ByteCodeHelper.f2i(this.outFormat.getSampleRate() / 1000f)); } return(array2); } catch (IOException ex) { Throwable.instancehelper_printStackTrace(ex); return(new short[0]); } } }
private bool open() { TargetDataLine targetDataLine = this.getAudioLine(); if (targetDataLine != null) { if (!targetDataLine.isOpen()) { this.logger.info("open"); try { targetDataLine.open(this.finalFormat, this.audioBufferSize); } catch (LineUnavailableException ex) { this.logger.severe(new StringBuilder().append("Can't open microphone ").append(Throwable.instancehelper_getMessage(ex)).toString()); return(false); } this.audioStream = new AudioInputStream(targetDataLine); if (this.doConversion) { this.audioStream = AudioSystem.getAudioInputStream(this.desiredFormat, this.audioStream); if (!Microphone.assertionsDisabled && this.audioStream == null) { throw new AssertionError(); } } float num = (float)this.msecPerRead / 1000f; this.frameSizeInBytes = this.audioStream.getFormat().getSampleSizeInBits() / 8 * ByteCodeHelper.f2i(num * this.audioStream.getFormat().getSampleRate()) * this.desiredFormat.getChannels(); this.logger.info(new StringBuilder().append("Frame size: ").append(this.frameSizeInBytes).append(" bytes").toString()); return(true); } return(true); } this.logger.severe("Can't find microphone"); return(false); }
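// Worked example for the frame-size computation in open() above, using assumed values rather than
// anything fixed by the code: with 16-bit samples (2 bytes per value), a 16000 Hz converted stream,
// one channel and msecPerRead = 10, the per-read buffer size is
// frameSizeInBytes = (16 / 8) * f2i(0.010f * 16000f) * 1 = 2 * 160 = 320 bytes.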