Package javax.sound.sampled

Examples of javax.sound.sampled.TargetDataLine
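A TargetDataLine is a DataLine from which captured audio can be read, typically from a microphone or line-in. Every example below follows the same basic pattern: describe the desired line with a DataLine.Info, obtain it from AudioSystem (or from a specific Mixer), open it with an AudioFormat, start it, and read bytes from it in a loop. A minimal, self-contained sketch of that pattern (the format, buffer size, and one-second limit are illustrative values, not taken from any of the examples):

import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.LineUnavailableException;
import javax.sound.sampled.TargetDataLine;

public class CaptureSketch {
  public static void main(String[] args) throws LineUnavailableException {
    // 44.1 kHz, 16-bit, mono, signed, little-endian -- an arbitrary example format
    AudioFormat format = new AudioFormat(44100.0f, 16, 1, true, false);
    DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);
    if (!AudioSystem.isLineSupported(info)) {
      throw new LineUnavailableException("no TargetDataLine supports " + format);
    }
    TargetDataLine line = (TargetDataLine) AudioSystem.getLine(info);
    line.open(format);
    line.start(); // capture does not begin until start() is called
    byte[] buffer = new byte[4096]; // a multiple of the 2-byte frame size
    int total = 0;
    while (total < 44100 * 2) { // roughly one second of 16-bit mono audio
      int n = line.read(buffer, 0, buffer.length);
      if (n <= 0) break;
      total += n; // a real program would store or process buffer[0..n) here
    }
    line.stop();
    line.close();
  }
}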


      short[] inputBuffer = new short[BLOCK_SIZE * NUM_INPUT_CHANNELS];
      short[] outputBuffer = new short[BLOCK_SIZE * NUM_OUTPUT_CHANNELS];
      byte[] bInputBuffer = new byte[inputBuffer.length * 2]; // 2 bytes per sample
      byte[] bOutputBuffer = new byte[outputBuffer.length * 2];
  
      TargetDataLine targetDataLine = null;
      SourceDataLine sourceDataLine = null;
      try {
        // TODO(mhroth): ensure that stereo input and output lines are actually being returned
        // by the system.
       
        // open the audio input (line-in or microphone)
        targetDataLine = (TargetDataLine) AudioSystem.getLine(inputLineInfo);
        targetDataLine.open(inputAudioFormat, bInputBuffer.length);
        targetDataLine.start();
       
        // open the audio output
        sourceDataLine = (SourceDataLine) AudioSystem.getLine(outputLineInfo);
        sourceDataLine.open(outputAudioFormat, bOutputBuffer.length);
        sourceDataLine.start();
      } catch (LineUnavailableException lue) {
        lue.printStackTrace(System.err);
        System.exit(1);
      }
     
      // load the Pd patch
      File pdFile = new File(args[0]);
      ZenGarden pdPatch = null;
      ZenGardenAdapter zgAdapter = new ZenGardenAdapter();
      try {
        pdPatch = new ZenGarden(pdFile, BLOCK_SIZE, NUM_INPUT_CHANNELS, NUM_OUTPUT_CHANNELS,
            (float) SAMPLE_RATE);
        pdPatch.addListener(zgAdapter);
      } catch (NativeLoadException nle) {
        nle.printStackTrace(System.err);
        System.exit(2);
      }
     
      while (shouldContinuePlaying) {
        // run the patch in an infinite loop
        targetDataLine.read(bInputBuffer, 0, bInputBuffer.length); // read from the input
        // convert the byte buffer to a short buffer
        for (int i = 0, j = 0; i < inputBuffer.length; i++) {
          inputBuffer[i] = (short) (((int) bInputBuffer[j++]) << 8);
          inputBuffer[i] |= ((short) bInputBuffer[j++]) & 0x00FF;
        }
       
        pdPatch.process(inputBuffer, outputBuffer);
       
        // convert short buffer to byte buffer
        for (int i = 0, j = 0; i < outputBuffer.length; i++) {
          bOutputBuffer[j++] = (byte) ((outputBuffer[i] & 0xFF00) >> 8);
          bOutputBuffer[j++] = (byte) (outputBuffer[i] & 0x00FF);
        }
        // write to the output
        sourceDataLine.write(bOutputBuffer, 0, bOutputBuffer.length);
      }
     
      // release all audio resources
      targetDataLine.drain();
      targetDataLine.close();
      sourceDataLine.drain();
      sourceDataLine.close();
    }
  }
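The two loops above pack and unpack 16-bit big-endian samples by hand (high byte first). Assuming that same layout, the conversion could also be written with java.nio.ByteBuffer, which makes the byte order explicit; this is only an equivalent sketch, not part of the original program:

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

// bytes -> shorts, e.g. bytesToShortsBE(bInputBuffer, inputBuffer)
static void bytesToShortsBE(byte[] src, short[] dst) {
  ByteBuffer.wrap(src).order(ByteOrder.BIG_ENDIAN).asShortBuffer().get(dst);
}

// shorts -> bytes, e.g. shortsToBytesBE(outputBuffer, bOutputBuffer)
static void shortsToBytesBE(short[] src, byte[] dst) {
  ByteBuffer.wrap(dst).order(ByteOrder.BIG_ENDIAN).asShortBuffer().put(src);
}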


   * @param af The AudioFormat to stream with.
   * @throws LineUnavailableException If the TargetDataLine cannot be opened or streamed.
   */
  private void upChannel(String urlStr, TargetDataLine tl, AudioFormat af) throws LineUnavailableException {
    final String murl = urlStr;
    final TargetDataLine mtl = tl;
    final AudioFormat maf = af;
    if(!mtl.isOpen()){
      mtl.open(maf);
      mtl.start();
    }
    new Thread ("Upstream Thread") {
      public void run() {
        openHttpsPostConnection(murl, mtl, maf);
      }
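The fragment opens and starts the TargetDataLine, then hands it to openHttpsPostConnection on a background thread; that method's body is not shown here. As a rough idea of what such an uploader might look like, the following sketch wraps the line in an AudioInputStream and streams it to an HTTP(S) endpoint with a chunked POST. The method name, the chunked-streaming approach, and the buffer size are all assumptions, not the project's actual code:

import java.io.IOException;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.TargetDataLine;

static void postCapturedAudio(String urlStr, TargetDataLine line) throws IOException {
  HttpURLConnection conn = (HttpURLConnection) new URL(urlStr).openConnection();
  conn.setRequestMethod("POST");
  conn.setDoOutput(true);
  conn.setChunkedStreamingMode(0); // stream the body instead of buffering it all
  try (AudioInputStream audio = new AudioInputStream(line);
       OutputStream out = conn.getOutputStream()) {
    byte[] buffer = new byte[4096];
    int n;
    while ((n = audio.read(buffer)) > 0) { // keeps reading until the line is stopped or closed
      out.write(buffer, 0, n);
    }
  }
  conn.getResponseCode(); // complete the request
}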

  private void listenSound(long songId, boolean isMatching)
      throws LineUnavailableException, IOException,
      UnsupportedAudioFileException {

    AudioFormat formatTmp = null;
    TargetDataLine lineTmp = null;
    String filePath = fileTextField.getText();
    AudioInputStream din = null;
    AudioInputStream outDin = null;
    PCM2PCMConversionProvider conversionProvider = new PCM2PCMConversionProvider();
    boolean isMicrophone = false;

    if (filePath == null || filePath.equals("") || isMatching) {

      formatTmp = getFormat(); // fill the AudioFormat with the desired settings
      DataLine.Info info = new DataLine.Info(TargetDataLine.class,
          formatTmp);
      lineTmp = (TargetDataLine) AudioSystem.getLine(info);
      isMicrophone = true;
    } else {
      AudioInputStream in;

      if (filePath.contains("http")) {
        URL url = new URL(filePath);
        in = AudioSystem.getAudioInputStream(url);
      } else {
        File file = new File(filePath);
        in = AudioSystem.getAudioInputStream(file);
      }

      AudioFormat baseFormat = in.getFormat();

      System.out.println(baseFormat.toString());

      AudioFormat decodedFormat = new AudioFormat(
          AudioFormat.Encoding.PCM_SIGNED,
          baseFormat.getSampleRate(), 16, baseFormat.getChannels(),
          baseFormat.getChannels() * 2, baseFormat.getSampleRate(),
          false);

      din = AudioSystem.getAudioInputStream(decodedFormat, in);

      if (!conversionProvider.isConversionSupported(getFormat(),
          decodedFormat)) {
        System.out.println("Conversion is not supported");
      }

      System.out.println(decodedFormat.toString());

      outDin = conversionProvider.getAudioInputStream(getFormat(), din);
      formatTmp = decodedFormat;

      DataLine.Info info = new DataLine.Info(TargetDataLine.class,
          formatTmp);
      lineTmp = (TargetDataLine) AudioSystem.getLine(info);
    }

    final AudioFormat format = formatTmp;
    final TargetDataLine line = lineTmp;
    final boolean isMicro = isMicrophone;
    final AudioInputStream outDinSound = outDin;

    if (isMicro) {
      try {
        line.open(format);
        line.start();
      } catch (LineUnavailableException e) {
        e.printStackTrace();
      }
    }

    final long sId = songId;
    final boolean isMatch = isMatching;

    Thread listeningThread = new Thread(new Runnable() {
      public void run() {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        running = true;
        int n = 0;
        byte[] buffer = new byte[1024];

        try {
          while (running) {
            n++;
            if (n > 1000)
              break;

            int count = 0;
            if (isMicro) {
              count = line.read(buffer, 0, 1024);
            } else {
              count = outDinSound.read(buffer, 0, 1024);
            }
            if (count > 0) {
              out.write(buffer, 0, count);
            }
          }

          byte b[] = out.toByteArray();
          for (int i = 0; i < b.length; i++) {
            System.out.println(b[i]);
          }

          try {
            makeSpectrum(out, sId, isMatch);

            FileWriter fstream = new FileWriter("out.txt");
            BufferedWriter outFile = new BufferedWriter(fstream);

            // write the same captured bytes to the text file
            for (int i = 0; i < b.length; i++) {
              outFile.write("" + b[i] + ";");
            }
            outFile.close();

          } catch (Exception e) {
            System.err.println("Error: " + e.getMessage());
          }

          out.close();
          line.close();
        } catch (IOException e) {
          System.err.println("I/O problems: " + e);
          System.exit(-1);
        }
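Dumping every captured byte to System.out and to a ';'-separated text file is useful only for debugging. Assuming the captured bytes really are PCM in the line's AudioFormat (the format variable above), they could instead be saved as a playable WAV file with AudioSystem.write, which is the approach the later examples on this page take:

// sketch only; needs java.io.File, java.io.ByteArrayInputStream and the javax.sound.sampled imports
byte[] captured = out.toByteArray();
AudioInputStream stream = new AudioInputStream(
    new ByteArrayInputStream(captured), format,
    captured.length / format.getFrameSize()); // length is given in frames, not bytes
AudioSystem.write(stream, AudioFileFormat.Type.WAVE, new File("capture.wav"));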


     */
    public static TargetDataLine getTargetDataLine(AudioFormat audio_format,
                                                   org.vocvark.jAudioTools.AudioEventLineListener listener)
            throws Exception {
        DataLine.Info data_line_info = new DataLine.Info(TargetDataLine.class, audio_format);
        TargetDataLine target_data_line = (TargetDataLine) AudioSystem.getLine(data_line_info);
        if (listener != null)
            target_data_line.addLineListener(listener);
        target_data_line.open(audio_format);
        target_data_line.start();
        return target_data_line;
    }
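A call site for this helper might look like the following; the format and the null listener are illustrative, and the surrounding method would have to declare or handle the Exception that getTargetDataLine throws:

AudioFormat format = new AudioFormat(44100.0f, 16, 1, true, false);
TargetDataLine line = getTargetDataLine(format, null); // no LineListener attached
byte[] chunk = new byte[4096];
int read = line.read(chunk, 0, chunk.length); // the line is already open and started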

    public static TargetDataLine getTargetDataLine(AudioFormat audio_format,
                                                   Mixer mixer,
                                                   org.vocvark.jAudioTools.AudioEventLineListener listener)
            throws Exception {
        DataLine.Info data_line_info = new DataLine.Info(TargetDataLine.class, audio_format);
        TargetDataLine target_data_line = (TargetDataLine) mixer.getLine(data_line_info);
        if (listener != null)
            target_data_line.addLineListener(listener);
        target_data_line.open(audio_format);
        target_data_line.start();
        return target_data_line;
    }
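The Mixer-based overload matters on machines with several capture devices. A suitable Mixer is usually found by scanning AudioSystem.getMixerInfo() for one that supports the desired line; a hedged sketch of that selection (variable names are illustrative):

AudioFormat format = new AudioFormat(44100.0f, 16, 1, true, false);
DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);
Mixer chosen = null;
for (Mixer.Info mixerInfo : AudioSystem.getMixerInfo()) {
  Mixer mixer = AudioSystem.getMixer(mixerInfo);
  if (mixer.isLineSupported(info)) { // this mixer can supply a matching TargetDataLine
    chosen = mixer;
    break;
  }
}
// if chosen != null: getTargetDataLine(format, chosen, null);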

  @Override
  public Byte[] getData() {
    AudioFormat format = new AudioFormat(1000.0f, 16, 2, true, true);
   
    TargetDataLine line;
    DataLine.Info info = new DataLine.Info(TargetDataLine.class,
        format);
   
    if (!AudioSystem.isLineSupported(info)) {
      throw new JbrainException("Line not supported.");
    }
   
    byte[] data = new byte[dataSize];
   
    try {
      line = (TargetDataLine) AudioSystem.getLine(info);
      line.open(format);
      line.start(); // capture does not begin until the line has been started
      line.read(data, 0, data.length);
      line.flush();
      line.close();
    } catch (LineUnavailableException ex) {
      throw new JbrainException(ex.getMessage());
    }
   
    return ArrayUtils.toObject(data);
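TargetDataLine.read normally blocks until the requested number of bytes has arrived, but it returns early with a smaller count if the line is stopped, closed, drained, or flushed. When the buffer must be filled completely, the read is usually looped; a sketch of such a loop (not part of the original getData()):

int filled = 0;
while (filled < data.length) {
  int n = line.read(data, filled, data.length - filled);
  if (n <= 0) break; // the line was stopped or closed before the buffer filled
  filled += n;
}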

   
    DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);   
    if (AudioSystem.isLineSupported(info)) {
      // Obtain and open the line.   
      try {
        TargetDataLine line = (TargetDataLine) AudioSystem.getLine(info);
        line.open(format);

        // The line has been obtained and opened; collect the captured
        // audio into a growing byte array.
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        int numBytesRead;
        byte[] data = new byte[line.getBufferSize() / 5];

        // Begin audio capture.
        line.start();

        // Keep reading until at least bytesRequired bytes have been captured.
        while (out.size() < bytesRequired) {
          // Read the next chunk of data from the TargetDataLine.
          numBytesRead = line.read(data, 0, data.length);
          // Save this chunk of data.
          out.write(data, 0, numBytesRead);
        }
       
        WaveFile test = WaveGenerator.generateWaveFromRaw16bitPcm(out.toByteArray());
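The fragment ends before the line is released. Once enough data has been collected, the capture line would normally be stopped and closed, as the other examples on this page do; WaveGenerator.generateWaveFromRaw16bitPcm is a project-specific helper, not part of javax.sound.sampled.

line.stop();  // stop capturing
line.close(); // release the system resource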

    try {
      AudioFormat linearFormat = new AudioFormat(1200, 8, 2, true, false);

      DataLine.Info info = new DataLine.Info(TargetDataLine.class,
          linearFormat);
      TargetDataLine targetDataLine = (TargetDataLine) AudioSystem
          .getLine(info);

      targetDataLine.open(linearFormat);
      targetDataLine.start();

      AudioInputStream linearStream = new AudioInputStream(targetDataLine);
      linearStream.read(voiceData, 0, voiceData.length);
      targetDataLine.stop();
      targetDataLine.close();

      File audioFile = new File("sample.wav");
      ByteArrayInputStream baiStream = new ByteArrayInputStream(voiceData);
      AudioInputStream aiStream = new AudioInputStream(baiStream,
          linearFormat, voiceData.length / linearFormat.getFrameSize()); // length is in frames, not bytes
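The fragment stops just before the stream is written out; presumably the next step is the same call the final example on this page uses:

AudioSystem.write(aiStream, AudioFileFormat.Type.WAVE, audioFile);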

     * will have. Furthermore, we can give Java Sound a hint about how big
     * the internal buffer for the line should be. This isn't used here,
     * signaling that we don't care about the exact size. Java Sound will
     * use some default value for the buffer size.
     */
    TargetDataLine targetDataLine = null;
    DataLine.Info info = new DataLine.Info(TargetDataLine.class,
        audioFormat, nBufferSize);
    try {
      if (strMixerName != null) {
        Mixer.Info mixerInfo = getMixerInfo(strMixerName);
        if (mixerInfo == null) {
          out("AudioCommon.getTargetDataLine(): mixer not found: "
              + strMixerName);
          return null;
        }
        Mixer mixer = AudioSystem.getMixer(mixerInfo);
        targetDataLine = (TargetDataLine) mixer.getLine(info);
      } else {
        if (DEBUG) {
          out("AudioCommon.getTargetDataLine(): using default mixer");
        }
        targetDataLine = (TargetDataLine) AudioSystem.getLine(info);
      }

      /*
       * The line is there, but it is not yet ready to receive audio data.
       * We have to open the line.
       */
      if (DEBUG) {
        out("AudioCommon.getTargetDataLine(): opening line...");
      }
      targetDataLine.open(audioFormat, nBufferSize);
      if (DEBUG) {
        out("AudioCommon.getTargetDataLine(): opened line");
      }
    } catch (LineUnavailableException e) {
      if (DEBUG) {

    // 250: completes 20 samples in 8.95 seconds
    // 300: completes 20 samples in 9.40 seconds
    try {
      DataLine.Info info = new DataLine.Info(TargetDataLine.class,
          soundFormat);
      TargetDataLine targetDataLine = (TargetDataLine) AudioSystem
          .getLine(info);

      targetDataLine.open(soundFormat);
      targetDataLine.start();

      AudioInputStream linearStream = new AudioInputStream(targetDataLine);
      linearStream.read(voiceData, 0, voiceData.length);
      analyzeArray(voiceData);
      targetDataLine.stop();
      targetDataLine.close();

      ByteArrayInputStream baiStream = new ByteArrayInputStream(voiceData);
      AudioInputStream aiStream = new AudioInputStream(baiStream,
          soundFormat, voiceData.length / soundFormat.getFrameSize()); // length is in frames, not bytes
      AudioSystem.write(aiStream, AudioFileFormat.Type.WAVE, soundFile);
