Package javax.sound.sampled

Examples of javax.sound.sampled.AudioFormat


        //{
          if (first)
          {
            first = false;
            System.out.println("frequency: "+decoder.getOutputFrequency() + ", channels: " + decoder.getOutputChannels());
            startOutput(new AudioFormat(decoder.getOutputFrequency(), 16, decoder.getOutputChannels(), true, false));
          }
          line.write(output.getBuffer(), 0, length);
          bitstream.closeFrame();
          header = bitstream.readFrame();
          //System.out.println("Mem:"+(rt.totalMemory() - rt.freeMemory())+"/"+rt.totalMemory());
View Full Code Here


        return sequencer;
      } else {
        final AudioInputStream ais = AudioSystem.getAudioInputStream(new File(
            fileName));

        final AudioFormat format = ais.getFormat();
        final DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);

        if(AudioSystem.isLineSupported(info)) {
          final SourceDataLine line = (SourceDataLine) AudioSystem.getLine(info);

          line.open(format);
          line.start();

          new Thread("Reminder audio playing") {
            private boolean stopped;
            @Override
            public void run() {
              byte[] myData = new byte[1024 * format.getFrameSize()];
              int numBytesToRead = myData.length;
              int numBytesRead = 0;
              int total = 0;
              int totalToRead = (int) (format.getFrameSize() * ais.getFrameLength());
              stopped = false;

              line.addLineListener(new LineListener() {
                public void update(LineEvent event) {
                  if(line != null && !line.isRunning()) {
View Full Code Here

   * @param ais AudioInputStream to read from
   * @return WaveData containing data, or null if a failure occurred
   */
  public static WaveData create(AudioInputStream ais) {
    //get format of data
    AudioFormat audioformat = ais.getFormat();

    // get channels
    int channels = 0;
    if (audioformat.getChannels() == 1) {
      if (audioformat.getSampleSizeInBits() == 8) {
        channels = AL10.AL_FORMAT_MONO8;
      } else if (audioformat.getSampleSizeInBits() == 16) {
        channels = AL10.AL_FORMAT_MONO16;
      } else {
        assert false : "Illegal sample size";
      }
    } else if (audioformat.getChannels() == 2) {
      if (audioformat.getSampleSizeInBits() == 8) {
        channels = AL10.AL_FORMAT_STEREO8;
      } else if (audioformat.getSampleSizeInBits() == 16) {
        channels = AL10.AL_FORMAT_STEREO16;
      } else {
        assert false : "Illegal sample size";
      }
    } else {
      assert false : "Only mono or stereo is supported";
    }

    //read data into buffer
    ByteBuffer buffer = null;
    try {
      int available = ais.available();
      if(available <= 0) {
        available = ais.getFormat().getChannels() * (int) ais.getFrameLength() * ais.getFormat().getSampleSizeInBits() / 8;
      }
      byte[] buf = new byte[ais.available()];
      int read = 0, total = 0;
      while ((read = ais.read(buf, total, buf.length - total)) != -1
        && total < buf.length) {
        total += read;
      }
      buffer = convertAudioBytes(buf, audioformat.getSampleSizeInBits() == 16);
    } catch (IOException ioe) {
      return null;
    }


    //create our result
    WaveData wavedata =
      new WaveData(buffer, channels, (int) audioformat.getSampleRate());

    //close stream
    try {
      ais.close();
    } catch (IOException ioe) {
View Full Code Here

         * information about the format of the audio data. This information
         * includes the sampling frequency, the number of channels and the size
         * of the samples. This information is needed to ask Java Sound for a
         * suitable output line for this audio file.
         */
        AudioFormat audioFormat = audioInputStream.getFormat();

        /*
         * Asking for a line is a rather tricky thing. We have to construct an
         * Info object that specifies the desired properties for the line.
         * First, we have to say which kind of line we want. The possibilities
View Full Code Here

    /* For simplicity, the audio data format used for recording
       is hardcoded here. We use PCM 44.1 kHz, 16 bit signed,
       stereo.
    */
    AudioFormat  audioFormat = new AudioFormat(
      AudioFormat.Encoding.PCM_SIGNED,
      44100.0F, 16, 2, 4, 44100.0F, false);

    /* Now, we are trying to get a TargetDataLine. The
       TargetDataLine is used later to read audio data from it.
View Full Code Here

        try {
          final InputStream is = CheckSounds.class.getClassLoader().getResourceAsStream(
              soundBase + "/" + filename);
          final AudioInputStream ais = AudioSystem.getAudioInputStream(is);
          final AudioFormat format = ais.getFormat();
          final String formatString = format.toString();

          if (TESTPLAY_SAMPLES) {
            // testplay the sound
            final DataLine.Info info = new DataLine.Info(Clip.class, format);
            if (defaultMixer.isLineSupported(info)) {
              AudioInputStream playStream = ais;
              final AudioFormat defaultFormat = new AudioFormat(
                  format.getSampleRate(), 16, 1, false, true);
              if (AudioSystem.isConversionSupported(
                  defaultFormat, format)) {
                playStream = AudioSystem.getAudioInputStream(
                    defaultFormat, ais);
View Full Code Here

              //Cut away the piece at the end
             
              File outputFullWavCuttedFile = new File(outputFullWav);
             
              AudioInputStream aInputStream = AudioSystem.getAudioInputStream(outputFullWavCuttedFile);
              AudioFormat aFormat = aInputStream.getFormat();
              long frameLength = aInputStream.getFrameLength();
              float frameRate = aFormat.getFrameRate();
             
              double audioLength = Math.round(frameLength / frameRate);
             
              String newLength = ""+(new Float(audioLength)-((new Float(Math.abs(this.leftSideTime)))/1000));
             
              log.debug("newLength :newLength: "+newLength);
             
              String temporaryFullWaveAdd = outputFullWav;
             
              String hashFileFullNameAdd = flvRecordingMetaData.getStreamName()
                      + "_FULL_WAVE_CUT_LEFT.wav";
              String outputFullWavAdd = streamFolderName + hashFileFullNameAdd;
           
              String[] argv_add_sox = null;
               
              if (this.leftSideTime > 0) { 
                argv_add_sox = new String[] { this.getPathToSoX(),
                          temporaryFullWaveAdd, outputFullWavAdd, "trim",
                          "0",newLength };
              } else {
                argv_add_sox = new String[] { this.getPathToSoX(),
                    temporaryFullWaveAdd, outputFullWavAdd, "trim",
                    cutSecond,""+audioLength };
              }
             
              log.debug("START addPadAddStringToWaves ################# ");
              String padAddString = "";
              for (int i = 0; i < argv_add_sox.length; i++) {
                padAddString += " "+argv_add_sox[i];
                  log.debug(" i " + i + " argv-i " + argv_add_sox[i]);
              }
              log.debug("padAddString :LEFT: "+padAddString);
              log.debug("END addPadAddStringToWaves ################# ");
             
              returnLog.add(GenerateSWF.executeScript("addPadAddStringToAudio",argv_add_sox));
             
              outputFullWav = outputFullWavAdd;
              hashFileFullName = hashFileFullNameAdd;
             
            }
           
          } 
         
          if (flvRecordingMetaData.getInteriewPodId() == 2) {
            //Right
           
            if (this.rightSideTime != 0) {
              String temporaryFullWave = outputFullWav;
             
              String hashFileFullNameCut = flvRecordingMetaData.getStreamName()
                              + "_FULL_WAVE_ADDED_RIGHT.wav";
              String outputFullWavCut = streamFolderName + hashFileFullNameCut;
             
              String cutSecond = ""+((new Float(Math.abs(this.rightSideTime)))/1000);
             
              String[] argv_cut_sox = null;
              if (this.rightSideTime > 0) {
                argv_cut_sox = new String[] { this.getPathToSoX(),
                            temporaryFullWave, outputFullWavCut, "pad",
                            cutSecond,"0" };
              } else {
                argv_cut_sox = new String[] { this.getPathToSoX(),
                    temporaryFullWave, outputFullWavCut, "pad",
                    "0",cutSecond };
              }
             
              log.debug("START addPadCutStringToWaves ################# ");
              String padCutString = "";
              for (int i = 0; i < argv_cut_sox.length; i++) {
                padCutString += " "+argv_cut_sox[i];
                //log.debug(" i " + i + " argv-i " + argv_sox[i]);
              }
              log.debug("padCutString :RIGHT: "+padCutString);
              log.debug("END addPadCutStringToWaves ################# ");
 
              returnLog.add(GenerateSWF.executeScript("addPadCutStringToAudio",argv_cut_sox));
           
              outputFullWav = outputFullWavCut;
              hashFileFullName = hashFileFullNameCut;
             
              //Cut away the piece at the end
             
              File outputFullWavCuttedFile = new File(outputFullWav);
             
              AudioInputStream aInputStream = AudioSystem.getAudioInputStream(outputFullWavCuttedFile);
              AudioFormat aFormat = aInputStream.getFormat();
              long frameLength = aInputStream.getFrameLength();
              float frameRate = aFormat.getFrameRate();
             
              double audioLength = Math.round(frameLength / frameRate);
             
              String newLength = ""+(new Float(audioLength)-((new Float(Math.abs(this.leftSideTime)))/1000));
             
View Full Code Here

        });
    }

  public static void main(String[] args)
  {
    AudioFormat     format    = new AudioFormat(44100, 16, 2, true, false);
    DeviceEvaluator evaluator = new DeviceEvaluator();
    evaluator.setRating(Pattern.compile(".*PulseAudio.*")             , null, 1);
    evaluator.setRating(Pattern.compile(".*Java Sound Audio Engine.*"), null,-1);
   
    List<DeviceEvaluator.Device> list = evaluator.createDeviceList(format);
View Full Code Here

  public static void wavwrite(double[] d, int sr, String filename)
  {
    try
    {
      AudioWriter aw = new AudioWriter(new File(filename),
          new AudioFormat((int) sr, 16, 1, true, false),
          AudioFileFormat.Type.WAVE);
      aw.write(d, d.length);
      aw.close();
    }
    catch (IOException e)
View Full Code Here

  {
    // Create Extractor
    // ais = openInputStream(fn);
    // downsample to 8kHz
    AudioReader audioReader = AudioReaderFactory.getAudioReader(
            fn, new AudioFormat(DpweOnsetDetector.sr, bitsPerSamp, 1, signed,
                                bigEndian));

    // One sample per frame because we converted the file to mono.
    long sampleLength = audioReader.getFrameLength();
    // the mp3/flac decoders don't like to tell us the frame
View Full Code Here

TOP

Related Classes of javax.sound.sampled.AudioFormat

Copyright © 2018 www.massapicom. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle Inc. Contact coftware#gmail.com.