Examples of AudioInputStream


Examples of javax.sound.sampled.AudioInputStream

          }
        }.start();

        return sequencer;
      } else {
        final AudioInputStream ais = AudioSystem.getAudioInputStream(new File(
            fileName));

        final AudioFormat format = ais.getFormat();
        final DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);

        if(AudioSystem.isLineSupported(info)) {
          final SourceDataLine line = (SourceDataLine) AudioSystem.getLine(info);

          line.open(format);
          line.start();

          new Thread("Reminder audio playing") {
            private boolean stopped;
            @Override
            public void run() {
              byte[] myData = new byte[1024 * format.getFrameSize()];
              int numBytesToRead = myData.length;
              int numBytesRead = 0;
              int total = 0;
              int totalToRead = (int) (format.getFrameSize() * ais.getFrameLength());
              stopped = false;

              line.addLineListener(new LineListener() {
                public void update(LineEvent event) {
                  if(line != null && !line.isRunning()) {
                    stopped = true;
                    line.close();
                    try {
                      ais.close();
                    } catch (Exception ee) {
                      // ignore
                    }
                  }
                }
              });

              try {
                while (total < totalToRead && !stopped) {
                  numBytesRead = ais.read(myData, 0, numBytesToRead);

                  if (numBytesRead == -1) {
                    break;
                  }

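The read/write loop above is cut off before the data is written to the line and before the line is drained and closed. A self-contained sketch of the complete pattern is shown below; the class name, the assumption of a plain PCM file, and the omission of the stop flag and LineListener are ours, not taken from the original code.

// Minimal blocking playback: read from the AudioInputStream and hand the
// bytes to the SourceDataLine until EOF, then drain and release the line.
// Assumes a PCM file whose format reports a real frame size.
import java.io.File;
import java.io.IOException;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.LineUnavailableException;
import javax.sound.sampled.SourceDataLine;
import javax.sound.sampled.UnsupportedAudioFileException;

public class BlockingPlaybackSketch {
  public static void play(File file)
      throws IOException, UnsupportedAudioFileException, LineUnavailableException {
    try (AudioInputStream ais = AudioSystem.getAudioInputStream(file)) {
      AudioFormat format = ais.getFormat();
      DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
      SourceDataLine line = (SourceDataLine) AudioSystem.getLine(info);
      line.open(format);
      line.start();

      byte[] buffer = new byte[1024 * format.getFrameSize()];
      int read;
      while ((read = ais.read(buffer, 0, buffer.length)) != -1) {
        // write() blocks until the bytes have been queued on the line
        line.write(buffer, 0, read);
      }
      // let whatever is still buffered play out, then release the line
      line.drain();
      line.close();
    }
  }
}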

Examples of javax.sound.sampled.AudioInputStream

    }
  }
 
  private void executeStreamCreationTest() {
    try {
      AudioInputStream ais = AudioSystem.getAudioInputStream(new File(filePath));
      WaveData wd = WaveData.create(ais);
      if(wd == null) {
        System.out.println("executeStreamCreationTest::success");
      }
    } catch (Exception e) {

Examples of javax.sound.sampled.AudioInputStream

 

  private void executeMidStreamCreationTest() {
    try {
     
      AudioInputStream ais = AudioSystem.getAudioInputStream(WaveDataTest.class.getClassLoader().getResource(filePath));     
      int totalSize = ais.getFormat().getChannels() * (int) ais.getFrameLength() * ais.getFormat().getSampleSizeInBits() / 8;
     
      // skip 1/4 of the stream
      int skip = totalSize / 4;
      long skipped = ais.skip(skip);
     
      WaveData wd = WaveData.create(ais);
      if(wd == null) {
        System.out.println("executeMidStreamCreationTest::success");
      }
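
A detail worth noting for the skip above: AudioInputStream.skip() counts bytes, may skip fewer bytes than requested, and does not by itself keep the position frame-aligned. A more defensive variant is sketched below; the helper class and method names are ours, not part of the test.

import java.io.IOException;
import javax.sound.sampled.AudioInputStream;

final class SkipUtil {
  // Skips roughly the requested number of bytes, rounded down to whole
  // frames, looping because skip() may skip less than asked for.
  static long skipFrames(AudioInputStream ais, long bytesToSkip) throws IOException {
    int frameSize = ais.getFormat().getFrameSize();
    if (frameSize <= 0) {
      frameSize = 1;              // frame size unknown for this format
    }
    long remaining = (bytesToSkip / frameSize) * frameSize;
    long total = 0;
    while (remaining > 0) {
      long skipped = ais.skip(remaining);
      if (skipped <= 0) {
        break;                    // end of stream or nothing to skip right now
      }
      total += skipped;
      remaining -= skipped;
    }
    return total;
  }
}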

Examples of javax.sound.sampled.AudioInputStream

        // This code was taken from jsresources.org. Thanks!

        /*
         * We have to read in the sound file.
         */
        AudioInputStream audioInputStream = null;
        try {
            audioInputStream = AudioSystem.getAudioInputStream(soundFile);
        } catch (Exception e) {
            /*
             * In case of an exception, we dump the exception including the
             * stack trace to the log. Without a stream there is nothing to
             * play, so we bail out just below.
             */
            com.valhalla.Logger.logException(e);
        }

        if (audioInputStream == null) {
            player.nullIt();

            return;
        }

        /*
         * From the AudioInputStream, i.e. from the sound file, we fetch
         * information about the format of the audio data. This information
         * includes the sampling frequency, the number of channels and the
         * size of the samples. It is needed to ask Java Sound for a suitable
         * output line for this audio file.
         */
        AudioFormat audioFormat = audioInputStream.getFormat();

        /*
         * Asking for a line is a rather tricky thing. We have to construct an
         * Info object that specifies the desired properties for the line.
         * First, we have to say which kind of line we want. The possibilities
         * are: SourceDataLine (for playback), Clip (for repeated playback) and
         * TargetDataLine (for recording). Here, we want to do normal playback,
         * so we ask for a SourceDataLine. Then, we have to pass an AudioFormat
         * object, so that the Line knows which format the data passed to it
         * will have. Furthermore, we could give Java Sound a hint about how
         * big the internal buffer for the line should be. We don't do that
         * here, signaling that we don't care about the exact size, so Java
         * Sound will use some default value for the buffer size.
         */
        SourceDataLine line = null;
        DataLine.Info info = new DataLine.Info(SourceDataLine.class,
                audioFormat);
        try {
            line = (SourceDataLine) AudioSystem.getLine(info);

            /*
             * The line is there, but it is not yet ready to receive audio data.
             * We have to open the line.
             */
            line.open(audioFormat);
        } catch (LineUnavailableException e) {
            com.valhalla.Logger.logException(e);
        } catch (Exception e) {
            com.valhalla.Logger.logException(e);
        }

        if (line == null) {
            player.nullIt();

            return;
        }

        /*
         * Still not enough. The line can now receive data, but it will not
         * pass it on to the audio output device (which means to your sound
         * card) until it has been started.
         */
        line.start();

        /*
         * Ok, finally the line is prepared. Now comes the real job: we have to
         * write data to the line. We do this in a loop. First, we read data
         * from the AudioInputStream to a buffer. Then, we write from this
         * buffer to the Line. This is done until the end of the file is
         * reached, which is detected by a return value of -1 from the read
         * method of the AudioInputStream.
         */
        int nBytesRead = 0;
        byte[] abData = new byte[EXTERNAL_BUFFER_SIZE];
        while (nBytesRead != -1) {
            try {
                nBytesRead = audioInputStream.read(abData, 0, abData.length);
            } catch (IOException e) {
                com.valhalla.Logger.logException(e);
            }
            if (nBytesRead >= 0) {
                int nBytesWritten = line.write(abData, 0, nBytesRead);
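
The loop above is cut off before the final drain and close. The long comment earlier also mentions Clip as the choice for repeated playback of short sounds; a minimal sketch of that alternative follows, with a placeholder file name and no relation to the original class.

import java.io.File;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.Clip;

public class ClipPlaybackSketch {
  public static void main(String[] args) throws Exception {
    AudioInputStream ais = AudioSystem.getAudioInputStream(new File("ding.wav"));
    Clip clip = AudioSystem.getClip();  // ask the default mixer for a Clip
    clip.open(ais);                     // loads the whole stream into memory
    clip.loop(2);                       // play once, then loop back twice more
    // Clip playback is asynchronous; a real program would wait for a STOP
    // LineEvent (or call drain()) before closing the clip and exiting.
  }
}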

Examples of javax.sound.sampled.AudioInputStream

  public SimpleAudioRecorder(TargetDataLine line,
             AudioFileFormat.Type targetType,
             File file)
  {
    m_line = line;
    m_audioInputStream = new AudioInputStream(line);
    m_targetType = targetType;
    m_outputFile = file;
  }
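
The rest of the class is not shown here. The point of wrapping the TargetDataLine in an AudioInputStream is that AudioSystem.write() can then pull the captured data straight into a file until the line is closed. A stand-alone sketch of that pattern follows; the format, file name, and five-second duration are arbitrary choices for the example, not values from SimpleAudioRecorder.

import java.io.File;
import java.io.IOException;
import javax.sound.sampled.AudioFileFormat;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.LineUnavailableException;
import javax.sound.sampled.TargetDataLine;

public class RecordSketch {
  public static void main(String[] args)
      throws LineUnavailableException, IOException, InterruptedException {
    AudioFormat format = new AudioFormat(44100f, 16, 2, true, false); // 44.1 kHz, 16-bit, stereo PCM
    DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);
    final TargetDataLine line = (TargetDataLine) AudioSystem.getLine(info);
    line.open(format);
    line.start();

    final AudioInputStream ais = new AudioInputStream(line);
    Thread writer = new Thread("wav-writer") {
      public void run() {
        try {
          // blocks, reading from the line, until the line is closed
          AudioSystem.write(ais, AudioFileFormat.Type.WAVE, new File("capture.wav"));
        } catch (IOException e) {
          e.printStackTrace();
        }
      }
    };
    writer.start();

    Thread.sleep(5000);   // capture roughly five seconds of audio
    line.stop();
    line.close();         // ends the stream, so AudioSystem.write() returns
    writer.join();
  }
}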

Examples of javax.sound.sampled.AudioInputStream

  }
 
  protected void write(InputStream in, OutputStream out, javax.sound.sampled.AudioFormat javaSoundFormat) throws IOException
  {
    final long lengthInFrames = Integer.MAX_VALUE;  // TODO: fake length; without it, java.io.IOException: stream length not specified is thrown for most formats (WAV, AIFF)
    final AudioInputStream ais = new AudioInputStream(in, javaSoundFormat, lengthInFrames);
    final AudioFileFormat.Type targetFileFormatType = audioFileFormatType;
    final int bytesWritten = AudioSystem.write(ais, targetFileFormatType, out);
    logger.fine("Audio OutputStream bytes written: " + bytesWritten);
  }
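
The TODO points at a real limitation: when the frame length is not known, AudioSystem.write() reports "stream length not specified" for header-based types such as WAV and AIFF, and passing Integer.MAX_VALUE merely avoids the exception while writing a bogus length field. One possible workaround, sketched below under the assumption that the audio fits in memory (this is not the project's actual code), is to buffer the data first so the exact frame count is known:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import javax.sound.sampled.AudioFileFormat;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;

final class BufferedAudioWriter {
  static int write(InputStream in, OutputStream out, AudioFormat format,
                   AudioFileFormat.Type type) throws IOException {
    // buffer the raw audio so the header can carry an exact frame count
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    byte[] chunk = new byte[8192];
    int n;
    while ((n = in.read(chunk)) != -1) {
      buffer.write(chunk, 0, n);
    }
    byte[] data = buffer.toByteArray();
    long frames = data.length / format.getFrameSize();
    AudioInputStream ais =
        new AudioInputStream(new ByteArrayInputStream(data), format, frames);
    return AudioSystem.write(ais, type, out);
  }
}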

Examples of javax.sound.sampled.AudioInputStream

    {
      super();
   
      // determine format and frame size.
      {
        final AudioInputStream aisForFormat;
        final PullSourceStreamInputStream pssisForFormat;
       
        pssisForFormat = new PullSourceStreamInputStream(pssForFormat);
        aisForFormat = AudioSystem.getAudioInputStream(markSupportedInputStream(pssisForFormat));
        this.javaSoundInputFormat = aisForFormat.getFormat();
        this.frameLength = aisForFormat.getFrameLength();
        this.format = JavaSoundUtils.convertFormat(javaSoundInputFormat);
       
        logger.fine("JavaSoundParser: java sound format: " + javaSoundInputFormat);
        logger.fine("JavaSoundParser: jmf format: " + format);
        logger.fine("JavaSoundParser: Frame length=" + frameLength);
       
        aisForFormat.close();
        pssisForFormat.close();
      }
     
     
      setPssForReadFrame(pssForReadFrame);
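
AudioSystem.getAudioInputStream(InputStream) needs a stream that supports mark/reset, because the installed audio file readers probe the header and then rewind; that is presumably what the markSupportedInputStream(...) helper above provides. A common way to satisfy the requirement, shown as a sketch with our own class name:

import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.UnsupportedAudioFileException;

final class MarkSupport {
  static AudioInputStream open(InputStream in)
      throws IOException, UnsupportedAudioFileException {
    // BufferedInputStream adds mark/reset support when the wrapped stream lacks it
    InputStream probeSafe = in.markSupported() ? in : new BufferedInputStream(in);
    return AudioSystem.getAudioInputStream(probeSafe);
  }
}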

Examples of javax.sound.sampled.AudioInputStream

        }

        try {
          final InputStream is = CheckSounds.class.getClassLoader().getResourceAsStream(
              soundBase + "/" + filename);
          final AudioInputStream ais = AudioSystem.getAudioInputStream(is);
          final AudioFormat format = ais.getFormat();
          final String formatString = format.toString();

          if (TESTPLAY_SAMPLES) {
            // testplay the sound
            final DataLine.Info info = new DataLine.Info(Clip.class, format);
            if (defaultMixer.isLineSupported(info)) {
              AudioInputStream playStream = ais;
              final AudioFormat defaultFormat = new AudioFormat(
                  format.getSampleRate(), 16, 1, false, true);
              if (AudioSystem.isConversionSupported(
                  defaultFormat, format)) {
                playStream = AudioSystem.getAudioInputStream(
                    defaultFormat, ais);
              } else {
                System.out.println("conversion not supported (to "
                    + defaultFormat + ")");
              }

              System.out.println("testplaying " + name + " "
                  + playStream.getFormat());

              final Clip line = (Clip) defaultMixer.getLine(info);
              line.open(playStream);
              line.loop(2);
              final TestLineListener testListener = new TestLineListener();
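
The snippet stops right after the TestLineListener is created, so its implementation is not shown. A listener for this kind of test play commonly blocks until the Clip fires a STOP event; a generic sketch of that idea (not the original TestLineListener) follows.

import javax.sound.sampled.LineEvent;
import javax.sound.sampled.LineListener;

// Lets a test block until the line reports that playback has stopped.
final class WaitForStopListener implements LineListener {
  private boolean stopped;

  public synchronized void update(LineEvent event) {
    if (event.getType() == LineEvent.Type.STOP) {
      stopped = true;
      notifyAll();
    }
  }

  public synchronized void waitUntilStopped() throws InterruptedException {
    while (!stopped) {
      wait();
    }
  }
}

Registering such a listener before line.loop(2) and then calling waitUntilStopped() turns the asynchronous test play into a blocking call.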

Examples of javax.sound.sampled.AudioInputStream

             
              //Cut away the piece at the end
             
              File outputFullWavCuttedFile = new File(outputFullWav);
             
              AudioInputStream aInputStream = AudioSystem.getAudioInputStream(outputFullWavCuttedFile);
              AudioFormat aFormat = aInputStream.getFormat();
              long frameLength = aInputStream.getFrameLength();
              float frameRate = aFormat.getFrameRate();
             
              double audioLength = Math.round(frameLength / frameRate);
             
              String newLength = ""+(new Float(audioLength)-((new Float(Math.abs(this.leftSideTime)))/1000));
             
              log.debug("newLength :newLength: "+newLength);
             
              String temporaryFullWaveAdd = outputFullWav;
             
              String hashFileFullNameAdd = flvRecordingMetaData.getStreamName()
                      + "_FULL_WAVE_CUT_LEFT.wav";
              String outputFullWavAdd = streamFolderName + hashFileFullNameAdd;
           
              String[] argv_add_sox = null;
               
              if (this.leftSideTime > 0) { 
                argv_add_sox = new String[] { this.getPathToSoX(),
                          temporaryFullWaveAdd, outputFullWavAdd, "trim",
                          "0",newLength };
              } else {
                argv_add_sox = new String[] { this.getPathToSoX(),
                    temporaryFullWaveAdd, outputFullWavAdd, "trim",
                    cutSecond,""+audioLength };
              }
             
              log.debug("START addPadAddStringToWaves ################# ");
              String padAddString = "";
              for (int i = 0; i < argv_add_sox.length; i++) {
                padAddString += " "+argv_add_sox[i];
                  log.debug(" i " + i + " argv-i " + argv_add_sox[i]);
              }
              log.debug("padAddString :LEFT: "+padAddString);
              log.debug("END addPadAddStringToWaves ################# ");
             
              returnLog.add(GenerateSWF.executeScript("addPadAddStringToAudio",argv_add_sox));
             
              outputFullWav = outputFullWavAdd;
              hashFileFullName = hashFileFullNameAdd;
             
            }
           
          } 
         
          if (flvRecordingMetaData.getInteriewPodId() == 2) {
            //Right
           
            if (this.rightSideTime != 0) {
              String temporaryFullWave = outputFullWav;
             
              String hashFileFullNameCut = flvRecordingMetaData.getStreamName()
                              + "_FULL_WAVE_ADDED_RIGHT.wav";
              String outputFullWavCut = streamFolderName + hashFileFullNameCut;
             
              String cutSecond = ""+((new Float(Math.abs(this.rightSideTime)))/1000);
             
              String[] argv_cut_sox = null;
              if (this.rightSideTime > 0) {
                argv_cut_sox = new String[] { this.getPathToSoX(),
                            temporaryFullWave, outputFullWavCut, "pad",
                            cutSecond,"0" };
              } else {
                argv_cut_sox = new String[] { this.getPathToSoX(),
                    temporaryFullWave, outputFullWavCut, "pad",
                    "0",cutSecond };
              }
             
              log.debug("START addPadCutStringToWaves ################# ");
              String padCutString = "";
              for (int i = 0; i < argv_cut_sox.length; i++) {
                padCutString += " "+argv_cut_sox[i];
                //log.debug(" i " + i + " argv-i " + argv_sox[i]);
              }
              log.debug("padCutString :RIGHT: "+padCutString);
              log.debug("END addPadCutStringToWaves ################# ");
 
              returnLog.add(GenerateSWF.executeScript("addPadCutStringToAudio",argv_cut_sox));
           
              outputFullWav = outputFullWavCut;
              hashFileFullName = hashFileFullNameCut;
             
              //Cut away the piece at the end
             
              File outputFullWavCuttedFile = new File(outputFullWav);
             
              AudioInputStream aInputStream = AudioSystem.getAudioInputStream(outputFullWavCuttedFile);
              AudioFormat aFormat = aInputStream.getFormat();
              long frameLength = aInputStream.getFrameLength();
              float frameRate = aFormat.getFrameRate();
             
              double audioLength = Math.round(frameLength / frameRate);
             
              String newLength = ""+(new Float(audioLength)-((new Float(Math.abs(this.leftSideTime)))/1000));
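
Both interview branches use the AudioInputStream only to work out the length of a WAV file in seconds (frame count divided by frame rate) before building the SoX command line. That calculation can live in a small helper; a sketch, with a class and method name of our own choosing:

import java.io.File;
import java.io.IOException;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.UnsupportedAudioFileException;

final class WavDuration {
  // Returns the playing time of an audio file in seconds.
  static double seconds(File wav) throws IOException, UnsupportedAudioFileException {
    AudioInputStream ais = AudioSystem.getAudioInputStream(wav);
    try {
      return ais.getFrameLength() / (double) ais.getFormat().getFrameRate();
    } finally {
      ais.close();
    }
  }
}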

Examples of javax.sound.sampled.AudioInputStream

            PCMStreamConverter converter = (PCMStreamConverter)mGenerator;
            converter.close();

            try
            {
                AudioInputStream stream = AudioSystem.getAudioInputStream(mResource.getInputStream());
                converter.open(stream, stream.getFormat(), mOutputNumSamples);
            }
            catch(IOException                   exception) { /* ignored: converter is left unopened */ }
            catch(UnsupportedAudioFileException exception) { /* ignored: converter is left unopened */ }
        }
    else if(mGenerator instanceof Recorder.Player)