Question

I have a web service that returns an array of bytes, and my intention is to convert that array to a .wav file on the client (a handheld such as a BlackBerry). However, I really have no idea how to do that. I tried just writing the bytes out with a FileOutputStream, but of course the result wouldn't play. So I am once again stuck. Any ideas?

Was it helpful?

Solution

So, there are LOTS of .WAV formats; it is worth reading the format's documentation (the RIFF/WAVE header layout).

It's not just a stream of data bytes, but it's close... Just a bit of header and you should be good.

I suppose you could also use something like http://java.sun.com/j2se/1.5.0/docs/api/javax/sound/sampled/spi/AudioFileWriter.html

OTHER TIPS

/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */

package bemukan.voiceRecognition.speechToText;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import javax.sound.sampled.AudioFileFormat;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.Clip;
import javax.sound.sampled.LineUnavailableException;
import javax.sound.sampled.UnsupportedAudioFileException;

/**
 *
 * @author MuhammedYC
 */
/**
 * Extracts a byte-range slice of an audio file and writes it to {@code a.wav}
 * in the working directory, preserving the source's audio format and file type.
 *
 * <p>NOTE(review): despite their names, {@code startTime}/{@code endTime} are
 * used as BYTE offsets into the decoded audio data, not seconds — confirm the
 * intended units with callers before renaming.
 *
 * @author MuhammedYC
 */
public class SplitAudio {
    /** Number of frames read per iteration while buffering the source audio. */
    private static final int BUFFER_LENGTH = 1024;
    /** Start of the slice, as a byte offset into the decoded audio data. */
    private double startTime;
    /** End of the slice (exclusive), as a byte offset into the decoded data. */
    private double endTime;
    /** Audio file to slice. */
    private File sourceFile;

    /**
     * Creates a splitter for the given file and byte range.
     *
     * <p>The original constructor opened an {@link AudioInputStream} and a
     * {@code Clip} (leaking both, plus the system audio line) only to compute
     * a length it never used; that dead code is removed so construction has
     * no side effects and cannot fail.
     *
     * @param sourceFile audio file to slice
     * @param startTime  start offset of the slice, in bytes
     * @param endTime    end offset of the slice (exclusive), in bytes
     */
    public SplitAudio(File sourceFile, int startTime, int endTime) {
        this.startTime = startTime;
        this.endTime = endTime;
        this.sourceFile = sourceFile;
    }

    /**
     * Reads the source audio, copies the {@code [startTime, endTime)} byte
     * range, and writes it as {@code a.wav} using the source's own format
     * and file type.
     *
     * <p>Fixes over the original: streams are closed via try-with-resources;
     * the requested range is clamped to the available data (no
     * {@code ArrayIndexOutOfBoundsException} for out-of-range offsets) and
     * aligned to frame boundaries (an unaligned slice corrupts multi-byte
     * frame audio); failures are reported instead of silently swallowed.
     */
    public void splitAudio() {
        File outputFile = new File("a.wav");
        try {
            AudioFileFormat fileFormat = AudioSystem.getAudioFileFormat(sourceFile);
            AudioFileFormat.Type targetFileType = fileFormat.getType();
            AudioFormat audioFormat = fileFormat.getFormat();
            // getFrameSize() can be AudioSystem.NOT_SPECIFIED (-1) for some
            // encodings; fall back to 1 so the arithmetic below stays valid.
            int frameSize = Math.max(1, audioFormat.getFrameSize());

            // Buffer the entire decoded audio into memory.
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            try (AudioInputStream inputAIS = AudioSystem.getAudioInputStream(sourceFile)) {
                byte[] buffer = new byte[BUFFER_LENGTH * frameSize];
                int bytesRead;
                while ((bytesRead = inputAIS.read(buffer)) != -1) {
                    baos.write(buffer, 0, bytesRead);
                }
            }
            byte[] audioData = baos.toByteArray();

            // Clamp the requested byte range to the data actually read, then
            // snap it to whole frames so the frame count passed to the
            // AudioInputStream constructor matches the bytes exactly.
            int from = Math.max(0, Math.min((int) startTime, audioData.length));
            int to = Math.max(from, Math.min((int) endTime, audioData.length));
            from -= from % frameSize;
            int length = ((to - from) / frameSize) * frameSize;

            byte[] splitData = new byte[length];
            System.arraycopy(audioData, from, splitData, 0, length);

            try (AudioInputStream outputAIS = new AudioInputStream(
                    new ByteArrayInputStream(splitData), audioFormat,
                    length / frameSize)) {
                AudioSystem.write(outputAIS, targetFileType, outputFile);
            }
        } catch (UnsupportedAudioFileException | IOException e) {
            // The original swallowed these silently; at minimum report them.
            System.err.println("SplitAudio: failed to split " + sourceFile + ": " + e);
        }
    }
}
Licensed under: CC-BY-SA with attribution
Not affiliated with StackOverflow
scroll top