
Possible duplicate:
how to convert or record .wav file in 16khz 16bit mono little-endian?

Output .wav file is not at a 16000 Hz, 16-bit sample rate

I have tried the code below. I can record sound successfully on an Android device, but the file quality is not good... Basically I want a .wav file in 16000 Hz, 16-bit, mono, little-endian format. Please tell me where I am going wrong. I used the google code androidrecorder2 source code.

package com.varma.samples.audiorecorder; 

import java.io.File; 
import java.io.FileInputStream; 
import java.io.FileNotFoundException; 
import java.io.FileOutputStream; 
import java.io.IOException; 

import android.annotation.SuppressLint; 
import android.app.Activity; 
import android.media.AudioFormat; 
import android.media.AudioRecord; 
import android.media.MediaRecorder; 
import android.os.Bundle; 
import android.os.Environment; 
import android.view.View; 
import android.widget.Button; 

public class RecorderActivity extends Activity { 
    private static final int RECORDER_BPP = 16; 
    private static final String AUDIO_RECORDER_FILE_EXT_WAV = ".wav"; 
    private static final String AUDIO_RECORDER_FOLDER = "AudioRecorder"; 
    private static final String AUDIO_RECORDER_TEMP_FILE = "record_temp.raw"; 
    private static final int RECORDER_SAMPLERATE = 16000; 
    private static final int RECORDER_CHANNELS = AudioFormat.CHANNEL_CONFIGURATION_MONO; 
    private static final int RECORDER_AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT; 

    private AudioRecord recorder = null; 
    private int bufferSize = 0; 
    private Thread recordingThread = null; 
    private boolean isRecording = false; 

    @SuppressLint("NewApi") 
    @Override 
    public void onCreate(Bundle savedInstanceState) { 
     super.onCreate(savedInstanceState); 
     setContentView(R.layout.main); 

     setButtonHandlers(); 
     enableButtons(false); 

     bufferSize = AudioRecord.getMinBufferSize(RECORDER_SAMPLERATE,RECORDER_CHANNELS,RECORDER_AUDIO_ENCODING); 
    } 

    private void setButtonHandlers() { 
     ((Button)findViewById(R.id.btnStart)).setOnClickListener(btnClick); 
     ((Button)findViewById(R.id.btnStop)).setOnClickListener(btnClick); 
    } 

    private void enableButton(int id,boolean isEnable){ 
     ((Button)findViewById(id)).setEnabled(isEnable); 
    } 

    private void enableButtons(boolean isRecording) { 
     enableButton(R.id.btnStart,!isRecording); 
     enableButton(R.id.btnStop,isRecording); 
    } 

    private String getFilename(){ 
     String filepath = Environment.getExternalStorageDirectory().getPath(); 
     File file = new File(filepath,AUDIO_RECORDER_FOLDER); 

     if(!file.exists()){ 
      file.mkdirs(); 
     } 

     return (file.getAbsolutePath() + "/" + System.currentTimeMillis() + AUDIO_RECORDER_FILE_EXT_WAV); 
    } 

    private String getTempFilename(){ 
     String filepath = Environment.getExternalStorageDirectory().getPath(); 
     File file = new File(filepath,AUDIO_RECORDER_FOLDER); 

     if(!file.exists()){ 
      file.mkdirs(); 
     } 

     File tempFile = new File(filepath,AUDIO_RECORDER_TEMP_FILE); 

     if(tempFile.exists()) 
      tempFile.delete(); 

     return (file.getAbsolutePath() + "/" + AUDIO_RECORDER_TEMP_FILE); 
    } 

    @SuppressLint({ "NewApi", "NewApi" }) 
    private void startRecording(){ 
     recorder = new AudioRecord(MediaRecorder.AudioSource.MIC, 
         RECORDER_SAMPLERATE, RECORDER_CHANNELS,RECORDER_AUDIO_ENCODING, bufferSize); 

     recorder.startRecording(); 

     isRecording = true; 

     recordingThread = new Thread(new Runnable() { 

      @Override 
      public void run() { 
       writeAudioDataToFile(); 
      } 
     },"AudioRecorder Thread"); 

     recordingThread.start(); 
    } 

    @SuppressLint({ "NewApi", "NewApi", "NewApi" }) 
    private void writeAudioDataToFile(){ 
     byte data[] = new byte[bufferSize]; 
     String filename = getTempFilename(); 
     FileOutputStream os = null; 

     try { 
      os = new FileOutputStream(filename); 
     } catch (FileNotFoundException e) { 
      // TODO Auto-generated catch block 
      e.printStackTrace(); 
     } 

     int read = 0; 

     if(null != os){ 
      while(isRecording){ 
       read = recorder.read(data, 0, bufferSize); 

       if(AudioRecord.ERROR_INVALID_OPERATION != read){ 
        try { 
         os.write(data); 
        } catch (IOException e) { 
         e.printStackTrace(); 
        } 
       } 
      } 

      try { 
       os.close(); 
      } catch (IOException e) { 
       e.printStackTrace(); 
      } 
     } 
    } 

    @SuppressLint({ "NewApi", "NewApi" }) 
    private void stopRecording(){ 
     if(null != recorder){ 
      isRecording = false; 

      recorder.stop(); 
      recorder.release(); 

      recorder = null; 
      recordingThread = null; 
     } 

     copyWaveFile(getTempFilename(),getFilename()); 
     deleteTempFile(); 
    } 

    private void deleteTempFile() { 
     File file = new File(getTempFilename()); 

     file.delete(); 
    } 

    private void copyWaveFile(String inFilename,String outFilename){ 
     FileInputStream in = null; 
     FileOutputStream out = null; 
     long totalAudioLen = 0; 
     long totalDataLen = totalAudioLen + 36; 
     long longSampleRate = RECORDER_SAMPLERATE; 
     int channels = 2; 
     long byteRate = RECORDER_BPP * RECORDER_SAMPLERATE * channels/8; 

     byte[] data = new byte[bufferSize]; 

     try { 
      in = new FileInputStream(inFilename); 
      out = new FileOutputStream(outFilename); 
      totalAudioLen = in.getChannel().size(); 
      totalDataLen = totalAudioLen + 36; 

      AppLog.logString("File size: " + totalDataLen); 

      WriteWaveFileHeader(out, totalAudioLen, totalDataLen, 
        longSampleRate, channels, byteRate); 

      while(in.read(data) != -1){ 
       out.write(data); 
      } 

      in.close(); 
      out.close(); 
     } catch (FileNotFoundException e) { 
      e.printStackTrace(); 
     } catch (IOException e) { 
      e.printStackTrace(); 
     } 
    } 

    private void WriteWaveFileHeader(
      FileOutputStream out, long totalAudioLen, 
      long totalDataLen, long longSampleRate, int channels, 
      long byteRate) throws IOException { 

     byte[] header = new byte[44]; 

     header[0] = 'R'; // RIFF/WAVE header 
     header[1] = 'I'; 
     header[2] = 'F'; 
     header[3] = 'F'; 
     header[4] = (byte) (totalDataLen & 0xff); 
     header[5] = (byte) ((totalDataLen >> 8) & 0xff); 
     header[6] = (byte) ((totalDataLen >> 16) & 0xff); 
     header[7] = (byte) ((totalDataLen >> 24) & 0xff); 
     header[8] = 'W'; 
     header[9] = 'A'; 
     header[10] = 'V'; 
     header[11] = 'E'; 
     header[12] = 'f'; // 'fmt ' chunk 
     header[13] = 'm'; 
     header[14] = 't'; 
     header[15] = ' '; 
     header[16] = 16; // 4 bytes: size of 'fmt ' chunk 
     header[17] = 0; 
     header[18] = 0; 
     header[19] = 0; 
     header[20] = 1; // format = 1 
     header[21] = 0; 
     header[22] = (byte) channels; 
     header[23] = 0; 
     header[24] = (byte) (longSampleRate & 0xff); 
     header[25] = (byte) ((longSampleRate >> 8) & 0xff); 
     header[26] = (byte) ((longSampleRate >> 16) & 0xff); 
     header[27] = (byte) ((longSampleRate >> 24) & 0xff); 
     header[28] = (byte) (byteRate & 0xff); 
     header[29] = (byte) ((byteRate >> 8) & 0xff); 
     header[30] = (byte) ((byteRate >> 16) & 0xff); 
     header[31] = (byte) ((byteRate >> 24) & 0xff); 
     header[32] = (byte) (2 * 16/8); // block align 
     header[33] = 0; 
     header[34] = RECORDER_BPP; // bits per sample 
     header[35] = 0; 
     header[36] = 'd'; 
     header[37] = 'a'; 
     header[38] = 't'; 
     header[39] = 'a'; 
     header[40] = (byte) (totalAudioLen & 0xff); 
     header[41] = (byte) ((totalAudioLen >> 8) & 0xff); 
     header[42] = (byte) ((totalAudioLen >> 16) & 0xff); 
     header[43] = (byte) ((totalAudioLen >> 24) & 0xff); 

     out.write(header, 0, 44); 
    } 

    private View.OnClickListener btnClick = new View.OnClickListener() { 
     @Override 
     public void onClick(View v) { 
      switch(v.getId()){ 
       case R.id.btnStart:{ 
        AppLog.logString("Start Recording"); 

        enableButtons(true); 
        startRecording(); 

        break; 
       } 
       case R.id.btnStop:{ 
        AppLog.logString("Start Recording"); 

        enableButtons(false); 
        stopRecording(); 

        break; 
       } 
      } 
     } 
    }; 
} 

The output wav file is at this link:

http://www.filefactory.com/file/20ibb9itlal1/n/1347267058768.wav

One problem I just noticed is that the output file is still in stereo format. I have WAV conversion software on Windows 7, and when I load my output file it shows that it is in stereo... I never passed the CHANNEL_CONFIGURATION_STEREO option, so why is that?

Please guide me; I have been trying to sort this out for a week. Thanks in advance.


Try 'CHANNEL_IN_MONO'.
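(A minimal sketch of that suggested change, against the constants in the question; everything else is assumed unchanged:)

// CHANNEL_CONFIGURATION_MONO is deprecated; CHANNEL_IN_MONO is the
// current AudioFormat constant for single-channel AudioRecord capture.
private static final int RECORDER_CHANNELS = AudioFormat.CHANNEL_IN_MONO;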


Thanks, but I tried that and I still get poor quality: the wav plays back in slow motion. The file now shows 16000 Hz and mono, but when I record from the Android device it is still not right, even though recording speech with the same configuration in Audacity works fine. Why?

Answer


In your copyWaveFile function, you set channels = 2;

This value is then written into the header by WriteWaveFileHeader().
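A minimal sketch of the corresponding fix, assuming the capture itself is mono as intended: derive the channel count, byte rate, and block align from the recorder constants rather than hard-coding stereo values.

// In copyWaveFile(): describe the data as mono, matching RECORDER_CHANNELS.
int channels = 1;
long byteRate = RECORDER_SAMPLERATE * channels * RECORDER_BPP/8; // 16000 * 1 * 2 = 32000 bytes/s

// In WriteWaveFileHeader(): compute block align from the same values
// instead of the hard-coded stereo value (2 * 16/8).
header[32] = (byte) (channels * RECORDER_BPP/8); // 2 bytes per frame for 16-bit mono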

Two things I have found useful when working with .wav files are:

  • a hex editor, to inspect the header, and
  • this page, which describes what the header should look like.

But if I set channels = 1 then the quality becomes very low. It gives me 16000 Hz with a 706 kbps bit rate and mono format, but the wav file plays back as if in slow motion... why does that happen?


http://www.filefactory.com/file/55sbx2log443/n/1347355143822.wav .. After changing channels = 1 it gives me mono, but the file is not good quality... you can hear my output wave file at the above link.


It looks as though your actual data is still in two channels (one of which is empty). Maybe you could try Michael Burr's suggestion? – Shitesh
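If the raw capture really is interleaved 16-bit little-endian stereo, a downmix along these lines could be tried before writing the WAV body (a sketch only; stereoToMono is a hypothetical helper, not part of the posted code):

// Hypothetical helper: average each interleaved little-endian stereo frame
// (left sample, right sample) down to a single 16-bit mono sample.
private static byte[] stereoToMono(byte[] stereo) {
    byte[] mono = new byte[stereo.length/2];
    for (int i = 0, j = 0; i + 3 < stereo.length; i += 4, j += 2) {
        short left  = (short) ((stereo[i + 1] << 8) | (stereo[i]     & 0xff));
        short right = (short) ((stereo[i + 3] << 8) | (stereo[i + 2] & 0xff));
        short avg   = (short) ((left + right)/2);
        mono[j]     = (byte) (avg & 0xff);        // low byte first (little-endian)
        mono[j + 1] = (byte) ((avg >> 8) & 0xff); // high byte
    }
    return mono;
}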