
I am building an app that listens for audio and records any audio that exceeds a certain sound threshold, but the Android UI stops responding.

The UI has a single button labelled "Listen". When the Listen button is clicked, the app detects and records audio as intended (this is evident in the Android Studio monitor). Once clicked, the Listen button should change to "Stop", and clicking Stop should of course stop the recording and return the button to "Listen", ready to go again.

However, as soon as the app starts running, the UI freezes and stops responding: the button stays on "Listen", clicking it does nothing, I cannot navigate to a different tab, and I have to kill the app from the monitor in the IDE.

Any ideas how to fix this? I have attached the Java and XML code below.

Java

public class ListenFragment extends Fragment implements 
    ActivityCompat.OnRequestPermissionsResultCallback { 

private static final int PERMISSION_REQUEST = 0; 
private static final int RECORDER_BPP = 16; 
private static final String AUDIO_RECORDER_FILE_EXT_WAV = ".wav"; 
private static final String AUDIO_RECORDER_FOLDER = "AcousticRecognition"; 
private static final int RECORDER_SAMPLE_RATE = 44100; 
private static final int RECORDER_CHANNELS = AudioFormat.CHANNEL_IN_STEREO; 
private static final int RECORDER_AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT; 

private AudioRecord recorder = null; 
private int bufferSize = 0; 
private boolean isRecording = false; 
private String audioFilename = null; 
private String wavFilePath = null; 
private String fingerprintFilePath = null; 
private String fn = null; 
private Button listen; 
private View mLayout; 
private LogDBWrapper logDB; 
private FingerprintDBWrapper fingerprintDB; 
private WaveDBWrapper wavDB; 
private Thread detectThread = null; 

/** 
* Default Constructor 
*/ 
public ListenFragment() { 
} 

@Override 
public void onCreate(Bundle savedInstanceState) { 
    super.onCreate(savedInstanceState); 
} 

@Override 
public View onCreateView(LayoutInflater inflater, ViewGroup container, 
         Bundle savedInstanceState) { 
    View view = inflater.inflate(R.layout.fragment_listen, container, false); 


    mLayout = view.findViewById(R.id.record_layout); 
    listen = (Button) view.findViewById(R.id.listen); 
    fingerprintDB = new FingerprintDBWrapper(getActivity()); 
    logDB = new LogDBWrapper(getActivity()); 
    wavDB = new WaveDBWrapper(getActivity()); 

    //set buffer size (use the same sample rate/channels/encoding as the recorder)
    bufferSize = AudioRecord.getMinBufferSize(RECORDER_SAMPLE_RATE, RECORDER_CHANNELS,
      RECORDER_AUDIO_ENCODING);

    detectThread = new Thread(new Runnable() { 
     @Override 
     public void run() { 
      audioDetect(); 
     } 
    }); 

    //event handlers for record button 
    listen.setOnClickListener(new View.OnClickListener() { 
     public void onClick(View v) { 
      if (!isRecording) { 
       listen.setText(R.string.stop); 
       isRecording = true; 
       detectThread.run(); 
      } else { 
       detectThread.interrupt(); 
       stopRecording(); 
       listen.setText(R.string.listen); 
      } 
     } 
    }); 

    return view; 
} 

private void audioDetect() { 
    // Initialize Audio Recorder. 
    recorder = new AudioRecord(MediaRecorder.AudioSource.MIC, 
      RECORDER_SAMPLE_RATE, 
      RECORDER_CHANNELS, 
      RECORDER_AUDIO_ENCODING, 
      bufferSize 
    ); 
    // Start Recording. 
    recorder.startRecording(); 

    int numberOfReadBytes = 0; 
    byte audioBuffer[] = new byte[bufferSize]; 
    boolean recording = false; 
    float tempFloatBuffer[] = new float[3]; 
    int tempIndex = 0; 
    int totalReadBytes = 0; 
    byte totalByteBuffer[] = new byte[60 * 44100 * 2]; 



    // While data come from microphone. 
    while (true) { 
     float totalAbsValue = 0.0f; 
     short sample = 0; 

     numberOfReadBytes = recorder.read(audioBuffer, 0, bufferSize); 

     // Analyze Sound. 
     for (int i = 0; i < bufferSize; i += 2) { 
      sample = (short) ((audioBuffer[i]) | audioBuffer[i + 1] << 8); 
      totalAbsValue += (float) Math.abs(sample)/((float) numberOfReadBytes/(float) 2); 
     } 

     // Analyze temp buffer. 
     tempFloatBuffer[tempIndex % 3] = totalAbsValue; 
     float temp = 0.0f; 
     for (int i = 0; i < 3; ++i) 
      temp += tempFloatBuffer[i]; 

     if ((temp >= 0 && temp <= 350) && !recording) { 
      Log.i("TAG", "1"); 
      tempIndex++; 
      continue; 
     } 

     if (temp > 350 && !recording) { 
      Log.i("TAG", "2"); 
      recording = true; 
     } 

     if ((temp >= 0 && temp <= 350) && recording) { 
      Log.i("TAG", "Save audio to file."); 


      SimpleDateFormat df = new SimpleDateFormat("dd-MM-yyyy-HH-mm-ss"); 

      // Save audio to file. 
      String filepath = Environment.getExternalStorageDirectory().getPath(); 
      Long tsLong = System.currentTimeMillis()/1000; 

      String name = tsLong.toString() + " "+ df.format(new Date()); 

      File file = new File(filepath, AUDIO_RECORDER_FOLDER); 
      if (!file.exists()) 
       file.mkdirs(); 

      fn = file.getAbsolutePath() + "/" + name + AUDIO_RECORDER_FILE_EXT_WAV; 

      logDB.insertData(name, fn); 

      long totalAudioLen = 0; 
      long totalDataLen = totalAudioLen + 36; 
      long longSampleRate = RECORDER_SAMPLE_RATE; 
      int channels = 2; 
      long byteRate = RECORDER_BPP * RECORDER_SAMPLE_RATE * channels/8; 
      totalAudioLen = totalReadBytes; 
      totalDataLen = totalAudioLen + 36; 
      byte finalBuffer[] = new byte[totalReadBytes + 44]; 

      finalBuffer[0] = 'R'; // RIFF/WAVE header 
      finalBuffer[1] = 'I'; 
      finalBuffer[2] = 'F'; 
      finalBuffer[3] = 'F'; 
      finalBuffer[4] = (byte) (totalDataLen & 0xff); 
      finalBuffer[5] = (byte) ((totalDataLen >> 8) & 0xff); 
      finalBuffer[6] = (byte) ((totalDataLen >> 16) & 0xff); 
      finalBuffer[7] = (byte) ((totalDataLen >> 24) & 0xff); 
      finalBuffer[8] = 'W'; 
      finalBuffer[9] = 'A'; 
      finalBuffer[10] = 'V'; 
      finalBuffer[11] = 'E'; 
      finalBuffer[12] = 'f'; // 'fmt ' chunk 
      finalBuffer[13] = 'm'; 
      finalBuffer[14] = 't'; 
      finalBuffer[15] = ' '; 
      finalBuffer[16] = 16; // 4 bytes: size of 'fmt ' chunk 
      finalBuffer[17] = 0; 
      finalBuffer[18] = 0; 
      finalBuffer[19] = 0; 
      finalBuffer[20] = 1; // format = 1 
      finalBuffer[21] = 0; 
      finalBuffer[22] = (byte) channels; 
      finalBuffer[23] = 0; 
      finalBuffer[24] = (byte) (longSampleRate & 0xff); 
      finalBuffer[25] = (byte) ((longSampleRate >> 8) & 0xff); 
      finalBuffer[26] = (byte) ((longSampleRate >> 16) & 0xff); 
      finalBuffer[27] = (byte) ((longSampleRate >> 24) & 0xff); 
      finalBuffer[28] = (byte) (byteRate & 0xff); 
      finalBuffer[29] = (byte) ((byteRate >> 8) & 0xff); 
      finalBuffer[30] = (byte) ((byteRate >> 16) & 0xff); 
      finalBuffer[31] = (byte) ((byteRate >> 24) & 0xff); 
      finalBuffer[32] = (byte) (2 * 16/8); // block align 
      finalBuffer[33] = 0; 
      finalBuffer[34] = RECORDER_BPP; // bits per sample 
      finalBuffer[35] = 0; 
      finalBuffer[36] = 'd'; 
      finalBuffer[37] = 'a'; 
      finalBuffer[38] = 't'; 
      finalBuffer[39] = 'a'; 
      finalBuffer[40] = (byte) (totalAudioLen & 0xff); 
      finalBuffer[41] = (byte) ((totalAudioLen >> 8) & 0xff); 
      finalBuffer[42] = (byte) ((totalAudioLen >> 16) & 0xff); 
      finalBuffer[43] = (byte) ((totalAudioLen >> 24) & 0xff); 

      for (int i = 0; i < totalReadBytes; ++i) 
       finalBuffer[44 + i] = totalByteBuffer[i]; 

      FileOutputStream out; 
      try { 
       out = new FileOutputStream(fn); 
       try { 
        out.write(finalBuffer); 
        out.close(); 
       } catch (IOException e) { 
        // TODO Auto-generated catch block 
        e.printStackTrace(); 
       } 

      } catch (FileNotFoundException e1) { 
       // TODO Auto-generated catch block 
       e1.printStackTrace(); 
      } 

      tempIndex++; 
      totalReadBytes = 0; 
      recording = false; 

     } 

     // -> Recording sound here. 
     Log.i("TAG", "Recording Sound."); 
     for (int i = 0; i < numberOfReadBytes; i++) { 
      totalByteBuffer[totalReadBytes + i] = audioBuffer[i]; 
     } 
     totalReadBytes += numberOfReadBytes; 

     tempIndex++; 

    } 
} 

private void stopRecording() { 
    if (recorder != null) { 
     isRecording = false; 

     int i = recorder.getState(); 
     if (i == 1) 
      recorder.stop(); 
     recorder.release(); 

     recorder = null; 
    } 
} 

@Override 
public void onDestroyView() { 
    super.onDestroyView(); 
    if (isRecording) { 
     stopRecording(); 
    } 
} 

} 

XML

<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android" 
xmlns:tools="http://schemas.android.com/tools" 
android:layout_width="match_parent" 
android:layout_height="match_parent" 
android:orientation="vertical" 
tools:context=".ListenFragment"> 

<Space 
    android:layout_width="match_parent" 
    android:layout_height="50dp" /> 

<TextView 
    android:layout_width="wrap_content" 
    android:layout_height="wrap_content" 
    android:text="@string/listen" 
    android:textColor="#b1060e" 
    android:textSize="40sp" 
    android:textStyle="bold" 
    android:layout_gravity="center" 
    /> 

<Space 
    android:layout_width="match_parent" 
    android:layout_height="30dp" /> 

<Button 
    android:id="@+id/listen" 
    android:layout_width="wrap_content" 
    android:layout_height="wrap_content" 
    android:layout_gravity="center" 
    android:text="@string/listen" 
    android:background="@drawable/button_style" 
    android:textColor="#fff" /> 

</LinearLayout> 

Answer


Call detectThread.start(), not detectThread.run(). start() launches a new thread; run() simply invokes the Runnable on the current thread, which here is the UI thread, making it unresponsive.
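
For reference, a minimal sketch of what the corrected click handler could look like, assuming the rest of the fragment stays as posted. A Thread object cannot be restarted once it has finished, so the sketch also builds a fresh Thread on each click instead of reusing the one created in onCreateView():

    //event handler for the record button (corrected: start(), not run())
    listen.setOnClickListener(new View.OnClickListener() {
     public void onClick(View v) {
      if (!isRecording) {
       listen.setText(R.string.stop);
       isRecording = true;
       // A Thread cannot be started twice, so create a new one per click.
       detectThread = new Thread(new Runnable() {
        @Override
        public void run() {
         audioDetect(); // detection loop now runs off the UI thread
        }
       });
       detectThread.start(); // start() spawns the worker thread
      } else {
       detectThread.interrupt();
       stopRecording();
       listen.setText(R.string.listen);
      }
     }
    });

Note that stopping cleanly is a separate concern: interrupt() alone will not break out of the while (true) loop in audioDetect(), so the loop would also need to check isRecording (or Thread.interrupted()) on each pass.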


Works perfectly, thank you very much – CHarkin