// AudioCapture.ets
import { audio } from '@kit.AudioKit'
import { ArkTSUtils } from '@kit.ArkTS'
import lazy {getVoicebufferDataLengthByte} from 'neonui'
import process from '@ohos.process'
import lazy { NativeNui } from 'neonui'

// Log tag for this module. `const`: never reassigned anywhere in this file.
const TAG: string = "AudioCapturer"
// Audio capturer class: captures microphone audio and forwards it to the ASR engine.
export default class AudioCapturer{
  // Serializes all access to voiceBuffer1 across concurrent async callers.
  static lock_:ArkTSUtils.locks.AsyncLock = new ArkTSUtils.locks.AsyncLock()
  // FIFO queue of captured PCM chunks waiting to be consumed.
  static voiceBuffer1:ArrayBuffer[] = [];

  // Empties the pending-audio queue under the lock.
  static async clearVoiceArrayBuffer(){
    await AudioCapturer.lock_.lockAsync( ()=>{
      if (AudioCapturer.voiceBuffer1!==undefined){
        AudioCapturer.voiceBuffer1= []
      }
      console.info(`AudioCapturer clearVoiceArrayBuffer, then voiceBuffer1 size ${AudioCapturer.voiceBuffer1.length}`);
    })
  }

  // Appends one captured chunk to the tail of the queue.
  // FIX: the original did not `await` lockAsync, so the method resolved before
  // the push ran (the "2" log could precede the "3" log) and callers could not
  // rely on the chunk being enqueued when the promise settled.
  static async setVoiceArrayBuffer(voice:ArrayBuffer){
    console.info(`AudioCapturer setVoiceArrayBuffer, 1 voiceBuffer1 size ${AudioCapturer.voiceBuffer1.length}`);
    await AudioCapturer.lock_.lockAsync( ()=>{
      if (AudioCapturer.voiceBuffer1==undefined){
        AudioCapturer.voiceBuffer1= []
      }
      AudioCapturer.voiceBuffer1.push(voice)
      console.info(`AudioCapturer setVoiceArrayBuffer, 3 voiceBuffer1 size ${AudioCapturer.voiceBuffer1.length}`);
    })
    console.info(`AudioCapturer setVoiceArrayBuffer, 2 voiceBuffer1 size ${AudioCapturer.voiceBuffer1.length}`);
  }

  /**
   * Drains up to buffer.byteLength bytes from the queue into `buffer`,
   * serialized by the class lock.
   * @returns bytes copied; 0 when the queue holds fewer bytes than requested.
   *
   * FIX: the original duplicated getVoiceArrayBuffer1's logic but never
   * assigned `ret` on the success path (the promise resolved to undefined
   * despite the Promise<number> signature), and it lacked the `break` guard
   * on an undefined shift() result, so it could spin forever. It now
   * delegates to the corrected synchronous implementation under the lock.
   */
  static async getVoiceArrayBuffer(buffer:ArrayBuffer):Promise<number>{
    console.log("AudioCapturer enter getVoiceArrayBuffer");
    return await AudioCapturer.lock_.lockAsync( ():number=>{
      return AudioCapturer.getVoiceArrayBuffer1(buffer)
    })
  }

  /**
   * Synchronous drain: copies up to buffer.byteLength bytes from the queued
   * chunks into `buffer`. A partially consumed chunk has its unread remainder
   * pushed back to the head of the queue.
   * NOTE(review): this method does not take lock_ itself — callers are
   * expected to hold it (see getVoiceArrayBuffer).
   * @returns bytes copied; 0 when the queue holds fewer bytes than requested.
   */
  static getVoiceArrayBuffer1(buffer:ArrayBuffer):number{
    let ret:number=0;
    let outbuferleng=buffer.byteLength;
    let buffer_out_typedarray = new Int8Array(buffer)
    let offset = 0
    console.log("AudioCapturer enter getVoiceArrayBuffer");

    console.info(`AudioCapturer womx getVoiceArrayBuffer, outbuferleng ${buffer.byteLength}.with length ${AudioCapturer.voiceBuffer1.length}`);
    if (AudioCapturer.voiceBuffer1.length > 0) {
      let voice_length = getVoicebufferDataLengthByte(AudioCapturer.voiceBuffer1)
      // Only satisfy the request when enough bytes are queued; otherwise return 0.
      if (voice_length>=outbuferleng) {
        let bytes_need = outbuferleng
        while(bytes_need>0){
          let voice1st = AudioCapturer.voiceBuffer1.shift()
          if (voice1st==undefined ){
            // Queue exhausted unexpectedly — stop rather than loop forever.
            break
          } else {
            let out_typedbuffer:Int8Array;
            console.info(`AudioCapturer womx voice1st.byteLength=${voice1st.byteLength} vs bytes_need=${bytes_need}`);

            if (voice1st.byteLength > bytes_need) {
              // Take only what is needed; return the remainder to the queue head.
              let out_buffer = voice1st.slice(0,bytes_need)
              out_typedbuffer = new Int8Array(out_buffer)

              let save_buffer = voice1st.slice(bytes_need)
              AudioCapturer.voiceBuffer1.unshift(save_buffer)
            } else {
              out_typedbuffer  = new Int8Array(voice1st)
            }

            // Copy the chunk into the caller's buffer at the current offset.
            for (let i = 0; i < out_typedbuffer.byteLength; i++) {
              buffer_out_typedarray[offset + i] = out_typedbuffer[i]
            }
            bytes_need -= out_typedbuffer.byteLength
            offset += out_typedbuffer.byteLength

            console.info(`AudioCapturer womx bytes_need=${bytes_need}`);
          }
        }

        ret = outbuferleng - bytes_need
      } else {
        ret = 0
        console.error(`AudioCapturer error getVoiceArrayBuffer, outbuferleng 0.with ringbuffer voicebytes length ${voice_length}`);
      }
    } else {
      ret = 0;
      console.error(`AudioCapturer error getVoiceArrayBuffer, outbuferleng 0.with ringbuffer.length ${AudioCapturer.voiceBuffer1.length}<=0`);
    }
    return ret;
  }

  // Audio stream configuration: 16 kHz, mono, signed 16-bit little-endian, raw PCM.
  static audioStreamInfo:audio.AudioStreamInfo = {
    // Sampling rate
    samplingRate:audio.AudioSamplingRate.SAMPLE_RATE_16000,
    // Channel count
    channels:audio.AudioChannel.CHANNEL_1,
    // Sample format
    sampleFormat:audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE,
    // Encoding type
    encodingType:audio.AudioEncodingType.ENCODING_TYPE_RAW
  }
  // Capturer configuration: plain microphone source.
  static audioCapturerInfo:audio.AudioCapturerInfo={
    // Audio source type
    source:audio.SourceType.SOURCE_TYPE_MIC,
    // Capturer flags
    capturerFlags:0
  }
  // Underlying system audio capturer; assigned by init().
  static audioCapturer:audio.AudioCapturer
  // ASR engine that consumes the captured audio; assigned by init().
  static g_asrinstance:NativeNui

  // Creates the system capturer and subscribes readDataCallback to its frames.
  static async init(asrinstance:NativeNui){
    AudioCapturer.g_asrinstance = asrinstance
    AudioCapturer.audioCapturer = await audio.createAudioCapturer({
      // Requires both the stream info and the capturer info.
      streamInfo:AudioCapturer.audioStreamInfo,
      capturerInfo:AudioCapturer.audioCapturerInfo
    })
    if (AudioCapturer.audioCapturer !== undefined) {
      // on() is synchronous (void); the original's `await` on it was a no-op.
      AudioCapturer.audioCapturer.on('readData', AudioCapturer.readDataCallback);
    }
  }

  // Per-frame callback: forwards each captured buffer straight to the ASR engine.
  static  readDataCallback = (buffer: ArrayBuffer) => {
    console.log(`${TAG} read data bytelength is ${buffer.byteLength}. uid[${process.uid}] pid[${process.pid}] tid[${process.tid}]`);
    AudioCapturer.g_asrinstance.updateAudio(buffer,false)
  }

  // Starts capture, clearing any stale queued audio first.
  static async  start(){
    await AudioCapturer.clearVoiceArrayBuffer()
    if (AudioCapturer.audioCapturer) {
      let stateGroup = [audio.AudioState.STATE_PREPARED,
        audio.AudioState.STATE_PAUSED,
        audio.AudioState.STATE_STOPPED];
      if (stateGroup.indexOf(AudioCapturer.audioCapturer.state.valueOf()) === -1) {
        // Capture may only start from STATE_PREPARED, STATE_PAUSED or STATE_STOPPED.
        console.error(`${TAG}: start failed`);
        console.error('Capturer is not STATE_PREPARED or STATE_PAUSED or STATE_STOPPED');
        return;
      }

      // Begin recording.
      await AudioCapturer.audioCapturer.start()
      console.log(`${TAG} start done`);
    } else {
      console.log(`${TAG} start with  AudioCapturer.audioCapturer is null`);
      return
    }
  }

  // Stops capture and clears the queue afterwards.
  static async stop(){
    if (AudioCapturer.audioCapturer) {
      // Stopping is only legal while RUNNING or PAUSED.
      if (AudioCapturer.audioCapturer.state === audio.AudioState.STATE_RUNNING ||
        AudioCapturer.audioCapturer.state === audio.AudioState.STATE_PAUSED
      ) {
        console.error(`Capturer state is ${AudioCapturer.audioCapturer.state}`);
        await AudioCapturer.audioCapturer.stop()
        console.error('Capturer stop done');
      } else {
        console.error('Capturer is not running or paused');
        return;
      }
    } else {
      console.log(`${TAG} stop with  AudioCapturer.audioCapturer is null`);
      return
    }
    await AudioCapturer.clearVoiceArrayBuffer()
  }

  // Releases the capturer unless it is already RELEASED or still NEW.
  static async release() {
    if (AudioCapturer.audioCapturer) {
      if (AudioCapturer.audioCapturer.state === audio.AudioState.STATE_RELEASED ||
        AudioCapturer.audioCapturer.state === audio.AudioState.STATE_NEW) {
        console.info('Capturer already released');
        return;
      }

      await AudioCapturer.audioCapturer.release()
    }
  }
}