AudioCapture.ets
9.71 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
import { audio } from '@kit.AudioKit'
import { ArkTSUtils } from '@kit.ArkTS'
import lazy {getVoicebufferDataLengthByte} from 'neonui'
import process from '@ohos.process'
import lazy { NativeNui } from 'neonui'
// Log tag shared by every method in this file.
const TAG = "AudioCapturer"
// Audio capturer class: captures 16 kHz mono S16LE PCM from the microphone,
// forwards each captured chunk to the ASR engine (see readDataCallback), and
// maintains a lock-protected FIFO of raw PCM chunks (voiceBuffer1) that
// consumers can drain via getVoiceArrayBuffer / getVoiceArrayBuffer1.
export default class AudioCapturer {
  // Serializes every read/write of the shared chunk queue below.
  static lock_: ArkTSUtils.locks.AsyncLock = new ArkTSUtils.locks.AsyncLock()
  // FIFO queue of captured PCM chunks awaiting consumption.
  static voiceBuffer1: ArrayBuffer[] = [];

  // Empties the chunk queue under the lock; resolves once the queue is clear.
  static async clearVoiceArrayBuffer() {
    await AudioCapturer.lock_.lockAsync(() => {
      if (AudioCapturer.voiceBuffer1 !== undefined) {
        AudioCapturer.voiceBuffer1 = []
      }
      console.info(`AudioCapturer clearVoiceArrayBuffer, then voiceBuffer1 size ${AudioCapturer.voiceBuffer1.length}`);
    })
  }

  // Appends one captured chunk to the queue under the lock.
  // FIX: lockAsync was previously fired without `await`, so callers awaiting
  // this method could resume before the push actually happened (the "2" log
  // could race ahead of the "3" log). The push is now awaited.
  static async setVoiceArrayBuffer(voice: ArrayBuffer) {
    console.info(`AudioCapturer setVoiceArrayBuffer, 1 voiceBuffer1 size ${AudioCapturer.voiceBuffer1.length}`);
    await AudioCapturer.lock_.lockAsync(() => {
      if (AudioCapturer.voiceBuffer1 == undefined) {
        AudioCapturer.voiceBuffer1 = []
      }
      AudioCapturer.voiceBuffer1.push(voice)
      console.info(`AudioCapturer setVoiceArrayBuffer, 3 voiceBuffer1 size ${AudioCapturer.voiceBuffer1.length}`);
    })
    console.info(`AudioCapturer setVoiceArrayBuffer, 2 voiceBuffer1 size ${AudioCapturer.voiceBuffer1.length}`);
  }

  // Fills `buffer` from the queue, serialized by the lock.
  // Returns the number of bytes copied: buffer.byteLength when the queue held
  // enough data, 0 when it did not (all-or-nothing, see the core below).
  // FIX: the previous inline copy of this logic (a) never assigned `ret` on the
  // success path, resolving to `undefined` instead of a byte count, and
  // (b) lacked a `break` when shift() returned undefined, risking an infinite
  // loop. It now delegates to the shared, corrected core getVoiceArrayBuffer1,
  // which already contained both fixes, removing the drifted duplication.
  static async getVoiceArrayBuffer(buffer: ArrayBuffer): Promise<number> {
    console.log("AudioCapturer enter getVoiceArrayBuffer");
    return await AudioCapturer.lock_.lockAsync(() => {
      return AudioCapturer.getVoiceArrayBuffer1(buffer)
    })
  }

  // Unsynchronized core: copies queued PCM bytes into `buffer`.
  // All-or-nothing policy: copies only when the queued total can fill the whole
  // buffer; otherwise copies nothing and returns 0. When a queued chunk is
  // larger than the remaining need, its unread tail is pushed back to the front
  // of the queue. Returns the number of bytes actually copied.
  // NOTE(review): callers other than getVoiceArrayBuffer must hold lock_
  // themselves — this method does not acquire it.
  static getVoiceArrayBuffer1(buffer: ArrayBuffer): number {
    let ret: number = 0;
    let outbuferleng = buffer.byteLength;
    let buffer_out_typedarray = new Int8Array(buffer)
    let offset = 0
    console.log("AudioCapturer enter getVoiceArrayBuffer");
    console.info(`AudioCapturer womx getVoiceArrayBuffer, outbuferleng ${buffer.byteLength}.with length ${AudioCapturer.voiceBuffer1.length}`);
    if (AudioCapturer.voiceBuffer1.length > 0) {
      // Total bytes currently queued across all chunks.
      let voice_length = getVoicebufferDataLengthByte(AudioCapturer.voiceBuffer1)
      if (voice_length >= outbuferleng) {
        let bytes_need = outbuferleng
        while (bytes_need > 0) {
          let voice1st = AudioCapturer.voiceBuffer1.shift()
          if (voice1st == undefined) {
            // Queue drained unexpectedly; stop and report the partial count.
            break
          } else {
            let out_typedbuffer: Int8Array;
            console.info(`AudioCapturer womx voice1st.byteLength=${voice1st.byteLength} vs bytes_need=${bytes_need}`);
            if (voice1st.byteLength > bytes_need) {
              // Chunk is larger than needed: consume a prefix, requeue the tail.
              let out_buffer = voice1st.slice(0, bytes_need)
              out_typedbuffer = new Int8Array(out_buffer)
              let save_buffer = voice1st.slice(bytes_need)
              AudioCapturer.voiceBuffer1.unshift(save_buffer)
            } else {
              out_typedbuffer = new Int8Array(voice1st)
            }
            for (let i = 0; i < out_typedbuffer.byteLength; i++) {
              buffer_out_typedarray[offset + i] = out_typedbuffer[i]
            }
            bytes_need -= out_typedbuffer.byteLength
            offset += out_typedbuffer.byteLength
            console.info(`AudioCapturer womx bytes_need=${bytes_need}`);
          }
        }
        ret = outbuferleng - bytes_need
      } else {
        ret = 0
        console.error(`AudioCapturer error getVoiceArrayBuffer, outbuferleng 0.with ringbuffer voicebytes length ${voice_length}`);
      }
    } else {
      ret = 0;
      console.error(`AudioCapturer error getVoiceArrayBuffer, outbuferleng 0.with ringbuffer.length ${AudioCapturer.voiceBuffer1.length}<=0`);
    }
    return ret;
  }

  // Audio stream parameters for capture.
  static audioStreamInfo: audio.AudioStreamInfo = {
    // Sampling rate: 16 kHz (fixed the '彩样率' typo in the original comment).
    samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_16000,
    // Channel count: mono.
    channels: audio.AudioChannel.CHANNEL_1,
    // Sample format: signed 16-bit little-endian.
    sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE,
    // Encoding: raw PCM.
    encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW
  }

  // Capturer descriptor.
  static audioCapturerInfo: audio.AudioCapturerInfo = {
    // Audio source: the microphone.
    source: audio.SourceType.SOURCE_TYPE_MIC,
    // Capturer flags (0 = default).
    capturerFlags: 0
  }

  // The underlying system audio capturer (set by init()).
  static audioCapturer: audio.AudioCapturer
  // ASR engine instance that receives captured audio (set by init()).
  static g_asrinstance: NativeNui

  // Creates the system capturer and subscribes to its data callback.
  // Must be called before start()/stop()/release().
  static async init(asrinstance: NativeNui) {
    AudioCapturer.g_asrinstance = asrinstance
    AudioCapturer.audioCapturer = await audio.createAudioCapturer({
      // Requires both the stream info and the capturer info defined above.
      streamInfo: AudioCapturer.audioStreamInfo,
      capturerInfo: AudioCapturer.audioCapturerInfo
    })
    if (AudioCapturer.audioCapturer !== undefined) {
      // on() is a synchronous void registration; the stray `await` was removed.
      AudioCapturer.audioCapturer.on('readData', AudioCapturer.readDataCallback);
    }
  }

  // Invoked by the system for every captured buffer; forwards the PCM chunk
  // straight to the ASR engine (queueing via setVoiceArrayBuffer is disabled).
  static readDataCallback = (buffer: ArrayBuffer) => {
    console.log(`${TAG} read data bytelength is ${buffer.byteLength}. uid[${process.uid}] pid[${process.pid}] tid[${process.tid}]`);
    //AudioCapturer.setVoiceArrayBuffer(buffer)
    AudioCapturer.g_asrinstance.updateAudio(buffer, false)
  }

  // Clears any stale queued audio, then starts capturing.
  static async start() {
    await AudioCapturer.clearVoiceArrayBuffer()
    if (AudioCapturer.audioCapturer) {
      let stateGroup = [audio.AudioState.STATE_PREPARED,
        audio.AudioState.STATE_PAUSED,
        audio.AudioState.STATE_STOPPED];
      if (stateGroup.indexOf(AudioCapturer.audioCapturer.state.valueOf()) === -1) {
        // Capture may only be started from STATE_PREPARED, STATE_PAUSED or STATE_STOPPED.
        console.error(`${TAG}: start failed`);
        console.error('Capturer is not STATE_PREPARED or STATE_PAUSED or STATE_STOPPED');
        return;
      }
      // Begin recording.
      await AudioCapturer.audioCapturer.start()
      console.log(`${TAG} start done`);
    } else {
      console.log(`${TAG} start with AudioCapturer.audioCapturer is null`);
      return
    }
  }

  // Stops capturing and discards any queued audio.
  static async stop() {
    if (AudioCapturer.audioCapturer) {
      // The capturer can only be stopped from STATE_RUNNING or STATE_PAUSED.
      if (AudioCapturer.audioCapturer.state === audio.AudioState.STATE_RUNNING ||
        AudioCapturer.audioCapturer.state === audio.AudioState.STATE_PAUSED
      ) {
        console.error(`Capturer state is ${AudioCapturer.audioCapturer.state}`);
        await AudioCapturer.audioCapturer.stop()
        console.error('Capturer stop done');
      } else {
        console.error('Capturer is not running or paused');
        return;
      }
    } else {
      console.log(`${TAG} stop with AudioCapturer.audioCapturer is null`);
      return
    }
    await AudioCapturer.clearVoiceArrayBuffer()
  }

  // Releases the system capturer. No-op if already released or never built.
  static async release() {
    if (AudioCapturer.audioCapturer) {
      // release() is invalid in STATE_RELEASED and STATE_NEW.
      if (AudioCapturer.audioCapturer.state === audio.AudioState.STATE_RELEASED ||
        AudioCapturer.audioCapturer.state === audio.AudioState.STATE_NEW) {
        console.info('Capturer already released');
        return;
      }
      await AudioCapturer.audioCapturer.release()
    }
  }
}