# Developing Audio Call

During an audio call, audio output (playing the peer voice) and audio input (recording the local voice) are carried out simultaneously. You can use the AudioRenderer to implement audio output and the AudioCapturer to implement audio input.

Before starting or stopping the audio call service, the application needs to check the [audio scene](audio-call-overview.md#audio-scene) and [ringer mode](audio-call-overview.md#ringer-mode) to adopt proper audio management and prompt policies.

The sample code below demonstrates the basic process of using the AudioRenderer and AudioCapturer to implement the audio call service, without the process of call data transmission. In actual development, the peer call data transmitted over the network needs to be decoded and played, and the sample code uses the process of reading an audio file instead; the local call data needs to be encoded and packed and then sent to the peer over the network, and the sample code uses the process of writing an audio file instead.

## Using AudioRenderer to Play the Peer Voice

This process is similar to the process of [using AudioRenderer to develop audio playback](using-audiorenderer-for-playback.md). The key differences lie in the **audioRendererInfo** parameter and audio data source. In the **audioRendererInfo** parameter used for audio calling, **usage** must be set to **STREAM_USAGE_VOICE_COMMUNICATION**.

```ts
import { audio } from '@kit.AudioKit';
import { fileIo as fs } from '@kit.CoreFileKit';
import { BusinessError } from '@kit.BasicServicesKit';
import { common } from '@kit.AbilityKit';

const TAG = 'VoiceCallDemoForAudioRenderer';
// The process is similar to the process of using AudioRenderer to develop audio playback. The key differences lie in the audioRendererInfo parameter and audio data source.
21class Options { 22 offset?: number; 23 length?: number; 24} 25 26let bufferSize: number = 0; 27let renderModel: audio.AudioRenderer | undefined = undefined; 28let audioStreamInfo: audio.AudioStreamInfo = { 29 samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_48000, // Sampling rate. 30 channels: audio.AudioChannel.CHANNEL_2, // Channel. 31 sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE, // Sampling format. 32 encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW // Encoding format. 33}; 34let audioRendererInfo: audio.AudioRendererInfo = { 35 // Set the parameters related to the call scenario. 36 usage: audio.StreamUsage.STREAM_USAGE_VOICE_COMMUNICATION, // Audio stream usage type: VoIP call. 37 rendererFlags: 0 // AudioRenderer flag. The default value is 0. 38}; 39let audioRendererOptions: audio.AudioRendererOptions = { 40 streamInfo: audioStreamInfo, 41 rendererInfo: audioRendererInfo 42}; 43// Obtain the context from the component and ensure that the return value of this.getUIContext().getHostContext() is UIAbilityContext. 44let context = this.getUIContext().getHostContext() as common.UIAbilityContext; 45let path = context.cacheDir; 46// Ensure that the resource exists in the sandbox path. 47let filePath = path + '/StarWars10s-2C-48000-4SW.pcm'; 48let file: fs.File = fs.openSync(filePath, fs.OpenMode.READ_ONLY); 49let writeDataCallback = (buffer: ArrayBuffer) => { 50 let options: Options = { 51 offset: bufferSize, 52 length: buffer.byteLength 53 }; 54 fs.readSync(file.fd, buffer, options); 55 bufferSize += buffer.byteLength; 56}; 57 58// Create an AudioRenderer instance, and set the events to listen for. 59audio.createAudioRenderer(audioRendererOptions, (err: BusinessError, renderer: audio.AudioRenderer) => { // Create an AudioRenderer instance. 
60 if (!err) { 61 console.info(`${TAG}: creating AudioRenderer success`); 62 renderModel = renderer; 63 if (renderModel !== undefined) { 64 renderModel.on('stateChange', (state: audio.AudioState) => { // Set the events to listen for. A callback is invoked when the AudioRenderer is switched to the specified state. 65 if (state == 1) { 66 console.info('audio renderer state is: STATE_PREPARED'); 67 } 68 if (state == 2) { 69 console.info('audio renderer state is: STATE_RUNNING'); 70 } 71 }); 72 renderModel.on('markReach', 1000, (position: number) => { // Subscribe to the markReach event. A callback is triggered when the number of rendered frames reaches 1000. 73 if (position == 1000) { 74 console.info('ON Triggered successfully'); 75 } 76 }); 77 renderModel.on('writeData', writeDataCallback); 78 } 79 } else { 80 console.info(`${TAG}: creating AudioRenderer failed, error: ${err.message}`); 81 } 82}); 83 84// Start audio rendering. 85async function start() { 86 if (renderModel !== undefined) { 87 let stateGroup: number[] = [audio.AudioState.STATE_PREPARED, audio.AudioState.STATE_PAUSED, audio.AudioState.STATE_STOPPED]; 88 if (stateGroup.indexOf(renderModel.state.valueOf()) === -1) { // Rendering can be started only when the AudioRenderer is in the STATE_PREPARED, STATE_PAUSED, or STATE_STOPPED state. 89 console.error(TAG + 'start failed'); 90 return; 91 } 92 renderModel.start((err: BusinessError) => { 93 if (err) { 94 console.error('Renderer start failed.'); 95 } else { 96 console.info('Renderer start success.'); 97 } 98 }); 99 } 100} 101 102// Pause the rendering. 103async function pause() { 104 if (renderModel !== undefined) { 105 // Rendering can be paused only when the AudioRenderer is in the STATE_RUNNING state. 106 if (renderModel.state.valueOf() !== audio.AudioState.STATE_RUNNING) { 107 console.info('Renderer is not running'); 108 return; 109 } 110 await renderModel.pause(); // Pause rendering. 
111 if (renderModel.state.valueOf() === audio.AudioState.STATE_PAUSED) { 112 console.info('Renderer is paused.'); 113 } else { 114 console.error('Pausing renderer failed.'); 115 } 116 } 117} 118 119// Stop rendering. 120async function stop() { 121 if (renderModel !== undefined) { 122 // The AudioRenderer can be stopped only when it is in the STATE_RUNNING or STATE_PAUSED state. 123 if (renderModel.state.valueOf() !== audio.AudioState.STATE_RUNNING && renderModel.state.valueOf() !== audio.AudioState.STATE_PAUSED) { 124 console.info('Renderer is not running or paused.'); 125 return; 126 } 127 await renderModel.stop(); // Stop rendering. 128 if (renderModel.state.valueOf() === audio.AudioState.STATE_STOPPED) { 129 console.info('Renderer stopped.'); 130 } else { 131 console.error('Stopping renderer failed.'); 132 } 133 } 134} 135 136// Release the instance. 137async function release() { 138 if (renderModel !== undefined) { 139 // The AudioRenderer can be released only when it is not in the STATE_RELEASED state. 140 if (renderModel.state.valueOf() === audio.AudioState.STATE_RELEASED) { 141 console.info('Renderer already released'); 142 return; 143 } 144 await renderModel.release(); // Release the instance. 145 if (renderModel.state.valueOf() === audio.AudioState.STATE_RELEASED) { 146 console.info('Renderer released'); 147 } else { 148 console.error('Renderer release failed.'); 149 } 150 } 151} 152``` 153 154## Using AudioCapturer to Record the Local Voice 155 156This process is similar to the process of [using AudioCapturer to develop audio recording](using-audiocapturer-for-recording.md). The key differences lie in the **audioCapturerInfo** parameter and audio data stream direction. In the **audioCapturerInfo** parameter used for audio calling, **source** must be set to **SOURCE_TYPE_VOICE_COMMUNICATION**. 157 158You must request the ohos.permission.MICROPHONE permission for all recording tasks. 
For details, see [Requesting User Authorization](../../security/AccessToken/request-user-authorization.md).

```ts
import { audio } from '@kit.AudioKit';
import { fileIo as fs } from '@kit.CoreFileKit';
import { BusinessError } from '@kit.BasicServicesKit';
import { common } from '@kit.AbilityKit';

const TAG = 'VoiceCallDemoForAudioCapturer';
class Options {
  offset?: number;
  length?: number;
}

// The process is similar to the process of using AudioCapturer to develop audio recording. The key differences lie in the audioCapturerInfo parameter and audio data stream direction.
let bufferSize: number = 0;
let audioCapturer: audio.AudioCapturer | undefined = undefined;
let audioStreamInfo: audio.AudioStreamInfo = {
  samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_48000, // Sampling rate.
  channels: audio.AudioChannel.CHANNEL_2, // Channel.
  sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE, // Sampling format.
  encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW // Encoding format.
};
let audioCapturerInfo: audio.AudioCapturerInfo = {
  // Set the parameters related to the call scenario.
  source: audio.SourceType.SOURCE_TYPE_VOICE_COMMUNICATION, // Audio source type: voice communication.
  capturerFlags: 0 // AudioCapturer flag. The default value is 0.
};
let audioCapturerOptions: audio.AudioCapturerOptions = {
  streamInfo: audioStreamInfo,
  capturerInfo: audioCapturerInfo
};
// Obtain the context from the component and ensure that the return value of this.getUIContext().getHostContext() is UIAbilityContext.
let context = this.getUIContext().getHostContext() as common.UIAbilityContext;
let path = context.cacheDir;
let filePath = path + '/StarWars10s-2C-48000-4SW.pcm';
let file: fs.File = fs.openSync(filePath, fs.OpenMode.READ_WRITE | fs.OpenMode.CREATE);
// Writes each captured buffer to the PCM file, standing in for encoding and sending call data over the network.
let readDataCallback = (buffer: ArrayBuffer) => {
  let options: Options = {
    offset: bufferSize,
    length: buffer.byteLength
  };
  fs.writeSync(file.fd, buffer, options);
  bufferSize += buffer.byteLength;
};

// Create an AudioCapturer instance, and set the events to listen for.
async function init() {
  audio.createAudioCapturer(audioCapturerOptions, (err: BusinessError, capturer: audio.AudioCapturer) => { // Create an AudioCapturer instance.
    if (err) {
      console.error(`Invoke createAudioCapturer failed, code is ${err.code}, message is ${err.message}`);
      return;
    }
    console.info(`${TAG}: create AudioCapturer success`);
    audioCapturer = capturer;
    if (audioCapturer !== undefined) {
      audioCapturer.on('markReach', 1000, (position: number) => { // Subscribe to the markReach event. A callback is triggered when the number of captured frames reaches 1000.
        if (position === 1000) {
          console.info('ON Triggered successfully');
        }
      });
      audioCapturer.on('periodReach', 2000, (position: number) => { // Subscribe to the periodReach event. A callback is triggered each time when the number of captured frames reaches 2000.
        if (position === 2000) {
          console.info('ON Triggered successfully');
        }
      });
      audioCapturer.on('readData', readDataCallback);
    }
  });
}

// Start audio recording.
async function start() {
  if (audioCapturer !== undefined) {
    let stateGroup: number[] = [audio.AudioState.STATE_PREPARED, audio.AudioState.STATE_PAUSED, audio.AudioState.STATE_STOPPED];
    if (stateGroup.indexOf(audioCapturer.state.valueOf()) === -1) { // Recording can be started only when the AudioCapturer is in the STATE_PREPARED, STATE_PAUSED, or STATE_STOPPED state.
      console.error(`${TAG}: start failed`);
      return;
    }
    audioCapturer.start((err: BusinessError) => {
      if (err) {
        console.error('Capturer start failed.');
      } else {
        console.info('Capturer start success.');
      }
    });
  }
}

// Stop recording.
async function stop() {
  if (audioCapturer !== undefined) {
    // The AudioCapturer can be stopped only when it is in the STATE_RUNNING or STATE_PAUSED state.
    if (audioCapturer.state.valueOf() !== audio.AudioState.STATE_RUNNING && audioCapturer.state.valueOf() !== audio.AudioState.STATE_PAUSED) {
      console.info('Capturer is not running or paused');
      return;
    }
    await audioCapturer.stop(); // Stop recording.
    if (audioCapturer.state.valueOf() === audio.AudioState.STATE_STOPPED) {
      console.info('Capturer stopped');
    } else {
      console.error('Capturer stop failed');
    }
  }
}

// Release the instance.
async function release() {
  if (audioCapturer !== undefined) {
    // The AudioCapturer can be released only when it is not in the STATE_RELEASED or STATE_NEW state.
    if (audioCapturer.state.valueOf() === audio.AudioState.STATE_RELEASED || audioCapturer.state.valueOf() === audio.AudioState.STATE_NEW) {
      console.info('Capturer already released');
      return;
    }
    await audioCapturer.release(); // Release the instance.
    if (audioCapturer.state.valueOf() === audio.AudioState.STATE_RELEASED) {
      console.info('Capturer released');
    } else {
      console.error('Capturer release failed');
    }
  }
}
```