/*
 * Copyright (C) 2023 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "AHAL_StreamRemoteSubmix"
#include <android-base/logging.h>
#include <audio_utils/clock.h>
#include <error/Result.h>
#include <error/expected_utils.h>

#include "core-impl/StreamRemoteSubmix.h"

using aidl::android::hardware::audio::common::SinkMetadata;
using aidl::android::hardware::audio::common::SourceMetadata;
using aidl::android::hardware::audio::core::r_submix::SubmixRoute;
using aidl::android::media::audio::common::AudioDeviceAddress;
using aidl::android::media::audio::common::AudioOffloadInfo;
using aidl::android::media::audio::common::MicrophoneDynamicInfo;
using aidl::android::media::audio::common::MicrophoneInfo;

namespace aidl::android::hardware::audio::core {

using deprecated::InnerStreamWrapper;
using deprecated::StreamCommonInterfaceEx;
using deprecated::StreamSwitcher;

StreamRemoteSubmix::StreamRemoteSubmix(StreamContext* context, const Metadata& metadata,
                                       const AudioDeviceAddress& deviceAddress)
    : StreamCommonImpl(context, metadata),
      mDeviceAddress(deviceAddress),
      mIsInput(isInput(metadata)) {
    mStreamConfig.frameSize = context->getFrameSize();
    mStreamConfig.format = context->getFormat();
    mStreamConfig.channelLayout = context->getChannelLayout();
    mStreamConfig.sampleRate = context->getSampleRate();
}

StreamRemoteSubmix::~StreamRemoteSubmix() {
    cleanupWorker();
}

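// Bind the stream to a SubmixRoute keyed by the device address. The route owns the shared
// MonoPipe (sink) / MonoPipeReader (source) pair used by the input and output halves of the
// submix; if the pipe was already shut down, it is reset before this stream registers itself.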
::android::status_t StreamRemoteSubmix::init(DriverCallbackInterface*) {
    mCurrentRoute = SubmixRoute::findOrCreateRoute(mDeviceAddress, mStreamConfig);
    if (mCurrentRoute == nullptr) {
        return ::android::NO_INIT;
    }
    if (!mCurrentRoute->isStreamConfigValid(mIsInput, mStreamConfig)) {
        LOG(ERROR) << __func__ << ": invalid stream config";
        return ::android::NO_INIT;
    }
    sp<MonoPipe> sink = mCurrentRoute->getSink();
    if (sink == nullptr) {
        LOG(ERROR) << __func__ << ": nullptr sink when opening stream";
        return ::android::NO_INIT;
    }
    if ((!mIsInput || mCurrentRoute->isStreamInOpen()) && sink->isShutdown()) {
        LOG(DEBUG) << __func__ << ": Shut down sink when opening stream";
        if (::android::OK != mCurrentRoute->resetPipe()) {
            LOG(ERROR) << __func__ << ": reset pipe failed";
            return ::android::NO_INIT;
        }
    }
    mCurrentRoute->openStream(mIsInput);
    return ::android::OK;
}

::android::status_t StreamRemoteSubmix::drain(StreamDescriptor::DrainMode) {
    return ::android::OK;
}

::android::status_t StreamRemoteSubmix::flush() {
    // TODO(b/372951987): consider if this needs to be done from 'StreamInWorkerLogic::cycle'.
    return mIsInput ? standby() : ::android::OK;
}

::android::status_t StreamRemoteSubmix::pause() {
    return ::android::OK;
}

::android::status_t StreamRemoteSubmix::standby() {
    mCurrentRoute->standby(mIsInput);
    return ::android::OK;
}

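// 'start' records the stream start time and resets the frame counter; 'transfer' uses these
// values to pace reads and writes at the nominal sample rate (see the sleep logic there).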
::android::status_t StreamRemoteSubmix::start() {
    mCurrentRoute->exitStandby(mIsInput);
    mStartTimeNs = ::android::uptimeNanos();
    mFramesSinceStart = 0;
    return ::android::OK;
}

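// For output streams, shut down the MonoPipe sink and release the output end of the route;
// the client already treats the stream as closed at this point. Input streams have nothing
// to prepare here.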
ndk::ScopedAStatus StreamRemoteSubmix::prepareToClose() {
    if (!mIsInput) {
        std::shared_ptr<SubmixRoute> route = SubmixRoute::findRoute(mDeviceAddress);
        if (route != nullptr) {
            sp<MonoPipe> sink = route->getSink();
            if (sink == nullptr) {
                return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
            }
            LOG(DEBUG) << __func__ << ": shutting down MonoPipe sink";
            sink->shutdown(true);
            // The client already considers this stream as closed, release the output end.
            route->closeStream(mIsInput);
        } else {
            LOG(DEBUG) << __func__ << ": stream already closed.";
            return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
        }
    }
    return ndk::ScopedAStatus::ok();
}

// Remove references to the specified input and output streams. When the device no longer
// references input and output streams, destroy the associated pipe.
void StreamRemoteSubmix::shutdown() {
    mCurrentRoute->closeStream(mIsInput);
    // If all stream instances are closed, we can remove route information for this port.
    if (!mCurrentRoute->hasAtleastOneStreamOpen()) {
        mCurrentRoute->releasePipe();
        LOG(DEBUG) << __func__ << ": pipe destroyed";
        SubmixRoute::removeRoute(mDeviceAddress);
    }
    mCurrentRoute.reset();
}

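// Single entry point for moving audio data: dispatches to 'inRead' or 'outWrite' and reports
// the pipe depth as latency. Because the submix pipe by itself has no real-time producer or
// consumer, the stream throttles itself: when it runs ahead of the wall clock (based on the
// frames accumulated since 'start'), it sleeps for at most one buffer duration.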
::android::status_t StreamRemoteSubmix::transfer(void* buffer, size_t frameCount,
                                                 size_t* actualFrameCount, int32_t* latencyMs) {
    *latencyMs = getDelayInUsForFrameCount(getStreamPipeSizeInFrames()) / 1000;
    LOG(VERBOSE) << __func__ << ": Latency " << *latencyMs << "ms";
    mCurrentRoute->exitStandby(mIsInput);
    ::android::status_t status = mIsInput ? inRead(buffer, frameCount, actualFrameCount)
                                          : outWrite(buffer, frameCount, actualFrameCount);
    if ((status != ::android::OK && mIsInput) ||
        ((status != ::android::OK && status != ::android::DEAD_OBJECT) && !mIsInput)) {
        return status;
    }
    mFramesSinceStart += *actualFrameCount;
    if (!mIsInput && status != ::android::DEAD_OBJECT) return ::android::OK;
    // Input streams always need to block, output streams need to block when there is no sink.
    // When the sink exists, a more sophisticated blocking algorithm is implemented by MonoPipe.
    const long bufferDurationUs =
            (*actualFrameCount) * MICROS_PER_SECOND / mContext.getSampleRate();
    const auto totalDurationUs = (::android::uptimeNanos() - mStartTimeNs) / NANOS_PER_MICROSECOND;
    const long totalOffsetUs =
            mFramesSinceStart * MICROS_PER_SECOND / mContext.getSampleRate() - totalDurationUs;
    LOG(VERBOSE) << __func__ << ": totalOffsetUs " << totalOffsetUs;
    if (totalOffsetUs > 0) {
        const long sleepTimeUs = std::min(totalOffsetUs, bufferDurationUs);
        LOG(VERBOSE) << __func__ << ": sleeping for " << sleepTimeUs << " us";
        usleep(sleepTimeUs);
    }
    return ::android::OK;
}

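// Adjust the position reported to the client by the number of frames currently buffered in
// the pipe: an input stream will still deliver them (add), an output stream has not yet had
// them consumed (subtract).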
::android::status_t StreamRemoteSubmix::refinePosition(StreamDescriptor::Position* position) {
    sp<MonoPipeReader> source = mCurrentRoute->getSource();
    if (source == nullptr) {
        return ::android::NO_INIT;
    }
    const ssize_t framesInPipe = source->availableToRead();
    if (framesInPipe <= 0) {
        // No need to update the position frames
        return ::android::OK;
    }
    if (mIsInput) {
        position->frames += framesInPipe;
    } else if (position->frames >= framesInPipe) {
        position->frames -= framesInPipe;
    }
    return ::android::OK;
}

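// Convert a frame count into a duration at the stream's sample rate. For example, with a
// 48000 Hz stream, 960 frames correspond to 960 * 1000000 / 48000 = 20000 us (20 ms).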
long StreamRemoteSubmix::getDelayInUsForFrameCount(size_t frameCount) {
    return frameCount * MICROS_PER_SECOND / mStreamConfig.sampleRate;
}

// Calculate the maximum size of the pipe buffer in frames for the specified stream.
size_t StreamRemoteSubmix::getStreamPipeSizeInFrames() {
    auto pipeConfig = mCurrentRoute->getPipeConfig();
    const size_t maxFrameSize = std::max(mStreamConfig.frameSize, pipeConfig.frameSize);
    return (pipeConfig.frameCount * pipeConfig.frameSize) / maxFrameSize;
}

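// Write frames into the MonoPipe sink. When the route indicates that writes must not block,
// stale frames are first drained from the source to make room for the newest data, and the
// request is truncated to the space that remains. A shut-down pipe is reported as DEAD_OBJECT
// so that 'transfer' falls back to sleeping instead of spinning.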
::android::status_t StreamRemoteSubmix::outWrite(void* buffer, size_t frameCount,
                                                 size_t* actualFrameCount) {
    sp<MonoPipe> sink = mCurrentRoute->getSink();
    if (sink != nullptr) {
        if (sink->isShutdown()) {
            sink.clear();
            if (++mWriteShutdownCount < kMaxErrorLogs) {
                LOG(DEBUG) << __func__ << ": pipe shutdown, ignoring the write. (limited logging)";
            }
            *actualFrameCount = frameCount;
            return ::android::DEAD_OBJECT;  // Induce wait in `transfer`.
        }
    } else {
        LOG(FATAL) << __func__ << ": without a pipe!";
        return ::android::UNKNOWN_ERROR;
    }
    mWriteShutdownCount = 0;

    LOG(VERBOSE) << __func__ << ": " << mDeviceAddress.toString() << ", " << frameCount
                 << " frames";

    const bool shouldBlockWrite = mCurrentRoute->shouldBlockWrite();
    size_t availableToWrite = sink->availableToWrite();
    // NOTE: sink has been checked above and sink and source life cycles are synchronized.
    sp<MonoPipeReader> source = mCurrentRoute->getSource();
    // If the write to the sink should not block, flush enough frames from the pipe to make space
    // to write the most recent data.
    if (!shouldBlockWrite && availableToWrite < frameCount) {
        static uint8_t flushBuffer[64];
        const size_t flushBufferSizeFrames = sizeof(flushBuffer) / mStreamConfig.frameSize;
        size_t framesToFlushFromSource = frameCount - availableToWrite;
        LOG(DEBUG) << __func__ << ": flushing " << framesToFlushFromSource
                   << " frames from the pipe to avoid blocking";
        while (framesToFlushFromSource) {
            const size_t flushSize = std::min(framesToFlushFromSource, flushBufferSizeFrames);
            framesToFlushFromSource -= flushSize;
            // read does not block
            source->read(flushBuffer, flushSize);
        }
    }
    availableToWrite = sink->availableToWrite();

    if (!shouldBlockWrite && frameCount > availableToWrite) {
        LOG(WARNING) << __func__ << ": writing " << availableToWrite << " vs. requested "
                     << frameCount;
        // Truncate the request to avoid blocking.
        frameCount = availableToWrite;
    }
    ssize_t writtenFrames = sink->write(buffer, frameCount);
    if (writtenFrames < 0) {
        if (writtenFrames == (ssize_t)::android::NEGOTIATE) {
            LOG(ERROR) << __func__ << ": write to pipe returned NEGOTIATE";
            sink.clear();
            *actualFrameCount = 0;
            return ::android::UNKNOWN_ERROR;
        } else {
            // write() returned UNDERRUN or WOULD_BLOCK, retry
            LOG(ERROR) << __func__ << ": write to pipe returned unexpected " << writtenFrames;
            writtenFrames = sink->write(buffer, frameCount);
        }
    }

    if (writtenFrames < 0) {
        LOG(ERROR) << __func__ << ": failed writing to pipe with " << writtenFrames;
        *actualFrameCount = 0;
        return ::android::UNKNOWN_ERROR;
    }
    if (writtenFrames > 0 && frameCount > (size_t)writtenFrames) {
        LOG(WARNING) << __func__ << ": wrote " << writtenFrames << " vs. requested " << frameCount;
    }
    *actualFrameCount = writtenFrames;
    return ::android::OK;
}

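// Read frames from the MonoPipeReader source. The buffer is pre-filled with silence and
// 'actualFrameCount' always reports the full request, so the client observes a steady stream
// even when the pipe under-runs. Reads are retried until either the request is satisfied or a
// deadline derived from the buffer duration expires.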
::android::status_t StreamRemoteSubmix::inRead(void* buffer, size_t frameCount,
                                               size_t* actualFrameCount) {
    // In any case, emulate that data for the entire buffer was available.
    memset(buffer, 0, mStreamConfig.frameSize * frameCount);
    *actualFrameCount = frameCount;

    // about to read from audio source
    sp<MonoPipeReader> source = mCurrentRoute->getSource();
    if (source == nullptr) {
        if (++mReadErrorCount < kMaxErrorLogs) {
            LOG(ERROR) << __func__
                       << ": no audio pipe yet we're trying to read! (not all errors will be "
                          "logged)";
        }
        return ::android::OK;
    }
    // get and hold the sink because 'MonoPipeReader' does not hold a strong pointer to it.
    sp<MonoPipe> sink = mCurrentRoute->getSink();
    if (sink == nullptr) {
        if (++mReadErrorCount < kMaxErrorLogs) {
            LOG(ERROR) << __func__
                       << ": the sink has been released! (not all errors will be logged)";
        }
        return ::android::OK;
    }
    mReadErrorCount = 0;

    LOG(VERBOSE) << __func__ << ": " << mDeviceAddress.toString() << ", " << frameCount
                 << " frames";
    // read the data from the pipe
    char* buff = (char*)buffer;
    size_t actuallyRead = 0;
    long remainingFrames = frameCount;
    // Try to wait as long as possible for the audio duration, but leave some time for the call to
    // 'transfer' to complete. 'kReadAttemptSleepUs' is a good constant for this purpose because it
    // is by definition strictly less than the typical buffer duration.
    const long durationUs =
            std::max(0L, getDelayInUsForFrameCount(frameCount) - kReadAttemptSleepUs);
    const int64_t deadlineTimeNs = ::android::uptimeNanos() + durationUs * NANOS_PER_MICROSECOND;
    while (remainingFrames > 0) {
        ssize_t framesRead = source->read(buff, remainingFrames);
        LOG(VERBOSE) << __func__ << ": frames read " << framesRead;
        if (framesRead > 0) {
            remainingFrames -= framesRead;
            buff += framesRead * mStreamConfig.frameSize;
            LOG(VERBOSE) << __func__ << ": got " << framesRead
                         << " frames, remaining =" << remainingFrames;
            actuallyRead += framesRead;
        }
        if (::android::uptimeNanos() >= deadlineTimeNs) break;
        if (framesRead <= 0) {
            LOG(VERBOSE) << __func__ << ": read returned " << framesRead
                         << ", read failure, sleeping for " << kReadAttemptSleepUs << " us";
            usleep(kReadAttemptSleepUs);
        }
    }
    if (actuallyRead < frameCount) {
        if (++mReadFailureCount < kMaxReadFailureAttempts) {
            LOG(WARNING) << __func__ << ": read " << actuallyRead << " vs. requested " << frameCount
                         << " (not all errors will be logged)";
        }
    } else {
        mReadFailureCount = 0;
    }
    mCurrentRoute->updateReadCounterFrames(*actualFrameCount);
    return ::android::OK;
}

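// The StreamIn/StreamOut wrappers below are built on StreamSwitcher: each stream starts out as
// a stub, and the actual StreamRemoteSubmix instance is only created once 'setConnectedDevices'
// provides a single r_submix device, whose address selects the SubmixRoute.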
StreamInRemoteSubmix::StreamInRemoteSubmix(StreamContext&& context,
                                           const SinkMetadata& sinkMetadata,
                                           const std::vector<MicrophoneInfo>& microphones)
    : StreamIn(std::move(context), microphones), StreamSwitcher(&mContextInstance, sinkMetadata) {}

ndk::ScopedAStatus StreamInRemoteSubmix::getActiveMicrophones(
        std::vector<MicrophoneDynamicInfo>* _aidl_return) {
    LOG(DEBUG) << __func__ << ": not supported";
    *_aidl_return = std::vector<MicrophoneDynamicInfo>();
    return ndk::ScopedAStatus::ok();
}

StreamSwitcher::DeviceSwitchBehavior StreamInRemoteSubmix::switchCurrentStream(
        const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices) {
    // This implementation effectively postpones stream creation until
    // receiving the first call to 'setConnectedDevices' with a non-empty list.
    if (isStubStream()) {
        if (devices.size() == 1) {
            auto deviceDesc = devices.front().type;
            if (deviceDesc.type ==
                ::aidl::android::media::audio::common::AudioDeviceType::IN_SUBMIX) {
                return DeviceSwitchBehavior::CREATE_NEW_STREAM;
            }
            LOG(ERROR) << __func__ << ": Device type " << toString(deviceDesc.type)
                       << " not supported";
        } else {
            LOG(ERROR) << __func__ << ": Only single device supported.";
        }
        return DeviceSwitchBehavior::UNSUPPORTED_DEVICES;
    }
    return DeviceSwitchBehavior::USE_CURRENT_STREAM;
}

std::unique_ptr<StreamCommonInterfaceEx> StreamInRemoteSubmix::createNewStream(
        const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices,
        StreamContext* context, const Metadata& metadata) {
    return std::unique_ptr<StreamCommonInterfaceEx>(
            new InnerStreamWrapper<StreamRemoteSubmix>(context, metadata, devices.front().address));
}

StreamOutRemoteSubmix::StreamOutRemoteSubmix(StreamContext&& context,
                                             const SourceMetadata& sourceMetadata,
                                             const std::optional<AudioOffloadInfo>& offloadInfo)
    : StreamOut(std::move(context), offloadInfo),
      StreamSwitcher(&mContextInstance, sourceMetadata) {}

StreamSwitcher::DeviceSwitchBehavior StreamOutRemoteSubmix::switchCurrentStream(
        const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices) {
    // This implementation effectively postpones stream creation until
    // receiving the first call to 'setConnectedDevices' with a non-empty list.
    if (isStubStream()) {
        if (devices.size() == 1) {
            auto deviceDesc = devices.front().type;
            if (deviceDesc.type ==
                ::aidl::android::media::audio::common::AudioDeviceType::OUT_SUBMIX) {
                return DeviceSwitchBehavior::CREATE_NEW_STREAM;
            }
            LOG(ERROR) << __func__ << ": Device type " << toString(deviceDesc.type)
                       << " not supported";
        } else {
            LOG(ERROR) << __func__ << ": Only single device supported.";
        }
        return DeviceSwitchBehavior::UNSUPPORTED_DEVICES;
    }
    return DeviceSwitchBehavior::USE_CURRENT_STREAM;
}

std::unique_ptr<StreamCommonInterfaceEx> StreamOutRemoteSubmix::createNewStream(
        const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices,
        StreamContext* context, const Metadata& metadata) {
    return std::unique_ptr<StreamCommonInterfaceEx>(
            new InnerStreamWrapper<StreamRemoteSubmix>(context, metadata, devices.front().address));
}

}  // namespace aidl::android::hardware::audio::core