1 /*
2 * Copyright (C) 2023 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #define LOG_TAG "StreamHalAidl"
18 //#define LOG_NDEBUG 0
19
20 #include <algorithm>
21 #include <cstdint>
22
23 #include <audio_utils/clock.h>
24 #include <media/AidlConversion.h>
25 #include <media/AidlConversionCore.h>
26 #include <media/AidlConversionCppNdk.h>
27 #include <media/AidlConversionNdk.h>
28 #include <media/AidlConversionUtil.h>
29 #include <media/AudioParameter.h>
30 #include <mediautils/TimeCheck.h>
31 #include <system/audio.h>
32 #include <Utils.h>
33 #include <utils/Log.h>
34
35 #include "AidlUtils.h"
36 #include "DeviceHalAidl.h"
37 #include "EffectHalAidl.h"
38 #include "StreamHalAidl.h"
39
40 using ::aidl::android::aidl_utils::statusTFromBinderStatus;
41 using ::aidl::android::hardware::audio::common::kDumpFromAudioServerArgument;
42 using ::aidl::android::hardware::audio::common::PlaybackTrackMetadata;
43 using ::aidl::android::hardware::audio::common::RecordTrackMetadata;
44 using ::aidl::android::hardware::audio::core::IStreamCommon;
45 using ::aidl::android::hardware::audio::core::IStreamIn;
46 using ::aidl::android::hardware::audio::core::IStreamOut;
47 using ::aidl::android::hardware::audio::core::MmapBufferDescriptor;
48 using ::aidl::android::hardware::audio::core::StreamDescriptor;
49 using ::aidl::android::hardware::audio::core::VendorParameter;
50 using ::aidl::android::media::audio::common::MicrophoneDynamicInfo;
51 using ::aidl::android::media::audio::IHalAdapterVendorExtension;
52
/**
 * Notes on the position handling implementation. First, please consult the
 * "On position reporting" comment in StreamHalInterface.h for the context.
 *
 * The adaptation layer for AIDL HALs needs to emulate the HIDL HAL behavior by
 * resetting the reported position after certain events, depending on the kind of
 * audio data stream. (This is only needed until some future release when the
 * framework stops supporting HIDL HALs and the framework code which translates
 * resetting positions into continuous ones can be removed.) Unlike the AIDL
 * interface, the interface between the HAL adaptation layer and the framework
 * uses separate method calls for controlling the stream state and retrieving the
 * position. Because of that, the code which implements position reporting
 * (methods 'getRenderPosition' and 'getObservablePosition') needs to use the
 * stream positions captured at certain state-changing events, like flush or
 * drain. These are stored in the field called 'mStatePositions'. This field is
 * updated by the code which changes the stream state, which happens in two
 * places: the 'sendCommand' method, used for all streams, and the handlers of
 * asynchronous stream events, the 'onAsync...' methods.
 */
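/*
 * As an illustration (not a literal excerpt from the code below), for a stream that
 * has to reset its position after a flush or drain, the framework-facing position is
 * derived from the HAL position roughly as follows:
 *
 *   framesForClient = halFrames <= framesAtFlushOrDrain
 *           ? 0 : halFrames - framesAtFlushOrDrain;
 *
 * where 'framesAtFlushOrDrain' is the value captured into 'mStatePositions' when the
 * corresponding 'flush' or 'drain' command was confirmed by the HAL (see 'sendCommand',
 * 'getRenderPosition' and 'getPresentationPosition' below).
 */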
73
74 namespace android {
75
76 using HalCommand = StreamDescriptor::Command;
77
78 namespace {
79
80 static constexpr int32_t kAidlVersion1 = 1;
81 static constexpr int32_t kAidlVersion2 = 2;
82 static constexpr int32_t kAidlVersion3 = 3;
83
84 static constexpr const char* kCreateMmapBuffer = "aosp.createMmapBuffer";
85
template<HalCommand::Tag cmd> HalCommand makeHalCommand() {
87 return HalCommand::make<cmd>(::aidl::android::media::audio::common::Void{});
88 }
template<HalCommand::Tag cmd, typename T> HalCommand makeHalCommand(T data) {
90 return HalCommand::make<cmd>(data);
91 }
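// Usage examples (illustrative): makeHalCommand<HalCommand::Tag::standby>() builds a
// 'standby' command carrying a Void payload, while makeHalCommand<HalCommand::Tag::burst>(0)
// builds a zero-sized 'burst', which is used below to move a stream out of the IDLE state.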
92
93 template <typename MQTypeError>
auto fmqErrorHandler(const char* mqName) {
95 return [m = std::string(mqName)](MQTypeError fmqError, std::string&& errorMessage) {
96 mediautils::TimeCheck::signalAudioHals();
97 LOG_ALWAYS_FATAL_IF(fmqError != MQTypeError::NONE, "%s: %s",
98 m.c_str(), errorMessage.c_str());
99 };
100 }
101
102 } // namespace
103
104 // static
105 template<class T>
std::shared_ptr<IStreamCommon> StreamHalAidl::getStreamCommon(const std::shared_ptr<T>& stream) {
107 std::shared_ptr<::aidl::android::hardware::audio::core::IStreamCommon> streamCommon;
108 if (stream != nullptr) {
109 if (ndk::ScopedAStatus status = stream->getStreamCommon(&streamCommon);
110 !status.isOk()) {
111 ALOGE("%s: failed to retrieve IStreamCommon instance: %s", __func__,
112 status.getDescription().c_str());
113 }
114 }
115 return streamCommon;
116 }
117
StreamHalAidl::StreamHalAidl(std::string_view className, bool isInput, const audio_config& config,
        int32_t nominalLatency, StreamContextAidl&& context,
        const std::shared_ptr<IStreamCommon>& stream,
        const std::shared_ptr<IHalAdapterVendorExtension>& vext)
122 : ConversionHelperAidl(className, std::string(isInput ? "in" : "out") + "|ioHandle:" +
123 std::to_string(context.getIoHandle())),
124 mIsInput(isInput),
125 mConfig(configToBase(config)),
126 mContext(std::move(context)),
127 mStream(stream),
128 mVendorExt(vext),
129 mLastReplyLifeTimeNs(
130 std::min(static_cast<size_t>(20),
131 mContext.getBufferDurationMs(mConfig.sample_rate))
132 * NANOS_PER_MILLISECOND)
133 {
134 AUGMENT_LOG(D);
135 {
136 std::lock_guard l(mLock);
137 mLastReply.latencyMs = nominalLatency;
138 }
139 // Instrument audio signal power logging.
140 // Note: This assumes channel mask, format, and sample rate do not change after creation.
141 if (audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
142 /* mStreamPowerLog.isUserDebugOrEngBuild() && */
143 StreamHalAidl::getAudioProperties(&config) == NO_ERROR) {
144 mStreamPowerLog.init(config.sample_rate, config.channel_mask, config.format);
145 }
146
147 if (mStream == nullptr) return;
148
149 mContext.getCommandMQ()->setErrorHandler(
150 fmqErrorHandler<StreamContextAidl::CommandMQ::Error>("CommandMQ"));
151 mContext.getReplyMQ()->setErrorHandler(
152 fmqErrorHandler<StreamContextAidl::ReplyMQ::Error>("ReplyMQ"));
153 if (mContext.getDataMQ() != nullptr) {
154 mContext.getDataMQ()->setErrorHandler(
155 fmqErrorHandler<StreamContextAidl::DataMQ::Error>("DataMQ"));
156 }
157
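    // Interface versions above 3 are expected to support 'createMmapBuffer' natively;
    // for older versions, probe for support by setting the "aosp.createMmapBuffer"
    // vendor parameter.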
158 if (auto status = mStream->getInterfaceVersion(&mAidlInterfaceVersion); status.isOk()) {
159 if (mAidlInterfaceVersion > kAidlVersion3) {
160 mSupportsCreateMmapBuffer = true;
161 } else {
162 VendorParameter createMmapBuffer{.id = kCreateMmapBuffer};
163 mSupportsCreateMmapBuffer =
164 mStream->setVendorParameters({createMmapBuffer}, false).isOk();
165 }
166 } else {
167 AUGMENT_LOG(E, "failed to retrieve stream interface version: %s", status.getMessage());
168 }
169 }
170
StreamHalAidl::~StreamHalAidl() {
172 AUGMENT_LOG(D);
173 if (mStream != nullptr) {
174 ndk::ScopedAStatus status = serializeCall(mStream, &Stream::close);
175 AUGMENT_LOG_IF(E, !status.isOk(), "status %s", status.getDescription().c_str());
176 }
177 }
178
status_t StreamHalAidl::getBufferSize(size_t *size) {
180 AUGMENT_LOG(D);
181 if (size == nullptr) {
182 return BAD_VALUE;
183 }
184 if (mContext.getFrameSizeBytes() == 0 || mContext.getBufferSizeFrames() == 0 ||
185 !mStream) {
186 return NO_INIT;
187 }
188 *size = mContext.getBufferSizeBytes();
189 AUGMENT_LOG(I, "size: %zu", *size);
190 return OK;
191 }
192
status_t StreamHalAidl::getAudioProperties(audio_config_base_t *configBase) {
194 AUGMENT_LOG(D);
195 if (configBase == nullptr) {
196 return BAD_VALUE;
197 }
198 if (!mStream) return NO_INIT;
199 *configBase = mConfig;
200 return OK;
201 }
202
status_t StreamHalAidl::setParameters(const String8& kvPairs) {
204 AUGMENT_LOG(V);
205 TIME_CHECK();
206 if (!mStream) return NO_INIT;
207 AudioParameter parameters(kvPairs);
208 AUGMENT_LOG(D, "parameters: %s", parameters.toString().c_str());
209
210 (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int>(
211 parameters, String8(AudioParameter::keyStreamHwAvSync), [&](int hwAvSyncId) {
212 return statusTFromBinderStatus(
213 serializeCall(mStream, &Stream::updateHwAvSyncId, hwAvSyncId));
214 }));
215 return parseAndSetVendorParameters(mVendorExt, mStream, parameters);
216 }
217
status_t StreamHalAidl::getParameters(const String8& keys __unused, String8 *values) {
219 AUGMENT_LOG(V);
220 TIME_CHECK();
221 if (!mStream) return NO_INIT;
222 if (values == nullptr) {
223 return BAD_VALUE;
224 }
225 AudioParameter parameterKeys(keys), result;
226 *values = result.toString();
227 return parseAndGetVendorParameters(mVendorExt, mStream, parameterKeys, values);
228 }
229
status_t StreamHalAidl::getFrameSize(size_t *size) {
231 AUGMENT_LOG(D);
232 if (size == nullptr) {
233 return BAD_VALUE;
234 }
235 if (mContext.getFrameSizeBytes() == 0 || !mStream) {
236 return NO_INIT;
237 }
238 *size = mContext.getFrameSizeBytes();
239 return OK;
240 }
241
status_t StreamHalAidl::addEffect(sp<EffectHalInterface> effect) {
243 AUGMENT_LOG(D);
244 TIME_CHECK();
245 if (!mStream) return NO_INIT;
246 if (effect == nullptr) {
247 return BAD_VALUE;
248 }
249 auto aidlEffect = sp<effect::EffectHalAidl>::cast(effect);
250 return statusTFromBinderStatus(
251 serializeCall(mStream, &Stream::addEffect, aidlEffect->getIEffect()));
252 }
253
status_t StreamHalAidl::removeEffect(sp<EffectHalInterface> effect) {
255 AUGMENT_LOG(D);
256 TIME_CHECK();
257 if (!mStream) return NO_INIT;
258 if (effect == nullptr) {
259 return BAD_VALUE;
260 }
261 auto aidlEffect = sp<effect::EffectHalAidl>::cast(effect);
262 return statusTFromBinderStatus(
263 serializeCall(mStream, &Stream::removeEffect, aidlEffect->getIEffect()));
264 }
265
status_t StreamHalAidl::standby() {
267 AUGMENT_LOG(D);
268 TIME_CHECK();
269 if (!mStream) return NO_INIT;
270 const auto state = getState();
271 StreamDescriptor::Reply reply;
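    // Walk the stream down to STANDBY step by step, relying on the fall-through between
    // the cases: (ACTIVE | DRAINING | TRANSFERRING) -> pause -> (*PAUSED) -> flush ->
    // IDLE -> standby -> STANDBY.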
272 switch (state) {
273 case StreamDescriptor::State::ACTIVE:
274 case StreamDescriptor::State::DRAINING:
275 case StreamDescriptor::State::TRANSFERRING:
276 RETURN_STATUS_IF_ERROR(pause(&reply));
277 if (reply.state != StreamDescriptor::State::PAUSED &&
278 reply.state != StreamDescriptor::State::DRAIN_PAUSED &&
279 reply.state != StreamDescriptor::State::TRANSFER_PAUSED &&
280 (state != StreamDescriptor::State::DRAINING ||
281 reply.state != StreamDescriptor::State::IDLE)) {
282 AUGMENT_LOG(E, "unexpected stream state: %s (expected PAUSED)",
283 toString(reply.state).c_str());
284 return INVALID_OPERATION;
285 }
286 FALLTHROUGH_INTENDED;
287 case StreamDescriptor::State::PAUSED:
288 case StreamDescriptor::State::DRAIN_PAUSED:
289 case StreamDescriptor::State::TRANSFER_PAUSED:
290 if (mIsInput) return flush();
291 RETURN_STATUS_IF_ERROR(flush(&reply));
292 if (reply.state != StreamDescriptor::State::IDLE) {
293 AUGMENT_LOG(E, "unexpected stream state: %s (expected IDLE)",
294 toString(reply.state).c_str());
295 return INVALID_OPERATION;
296 }
297 FALLTHROUGH_INTENDED;
298 case StreamDescriptor::State::IDLE:
299 RETURN_STATUS_IF_ERROR(sendCommand(makeHalCommand<HalCommand::Tag::standby>(),
300 &reply, true /*safeFromNonWorkerThread*/));
301 if (reply.state != StreamDescriptor::State::STANDBY) {
302 AUGMENT_LOG(E, "unexpected stream state: %s (expected STANDBY)",
303 toString(reply.state).c_str());
304 return INVALID_OPERATION;
305 }
306 FALLTHROUGH_INTENDED;
307 case StreamDescriptor::State::STANDBY:
308 return OK;
309 default:
310 AUGMENT_LOG(E, "not supported from %s stream state %s", mIsInput ? "input" : "output",
311 toString(state).c_str());
312 return INVALID_OPERATION;
313 }
314 }
315
status_t StreamHalAidl::dump(int fd, const Vector<String16>& args __unused) {
317 AUGMENT_LOG(D);
318 mStreamPowerLog.dump(fd);
319 return OK;
320 }
321
status_t StreamHalAidl::start() {
323 AUGMENT_LOG(D);
324 TIME_CHECK();
325 if (!mStream) return NO_INIT;
326 if (!mContext.isMmapped()) {
327 return BAD_VALUE;
328 }
329 StreamDescriptor::Reply reply;
330 RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply));
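    // For MMAP streams, walk up to ACTIVE using the fall-through between the cases:
    // 'start' moves STANDBY -> IDLE, and a zero-sized 'burst' moves IDLE -> ACTIVE.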
331 switch (reply.state) {
332 case StreamDescriptor::State::STANDBY:
333 RETURN_STATUS_IF_ERROR(
334 sendCommand(makeHalCommand<HalCommand::Tag::start>(), &reply, true));
335 if (reply.state != StreamDescriptor::State::IDLE) {
336 AUGMENT_LOG(E, "unexpected stream state: %s (expected IDLE)",
337 toString(reply.state).c_str());
338 return INVALID_OPERATION;
339 }
340 FALLTHROUGH_INTENDED;
341 case StreamDescriptor::State::IDLE:
342 RETURN_STATUS_IF_ERROR(
343 sendCommand(makeHalCommand<HalCommand::Tag::burst>(0), &reply, true));
344 if (reply.state != StreamDescriptor::State::ACTIVE) {
345 AUGMENT_LOG(E, "unexpected stream state: %s (expected ACTIVE)",
346 toString(reply.state).c_str());
347 return INVALID_OPERATION;
348 }
349 FALLTHROUGH_INTENDED;
350 case StreamDescriptor::State::ACTIVE:
351 return OK;
352 case StreamDescriptor::State::DRAINING:
353 RETURN_STATUS_IF_ERROR(
354 sendCommand(makeHalCommand<HalCommand::Tag::start>(), &reply, true));
355 if (reply.state != StreamDescriptor::State::ACTIVE) {
356 AUGMENT_LOG(E, "unexpected stream state: %s (expected ACTIVE)",
357 toString(reply.state).c_str());
358 return INVALID_OPERATION;
359 }
360 return OK;
361 default:
362 AUGMENT_LOG(E, "not supported from %s stream state %s", mIsInput ? "input" : "output",
363 toString(reply.state).c_str());
364 return INVALID_OPERATION;
365 }
366 }
367
status_t StreamHalAidl::stop() {
369 AUGMENT_LOG(D);
370 TIME_CHECK();
371 if (!mStream) return NO_INIT;
372 if (!mContext.isMmapped()) {
373 return BAD_VALUE;
374 }
375 StreamDescriptor::Reply reply;
376 RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply));
377 if (const auto state = reply.state; state == StreamDescriptor::State::ACTIVE) {
378 return drain(false /*earlyNotify*/, nullptr);
379 } else if (state == StreamDescriptor::State::DRAINING) {
380 RETURN_STATUS_IF_ERROR(pause());
381 return flush();
382 } else if (state == StreamDescriptor::State::PAUSED) {
383 return flush();
384 } else if (state != StreamDescriptor::State::IDLE &&
385 state != StreamDescriptor::State::STANDBY) {
386 AUGMENT_LOG(E, "not supported from %s stream state %s", mIsInput ? "input" : "output",
387 toString(state).c_str());
388 return INVALID_OPERATION;
389 }
390 return OK;
391 }
392
status_t StreamHalAidl::getLatency(uint32_t *latency) {
394 AUGMENT_LOG(V);
395 if (!mStream) return NO_INIT;
396 StreamDescriptor::Reply reply;
397 RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply));
398 *latency = std::clamp(std::max<int32_t>(0, reply.latencyMs), 1, 3000);
399 AUGMENT_LOG_IF(W, reply.latencyMs != static_cast<int32_t>(*latency),
400 "Suspicious latency value reported by HAL: %d, clamped to %u", reply.latencyMs,
401 *latency);
402 return OK;
403 }
404
status_t StreamHalAidl::getObservablePosition(int64_t* frames, int64_t* timestamp,
        StatePositions* statePositions) {
407 AUGMENT_LOG(V);
408 if (!mStream) return NO_INIT;
409 StreamDescriptor::Reply reply;
410 RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply, statePositions));
411 if (reply.observable.frames == StreamDescriptor::Position::UNKNOWN ||
412 reply.observable.timeNs == StreamDescriptor::Position::UNKNOWN) {
413 return INVALID_OPERATION;
414 }
415 *frames = reply.observable.frames;
416 *timestamp = reply.observable.timeNs;
417 return OK;
418 }
419
status_t StreamHalAidl::getHardwarePosition(int64_t *frames, int64_t *timestamp) {
421 AUGMENT_LOG(V);
422 if (!mStream) return NO_INIT;
423 StreamDescriptor::Reply reply;
424 StatePositions statePositions{};
425 RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply, &statePositions));
426 if (reply.hardware.frames == StreamDescriptor::Position::UNKNOWN ||
427 reply.hardware.timeNs == StreamDescriptor::Position::UNKNOWN) {
428 AUGMENT_LOG(W, "No position was reported by the HAL");
429 return INVALID_OPERATION;
430 }
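    // Report the hardware position relative to the most recent reset point (standby or
    // flush/drain), mirroring what 'getRenderPosition' does for the observable position.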
431 int64_t mostRecentResetPoint = std::max(statePositions.hardware.framesAtStandby,
432 statePositions.hardware.framesAtFlushOrDrain);
433 int64_t aidlFrames = reply.hardware.frames;
434 *frames = aidlFrames <= mostRecentResetPoint ? 0 : aidlFrames - mostRecentResetPoint;
435 *timestamp = reply.hardware.timeNs;
436 return OK;
437 }
438
status_t StreamHalAidl::getXruns(int32_t *frames) {
440 AUGMENT_LOG(V);
441 if (!mStream) return NO_INIT;
442 StreamDescriptor::Reply reply;
443 RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply));
444 if (reply.xrunFrames == StreamDescriptor::Position::UNKNOWN) {
445 return INVALID_OPERATION;
446 }
447 *frames = reply.xrunFrames;
448 return OK;
449 }
450
status_t StreamHalAidl::transfer(void *buffer, size_t bytes, size_t *transferred) {
452 AUGMENT_LOG(V);
453 // TIME_CHECK(); // TODO(b/243839867) reenable only when optimized.
454 if (!mStream || mContext.getDataMQ() == nullptr) return NO_INIT;
455 mWorkerTid.store(gettid(), std::memory_order_release);
456 // Switch the stream into an active state if needed.
    // Note: in the future we may add support for priming the audio pipeline
    // with data prior to enabling output (that is, issuing a "burst" command in the
    // "standby" stream state); however, this scenario wasn't supported by the HIDL HAL.
460 if (getState() == StreamDescriptor::State::STANDBY) {
461 StreamDescriptor::Reply reply;
462 RETURN_STATUS_IF_ERROR(sendCommand(makeHalCommand<HalCommand::Tag::start>(), &reply));
463 if (reply.state != StreamDescriptor::State::IDLE) {
464 AUGMENT_LOG(E, "failed to get the stream out of standby, actual state: %s",
465 toString(reply.state).c_str());
466 return INVALID_OPERATION;
467 }
468 }
469 if (!mIsInput) {
470 bytes = std::min(bytes, mContext.getDataMQ()->availableToWrite());
471 }
472 StreamDescriptor::Command burst =
473 StreamDescriptor::Command::make<StreamDescriptor::Command::Tag::burst>(bytes);
474 if (!mIsInput) {
475 if (!mContext.getDataMQ()->write(static_cast<const int8_t*>(buffer), bytes)) {
476 AUGMENT_LOG(E, "failed to write %zu bytes to data MQ", bytes);
477 return NOT_ENOUGH_DATA;
478 }
479 }
480 StreamDescriptor::Reply reply;
481 RETURN_STATUS_IF_ERROR(sendCommand(burst, &reply));
482 *transferred = reply.fmqByteCount;
483 if (mIsInput) {
484 LOG_ALWAYS_FATAL_IF(*transferred > bytes,
485 "%s: HAL module read %zu bytes, which exceeds requested count %zu",
486 __func__, *transferred, bytes);
487 if (auto toRead = mContext.getDataMQ()->availableToRead();
488 toRead != 0 && !mContext.getDataMQ()->read(static_cast<int8_t*>(buffer), toRead)) {
489 AUGMENT_LOG(E, "failed to read %zu bytes to data MQ", toRead);
490 return NOT_ENOUGH_DATA;
491 }
492 } else if (*transferred > bytes) {
493 ALOGW("%s: HAL module wrote %zu bytes, which exceeds requested count %zu",
494 __func__, *transferred, bytes);
495 *transferred = bytes;
496 }
497 mStreamPowerLog.log(buffer, *transferred);
498 return OK;
499 }
500
status_t StreamHalAidl::pause(StreamDescriptor::Reply* reply) {
502 AUGMENT_LOG(D);
503 TIME_CHECK();
504 if (!mStream) return NO_INIT;
505
506 if (const auto state = getState(); isInPlayOrRecordState(state)) {
507 StreamDescriptor::Reply localReply{};
508 StreamDescriptor::Reply* innerReply = reply ?: &localReply;
509 auto status = sendCommand(
510 makeHalCommand<HalCommand::Tag::pause>(), innerReply,
                true /*safeFromNonWorkerThread*/); // The worker stops its I/O activity first.
512 if (status == STATUS_INVALID_OPERATION &&
513 !isInPlayOrRecordState(innerReply->state)) {
            /**
             * In transient states like DRAINING, the HAL may change its
             * StreamDescriptor::State on its own and thus be out of sync with the client.
             * The client can then send a command that is unexpected for the current HAL
             * state, and the HAL returns a failure. Such a failure is natural, and the
             * client handles it gracefully.
             * Examples where the HAL changes its state on its own:
             * 1) DRAINING -> IDLE (on empty buffer)
             * 2) DRAINING -> IDLE (on IStreamCallback::onDrainReady)
             */
            AUGMENT_LOG(D,
                    "HAL failed to handle the 'pause' command, but the stream has already"
                    " moved out of an active state on its own, current state: %s",
                    toString(innerReply->state).c_str());
527 return OK;
528 }
529 return status;
530 } else {
531 AUGMENT_LOG(D, "already stream in one of the PAUSED kind of states, current state: %s",
532 toString(state).c_str());
533 return OK;
534 }
535 }
536
status_t StreamHalAidl::resume(StreamDescriptor::Reply* reply) {
538 AUGMENT_LOG(D);
539 TIME_CHECK();
540 if (!mStream) return NO_INIT;
541 if (mIsInput) {
542 return sendCommand(makeHalCommand<HalCommand::Tag::burst>(0), reply);
543 } else {
544 if (const auto state = getState(); state == StreamDescriptor::State::IDLE) {
545 // Handle pause-flush-resume sequence. 'flush' from PAUSED goes to
546 // IDLE. We move here from IDLE to ACTIVE (same as 'start' from PAUSED).
547 StreamDescriptor::Reply localReply{};
548 StreamDescriptor::Reply* innerReply = reply ?: &localReply;
549 RETURN_STATUS_IF_ERROR(
550 sendCommand(makeHalCommand<HalCommand::Tag::burst>(0), innerReply));
551 if (innerReply->state != StreamDescriptor::State::ACTIVE) {
552 AUGMENT_LOG(E, "unexpected stream state: %s (expected ACTIVE)",
553 toString(innerReply->state).c_str());
554 return INVALID_OPERATION;
555 }
556 return OK;
557 } else if (isInPausedState(state)) {
558 return sendCommand(makeHalCommand<HalCommand::Tag::start>(), reply);
559 } else if (isInPlayOrRecordState(state)) {
560 AUGMENT_LOG(D, "already in stream state: %s", toString(state).c_str());
561 return OK;
562 } else {
563 AUGMENT_LOG(E, "unexpected stream state: %s (expected IDLE or one of *PAUSED states)",
564 toString(state).c_str());
565 return INVALID_OPERATION;
566 }
567 }
568 }
569
status_t StreamHalAidl::drain(bool earlyNotify, StreamDescriptor::Reply* reply) {
571 AUGMENT_LOG(D);
572 TIME_CHECK();
573 if (!mStream) return NO_INIT;
574 return sendCommand(makeHalCommand<HalCommand::Tag::drain>(
575 mIsInput ? StreamDescriptor::DrainMode::DRAIN_UNSPECIFIED :
576 earlyNotify ? StreamDescriptor::DrainMode::DRAIN_EARLY_NOTIFY :
577 StreamDescriptor::DrainMode::DRAIN_ALL), reply,
578 true /*safeFromNonWorkerThread*/);
579 }
580
status_t StreamHalAidl::flush(StreamDescriptor::Reply* reply) {
582 AUGMENT_LOG(D);
583 TIME_CHECK();
584 if (!mStream) return NO_INIT;
585
586 if (const auto state = getState(); isInPausedState(state)) {
587 return sendCommand(
588 makeHalCommand<HalCommand::Tag::flush>(), reply,
                true /*safeFromNonWorkerThread*/); // The worker stops its I/O activity first.
590 } else if (isInPlayOrRecordState(state)) {
591 AUGMENT_LOG(E, "found stream in non-flushable state: %s", toString(state).c_str());
592 return INVALID_OPERATION;
593 } else {
594 AUGMENT_LOG(D, "already stream in one of the flushable state: current state: %s",
595 toString(state).c_str());
596 return OK;
597 }
598 }
599
status_t StreamHalAidl::exit() {
601 AUGMENT_LOG(D);
602 TIME_CHECK();
603 if (!mStream) return NO_INIT;
604 return statusTFromBinderStatus(serializeCall(mStream, &Stream::prepareToClose));
605 }
606
void StreamHalAidl::onAsyncTransferReady() {
608 StreamDescriptor::State state;
609 {
610 // Use 'mCommandReplyLock' to ensure that 'sendCommand' has finished updating the state
611 // after the reply from the 'burst' command.
612 std::lock_guard l(mCommandReplyLock);
613 state = getState();
614 }
615 bool isCallbackExpected = false;
616 if (state == StreamDescriptor::State::TRANSFERRING) {
617 isCallbackExpected = true;
618 } else if (mContext.hasClipTransitionSupport() && state == StreamDescriptor::State::DRAINING) {
619 std::lock_guard l(mLock);
620 isCallbackExpected = mStatePositions.drainState == StatePositions::DrainState::EN_RECEIVED;
621 if (!isCallbackExpected) {
622 AUGMENT_LOG(W, "drainState %d", static_cast<int>(mStatePositions.drainState));
623 }
624 }
625 if (isCallbackExpected) {
626 // Retrieve the current state together with position counters unconditionally
627 // to ensure that the state on our side gets updated.
628 sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(),
629 nullptr, true /*safeFromNonWorkerThread */);
630 } else {
631 AUGMENT_LOG(W, "unexpected onTransferReady in the state %s", toString(state).c_str());
632 }
633 }
634
void StreamHalAidl::onAsyncDrainReady() {
636 StreamDescriptor::State state;
637 {
638 // Use 'mCommandReplyLock' to ensure that 'sendCommand' has finished updating the state
639 // after the reply from the 'drain' command.
640 std::lock_guard l(mCommandReplyLock);
641 state = getState();
642 }
643 if (state == StreamDescriptor::State::DRAINING ||
644 (mContext.hasClipTransitionSupport() &&
645 (state == StreamDescriptor::State::TRANSFERRING ||
646 state == StreamDescriptor::State::IDLE))) {
647 // Retrieve the current state together with position counters unconditionally
648 // to ensure that the state on our side gets updated.
649 sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(), nullptr,
650 true /*safeFromNonWorkerThread */);
651 // For compatibility with HIDL behavior, apply a "soft" position reset
652 // after receiving the "drain ready" callback for the clip end.
653 std::lock_guard l(mLock);
654 if (mLastReply.observable.frames != StreamDescriptor::Position::UNKNOWN &&
655 (!mContext.hasClipTransitionSupport() ||
656 (mStatePositions.drainState == StatePositions::DrainState::EN_RECEIVED
657 || mStatePositions.drainState == StatePositions::DrainState::ALL))) {
658 AUGMENT_LOG(D, "setting position %lld as clip end",
659 (long long)mLastReply.observable.frames);
660 mStatePositions.observable.framesAtFlushOrDrain = mLastReply.observable.frames;
661 }
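        // For an 'early notify' drain the first 'onDrainReady' only marks the clip end
        // (EN -> EN_RECEIVED); any subsequent callback, or a callback for a regular drain,
        // completes the sequence (-> NONE).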
662 mStatePositions.drainState = mStatePositions.drainState == StatePositions::DrainState::EN ?
663 StatePositions::DrainState::EN_RECEIVED : StatePositions::DrainState::NONE;
664 } else {
665 AUGMENT_LOG(W, "unexpected onDrainReady in the state %s", toString(state).c_str());
666 }
667 }
668
void StreamHalAidl::onAsyncError() {
670 std::lock_guard l(mLock);
671 AUGMENT_LOG(W, "received in the state %s", toString(mLastReply.state).c_str());
672 mLastReply.state = StreamDescriptor::State::ERROR;
673 }
674
status_t StreamHalAidl::createMmapBuffer(int32_t minSizeFrames __unused,
        struct audio_mmap_buffer_info *info) {
677 AUGMENT_LOG(D);
678 TIME_CHECK();
679 if (!mStream) return NO_INIT;
680 if (!mContext.isMmapped()) {
681 return BAD_VALUE;
682 }
683 if (mSupportsCreateMmapBuffer && (mAidlInterfaceVersion <= kAidlVersion3)) {
684 std::vector<VendorParameter> parameters;
685 RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
686 mStream->getVendorParameters({kCreateMmapBuffer}, ¶meters)));
687 if (parameters.size() == 1) {
688 std::optional<MmapBufferDescriptor> result;
689 RETURN_STATUS_IF_ERROR(parameters[0].ext.getParcelable(&result));
690 mContext.updateMmapBufferDescriptor(std::move(*result));
691 } else {
692 AUGMENT_LOG(E, "invalid output from 'createMmapBuffer' via 'getVendorParameters': %s",
693 internal::ToString(parameters).c_str());
694 return INVALID_OPERATION;
695 }
696 }
697 const MmapBufferDescriptor& bufferDescriptor = mContext.getMmapBufferDescriptor();
698 info->shared_memory_fd = bufferDescriptor.sharedMemory.fd.get();
699 info->buffer_size_frames = mContext.getBufferSizeFrames();
700 info->burst_size_frames = bufferDescriptor.burstSizeFrames;
701 info->flags = static_cast<audio_mmap_buffer_flag>(bufferDescriptor.flags);
702 return OK;
703 }
704
status_t StreamHalAidl::getMmapPosition(struct audio_mmap_position *position) {
706 TIME_CHECK();
707 if (!mStream) return NO_INIT;
708 if (!mContext.isMmapped()) {
709 return BAD_VALUE;
710 }
711 int64_t aidlPosition = 0, aidlTimestamp = 0;
712 RETURN_STATUS_IF_ERROR(getHardwarePosition(&aidlPosition, &aidlTimestamp));
713 position->time_nanoseconds = aidlTimestamp;
714 position->position_frames = static_cast<int32_t>(aidlPosition);
715 return OK;
716 }
717
status_t StreamHalAidl::setHalThreadPriority(int priority __unused) {
719 // Obsolete, must be done by the HAL module.
720 return OK;
721 }
722
status_t StreamHalAidl::legacyCreateAudioPatch(const struct audio_port_config& port __unused,
        std::optional<audio_source_t> source __unused,
        audio_devices_t type __unused) {
726 // Obsolete since 'DeviceHalAidl.supportsAudioPatches' always returns 'true'.
727 return INVALID_OPERATION;
728 }
729
status_t StreamHalAidl::legacyReleaseAudioPatch() {
731 // Obsolete since 'DeviceHalAidl.supportsAudioPatches' always returns 'true'.
732 return INVALID_OPERATION;
733 }
734
status_t StreamHalAidl::sendCommand(
        const ::aidl::android::hardware::audio::core::StreamDescriptor::Command& command,
        ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply,
        bool safeFromNonWorkerThread, StatePositions* statePositions) {
739
740 // Add timeCheck only for start command (pause, flush checked at caller).
741 std::unique_ptr<mediautils::TimeCheck> timeCheck;
742 if (command.getTag() == StreamDescriptor::Command::start) {
743 timeCheck = mediautils::makeTimeCheckStatsForClassMethodUniquePtr(
744 getClassName(), "sendCommand_start");
745 }
746 // TIME_CHECK(); // TODO(b/243839867) reenable only when optimized.
747 if (!safeFromNonWorkerThread) {
748 const pid_t workerTid = mWorkerTid.load(std::memory_order_acquire);
749 LOG_ALWAYS_FATAL_IF(workerTid != gettid(),
750 "%s %s: must be invoked from the worker thread (%d)",
751 __func__, command.toString().c_str(), workerTid);
752 }
753 StreamDescriptor::Reply localReply{};
754 {
755 std::lock_guard l(mCommandReplyLock);
756 if (!mContext.getCommandMQ()->writeBlocking(&command, 1)) {
757 AUGMENT_LOG(E, "failed to write command %s to MQ", command.toString().c_str());
758 return NOT_ENOUGH_DATA;
759 }
760 if (reply == nullptr) {
761 reply = &localReply;
762 }
763 if (!mContext.getReplyMQ()->readBlocking(reply, 1)) {
764 AUGMENT_LOG(E, "failed to read from reply MQ, command %s", command.toString().c_str());
765 return NOT_ENOUGH_DATA;
766 }
767 {
768 std::lock_guard l(mLock);
            // Not every command reply has the 'latencyMs' field filled out; substitute
            // the last returned value in that case.
771 if (reply->latencyMs <= 0) {
772 reply->latencyMs = mLastReply.latencyMs;
773 }
774 mLastReply = *reply;
775 mLastReplyExpirationNs = uptimeNanos() + mLastReplyLifeTimeNs;
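            // For output streams, capture the positions reported by state-changing commands
            // (standby, flush, and synchronous drain) so that the position getters can later
            // report values relative to the most recent reset point (see 'mStatePositions').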
776 if (!mIsInput && reply->status == STATUS_OK) {
777 if (reply->observable.frames != StreamDescriptor::Position::UNKNOWN) {
778 if (command.getTag() == StreamDescriptor::Command::standby &&
779 reply->state == StreamDescriptor::State::STANDBY) {
780 mStatePositions.observable.framesAtStandby = reply->observable.frames;
781 mStatePositions.hardware.framesAtStandby = reply->hardware.frames;
782 } else if (command.getTag() == StreamDescriptor::Command::flush &&
783 reply->state == StreamDescriptor::State::IDLE) {
784 mStatePositions.observable.framesAtFlushOrDrain = reply->observable.frames;
785 mStatePositions.hardware.framesAtFlushOrDrain = reply->observable.frames;
786 } else if (!mContext.isAsynchronous() &&
787 command.getTag() == StreamDescriptor::Command::drain &&
788 (reply->state == StreamDescriptor::State::IDLE ||
789 reply->state == StreamDescriptor::State::DRAINING)) {
790 mStatePositions.observable.framesAtFlushOrDrain = reply->observable.frames;
791 mStatePositions.hardware.framesAtFlushOrDrain = reply->observable.frames;
792 } // for asynchronous drain, the frame count is saved in 'onAsyncDrainReady'
793 }
794 if (mContext.isAsynchronous() &&
795 command.getTag() == StreamDescriptor::Command::drain) {
796 mStatePositions.drainState =
797 command.get<StreamDescriptor::Command::drain>() ==
798 StreamDescriptor::DrainMode::DRAIN_ALL ?
799 StatePositions::DrainState::ALL : StatePositions::DrainState::EN;
800 }
801 }
802 if (statePositions != nullptr) {
803 *statePositions = mStatePositions;
804 }
805 }
806 }
807 switch (reply->status) {
808 case STATUS_OK: return OK;
809 case STATUS_BAD_VALUE: return BAD_VALUE;
810 case STATUS_INVALID_OPERATION: return INVALID_OPERATION;
811 case STATUS_NOT_ENOUGH_DATA: return NOT_ENOUGH_DATA;
812 default:
813 AUGMENT_LOG(E, "unexpected status %d returned for command %s", reply->status,
814 command.toString().c_str());
815 return INVALID_OPERATION;
816 }
817 }
818
status_t StreamHalAidl::updateCountersIfNeeded(
        ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply,
        StatePositions* statePositions) {
822 bool doUpdate = false;
823 HalCommand cmd;
824 {
825 std::lock_guard l(mLock);
826 doUpdate = uptimeNanos() > mLastReplyExpirationNs;
827 cmd = mContext.isMmapped() && mSupportsCreateMmapBuffer
828 && mLastReply.state == StreamDescriptor::State::ACTIVE
829 ? makeHalCommand<HalCommand::Tag::burst>(0)
830 : makeHalCommand<HalCommand::Tag::getStatus>();
831 }
832 if (doUpdate) {
        // Since updates are paced, it is OK to perform them from any thread; they should
        // not interfere with the I/O operations of the worker.
835 return sendCommand(cmd, reply, true /*safeFromNonWorkerThread */, statePositions);
836 } else if (reply != nullptr) { // provide cached reply
837 std::lock_guard l(mLock);
838 *reply = mLastReply;
839 if (statePositions != nullptr) {
840 *statePositions = mStatePositions;
841 }
842 }
843 return OK;
844 }
845
846 // static
847 ConversionResult<::aidl::android::hardware::audio::common::SourceMetadata>
StreamOutHalAidl::legacy2aidl_SourceMetadata(const StreamOutHalInterface::SourceMetadata& legacy) {
849 ::aidl::android::hardware::audio::common::SourceMetadata aidl;
850 aidl.tracks = VALUE_OR_RETURN(
851 ::aidl::android::convertContainer<std::vector<PlaybackTrackMetadata>>(
852 legacy.tracks,
853 ::aidl::android::legacy2aidl_playback_track_metadata_v7_PlaybackTrackMetadata));
854 return aidl;
855 }
856
StreamOutHalAidl::StreamOutHalAidl(
        const audio_config& config, StreamContextAidl&& context, int32_t nominalLatency,
        const std::shared_ptr<IStreamOut>& stream,
        const std::shared_ptr<IHalAdapterVendorExtension>& vext,
        const sp<CallbackBroker>& callbackBroker)
862 : StreamHalAidl("StreamOutHalAidl", false /*isInput*/, config, nominalLatency,
863 std::move(context), getStreamCommon(stream), vext),
864 mStream(stream), mCallbackBroker(callbackBroker) {
865 // Initialize the offload metadata
866 mOffloadMetadata.sampleRate = static_cast<int32_t>(config.sample_rate);
867 mOffloadMetadata.channelMask = VALUE_OR_FATAL(
868 ::aidl::android::legacy2aidl_audio_channel_mask_t_AudioChannelLayout(
869 config.channel_mask, false));
870 mOffloadMetadata.averageBitRatePerSecond = static_cast<int32_t>(config.offload_info.bit_rate);
871 }
872
StreamOutHalAidl::~StreamOutHalAidl() {
874 if (auto broker = mCallbackBroker.promote(); broker != nullptr) {
875 broker->clearCallbacks(static_cast<StreamOutHalInterface*>(this));
876 }
877 }
878
status_t StreamOutHalAidl::setParameters(const String8& kvPairs) {
880 if (!mStream) return NO_INIT;
881
882 AudioParameter parameters(kvPairs);
883 AUGMENT_LOG(D, "parameters: \"%s\"", parameters.toString().c_str());
884
885 if (status_t status = filterAndUpdateOffloadMetadata(parameters); status != OK) {
886 AUGMENT_LOG(W, "filtering or updating offload metadata failed: %d", status);
887 }
888
889 return StreamHalAidl::setParameters(parameters.toString());
890 }
891
status_t StreamOutHalAidl::getLatency(uint32_t *latency) {
893 return StreamHalAidl::getLatency(latency);
894 }
895
status_t StreamOutHalAidl::setVolume(float left, float right) {
897 AUGMENT_LOG(V, "left %f right %f", left, right);
898 TIME_CHECK();
899 if (!mStream) return NO_INIT;
900 size_t channelCount = audio_channel_count_from_out_mask(mConfig.channel_mask);
901 if (channelCount == 0) channelCount = 2;
902 std::vector<float> volumes(channelCount);
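    // Map the left/right pair onto the HAL's per-channel volumes: a mono stream receives
    // the average, otherwise the first two channels receive left and right, and any
    // remaining channels receive the average.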
903 if (channelCount == 1) {
904 volumes[0] = (left + right) / 2;
905 } else {
906 volumes[0] = left;
907 volumes[1] = right;
908 for (size_t i = 2; i < channelCount; ++i) {
909 volumes[i] = (left + right) / 2;
910 }
911 }
912 return statusTFromBinderStatus(serializeCall(mStream, &Stream::setHwVolume, volumes));
913 }
914
status_t StreamOutHalAidl::selectPresentation(int presentationId, int programId) {
916 TIME_CHECK();
917 if (!mStream) return NO_INIT;
918 return statusTFromBinderStatus(
919 serializeCall(mStream, &Stream::selectPresentation, presentationId, programId));
920 }
921
status_t StreamOutHalAidl::write(const void *buffer, size_t bytes, size_t *written) {
923 if (buffer == nullptr || written == nullptr) {
924 return BAD_VALUE;
925 }
926 // For the output scenario, 'transfer' does not modify the buffer.
927 return transfer(const_cast<void*>(buffer), bytes, written);
928 }
929
status_t StreamOutHalAidl::getRenderPosition(uint64_t *dspFrames) {
931 if (dspFrames == nullptr) {
932 return BAD_VALUE;
933 }
934 int64_t aidlFrames = 0, aidlTimestamp = 0;
935 StatePositions statePositions{};
936 RETURN_STATUS_IF_ERROR(
937 getObservablePosition(&aidlFrames, &aidlTimestamp, &statePositions));
938 // Number of audio frames since the stream has exited standby.
939 // See the table at the start of 'StreamHalInterface' on when it needs to reset.
940 int64_t mostRecentResetPoint;
941 if (!mContext.isAsynchronous() && audio_has_proportional_frames(mConfig.format)) {
942 mostRecentResetPoint = statePositions.observable.framesAtStandby;
943 } else {
944 mostRecentResetPoint = std::max(statePositions.observable.framesAtStandby,
945 statePositions.observable.framesAtFlushOrDrain);
946 }
947 *dspFrames = aidlFrames <= mostRecentResetPoint ? 0 : aidlFrames - mostRecentResetPoint;
948 return OK;
949 }
950
status_t StreamOutHalAidl::setCallback(wp<StreamOutHalInterfaceCallback> callback) {
952 AUGMENT_LOG(D);
953 TIME_CHECK();
954 if (!mStream) return NO_INIT;
955 if (!mContext.isAsynchronous()) {
956 AUGMENT_LOG(E, "the callback is intended for asynchronous streams only");
957 return INVALID_OPERATION;
958 }
959 mClientCallback = callback;
960 return OK;
961 }
962
status_t StreamOutHalAidl::supportsPauseAndResume(bool *supportsPause, bool *supportsResume) {
964 if (supportsPause == nullptr || supportsResume == nullptr) {
965 return BAD_VALUE;
966 }
967 TIME_CHECK();
968 if (!mStream) return NO_INIT;
969 *supportsPause = *supportsResume = true;
970 return OK;
971 }
972
status_t StreamOutHalAidl::pause() {
974 return StreamHalAidl::pause();
975 }
976
status_t StreamOutHalAidl::resume() {
978 return StreamHalAidl::resume();
979 }
980
status_t StreamOutHalAidl::supportsDrain(bool *supportsDrain) {
982 if (supportsDrain == nullptr) {
983 return BAD_VALUE;
984 }
985 TIME_CHECK();
986 if (!mStream) return NO_INIT;
987 *supportsDrain = true;
988 return OK;
989 }
990
status_t StreamOutHalAidl::drain(bool earlyNotify) {
992 if (!mStream) return NO_INIT;
993
994 if (const auto state = getState();
995 state == StreamDescriptor::State::DRAINING || isInDrainedState(state)) {
996 AUGMENT_LOG(D, "stream already in %s state", toString(state).c_str());
997 if (mContext.isAsynchronous() && isInDrainedState(state)) {
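            // The stream is already drained; invoke the handler directly so that the
            // client's 'onDrainReady' notification still fires.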
998 onDrainReady();
999 }
1000 return OK;
1001 }
1002
1003 return StreamHalAidl::drain(earlyNotify);
1004 }
1005
status_t StreamOutHalAidl::flush() {
1007 return StreamHalAidl::flush();
1008 }
1009
status_t StreamOutHalAidl::getPresentationPosition(uint64_t *frames, struct timespec *timestamp) {
1011 if (frames == nullptr || timestamp == nullptr) {
1012 return BAD_VALUE;
1013 }
1014 int64_t aidlFrames = 0, aidlTimestamp = 0;
1015 StatePositions statePositions{};
1016 RETURN_STATUS_IF_ERROR(getObservablePosition(&aidlFrames, &aidlTimestamp, &statePositions));
1017 // See the table at the start of 'StreamHalInterface'.
1018 if (!mContext.isAsynchronous() && audio_has_proportional_frames(mConfig.format)) {
1019 *frames = aidlFrames;
1020 } else {
1021 const int64_t mostRecentResetPoint = std::max(statePositions.observable.framesAtStandby,
1022 statePositions.observable.framesAtFlushOrDrain);
1023 *frames = aidlFrames <= mostRecentResetPoint ? 0 : aidlFrames - mostRecentResetPoint;
1024 }
1025 timestamp->tv_sec = aidlTimestamp / NANOS_PER_SECOND;
1026 timestamp->tv_nsec = aidlTimestamp - timestamp->tv_sec * NANOS_PER_SECOND;
1027 return OK;
1028 }
1029
status_t StreamOutHalAidl::presentationComplete() {
1031 AUGMENT_LOG(D);
1032 return OK;
1033 }
1034
status_t StreamOutHalAidl::updateSourceMetadata(
        const StreamOutHalInterface::SourceMetadata& sourceMetadata) {
1037 TIME_CHECK();
1038 if (!mStream) return NO_INIT;
1039 ::aidl::android::hardware::audio::common::SourceMetadata aidlMetadata =
1040 VALUE_OR_RETURN_STATUS(legacy2aidl_SourceMetadata(sourceMetadata));
1041 return statusTFromBinderStatus(
1042 serializeCall(mStream, &Stream::updateMetadata, aidlMetadata));
1043 }
1044
status_t StreamOutHalAidl::getDualMonoMode(audio_dual_mono_mode_t* mode) {
1046 TIME_CHECK();
1047 if (!mStream) return NO_INIT;
1048 if (mode == nullptr) {
1049 return BAD_VALUE;
1050 }
1051 ::aidl::android::media::audio::common::AudioDualMonoMode aidlMode;
1052 RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
1053 serializeCall(mStream, &Stream::getDualMonoMode, &aidlMode)));
1054 *mode = VALUE_OR_RETURN_STATUS(
1055 ::aidl::android::aidl2legacy_AudioDualMonoMode_audio_dual_mono_mode_t(aidlMode));
1056 return OK;
1057 }
1058
status_t StreamOutHalAidl::setDualMonoMode(audio_dual_mono_mode_t mode) {
1060 TIME_CHECK();
1061 if (!mStream) return NO_INIT;
1062 ::aidl::android::media::audio::common::AudioDualMonoMode aidlMode = VALUE_OR_RETURN_STATUS(
1063 ::aidl::android::legacy2aidl_audio_dual_mono_mode_t_AudioDualMonoMode(mode));
1064 return statusTFromBinderStatus(
1065 serializeCall(mStream, &Stream::setDualMonoMode, aidlMode));
1066 }
1067
status_t StreamOutHalAidl::getAudioDescriptionMixLevel(float* leveldB) {
1069 TIME_CHECK();
1070 if (!mStream) return NO_INIT;
1071 if (leveldB == nullptr) {
1072 return BAD_VALUE;
1073 }
1074 return statusTFromBinderStatus(
1075 serializeCall(mStream, &Stream::getAudioDescriptionMixLevel, leveldB));
1076 }
1077
status_t StreamOutHalAidl::setAudioDescriptionMixLevel(float leveldB) {
1079 TIME_CHECK();
1080 if (!mStream) return NO_INIT;
1081 return statusTFromBinderStatus(
1082 serializeCall(mStream, &Stream::setAudioDescriptionMixLevel, leveldB));
1083 }
1084
status_t StreamOutHalAidl::getPlaybackRateParameters(audio_playback_rate_t* playbackRate) {
1086 TIME_CHECK();
1087 if (!mStream) return NO_INIT;
1088 if (playbackRate == nullptr) {
1089 return BAD_VALUE;
1090 }
1091 ::aidl::android::media::audio::common::AudioPlaybackRate aidlRate;
1092 RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
1093 serializeCall(mStream, &Stream::getPlaybackRateParameters, &aidlRate)));
1094 *playbackRate = VALUE_OR_RETURN_STATUS(
1095 ::aidl::android::aidl2legacy_AudioPlaybackRate_audio_playback_rate_t(aidlRate));
1096 return OK;
1097 }
1098
status_t StreamOutHalAidl::setPlaybackRateParameters(const audio_playback_rate_t& playbackRate) {
1100 TIME_CHECK();
1101 if (!mStream) return NO_INIT;
1102 ::aidl::android::media::audio::common::AudioPlaybackRate aidlRate = VALUE_OR_RETURN_STATUS(
1103 ::aidl::android::legacy2aidl_audio_playback_rate_t_AudioPlaybackRate(playbackRate));
1104 return statusTFromBinderStatus(
1105 serializeCall(mStream, &Stream::setPlaybackRateParameters, aidlRate));
1106 }
1107
status_t StreamOutHalAidl::setEventCallback(
        const sp<StreamOutHalInterfaceEventCallback>& callback) {
1110 TIME_CHECK();
1111 if (!mStream) return NO_INIT;
1112 if (auto broker = mCallbackBroker.promote(); broker != nullptr) {
1113 broker->setStreamOutEventCallback(static_cast<StreamOutHalInterface*>(this), callback);
1114 }
1115 return OK;
1116 }
1117
status_t StreamOutHalAidl::setLatencyMode(audio_latency_mode_t mode) {
1119 TIME_CHECK();
1120 if (!mStream) return NO_INIT;
1121 ::aidl::android::media::audio::common::AudioLatencyMode aidlMode = VALUE_OR_RETURN_STATUS(
1122 ::aidl::android::legacy2aidl_audio_latency_mode_t_AudioLatencyMode(mode));
1123 return statusTFromBinderStatus(serializeCall(mStream, &Stream::setLatencyMode, aidlMode));
1124 };
1125
status_t StreamOutHalAidl::getRecommendedLatencyModes(std::vector<audio_latency_mode_t> *modes) {
1127 TIME_CHECK();
1128 if (!mStream) return NO_INIT;
1129 if (modes == nullptr) {
1130 return BAD_VALUE;
1131 }
1132 std::vector<::aidl::android::media::audio::common::AudioLatencyMode> aidlModes;
1133 RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
1134 serializeCall(mStream, &Stream::getRecommendedLatencyModes, &aidlModes)));
1135 *modes = VALUE_OR_RETURN_STATUS(
1136 ::aidl::android::convertContainer<std::vector<audio_latency_mode_t>>(
1137 aidlModes,
1138 ::aidl::android::aidl2legacy_AudioLatencyMode_audio_latency_mode_t));
1139 return OK;
1140 };
1141
status_t StreamOutHalAidl::setLatencyModeCallback(
        const sp<StreamOutHalInterfaceLatencyModeCallback>& callback) {
1144 TIME_CHECK();
1145 if (!mStream) return NO_INIT;
1146 if (auto broker = mCallbackBroker.promote(); broker != nullptr) {
1147 broker->setStreamOutLatencyModeCallback(
1148 static_cast<StreamOutHalInterface*>(this), callback);
1149 }
1150 return OK;
1151 };
1152
status_t StreamOutHalAidl::exit() {
1154 return StreamHalAidl::exit();
1155 }
1156
void StreamOutHalAidl::onWriteReady() {
1158 onAsyncTransferReady();
1159 if (auto clientCb = mClientCallback.load().promote(); clientCb != nullptr) {
1160 clientCb->onWriteReady();
1161 }
1162 }
1163
void StreamOutHalAidl::onDrainReady() {
1165 onAsyncDrainReady();
1166 if (auto clientCb = mClientCallback.load().promote(); clientCb != nullptr) {
1167 clientCb->onDrainReady();
1168 }
1169 }
1170
void StreamOutHalAidl::onError(bool isHardError) {
1172 onAsyncError();
1173 if (auto clientCb = mClientCallback.load().promote(); clientCb != nullptr) {
1174 clientCb->onError(isHardError);
1175 }
1176 }
1177
status_t StreamOutHalAidl::filterAndUpdateOffloadMetadata(AudioParameter &parameters) {
1179 TIME_CHECK();
1180 bool updateMetadata = false;
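    // Translate the legacy offload codec key/value parameters into the fields of the AIDL
    // offload metadata; if any of them were present, push the updated metadata to the HAL below.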
1181 if (VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int>(
1182 parameters, String8(AudioParameter::keyOffloadCodecAverageBitRate),
1183 [&](int value) {
1184 return value >= 0 ?
1185 mOffloadMetadata.averageBitRatePerSecond = value, OK : BAD_VALUE;
1186 }))) {
1187 updateMetadata = true;
1188 }
1189 if (VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int>(
1190 parameters, String8(AudioParameter::keyOffloadCodecSampleRate),
1191 [&](int value) {
1192 return value > 0 ? mOffloadMetadata.sampleRate = value, OK : BAD_VALUE;
1193 }))) {
1194 updateMetadata = true;
1195 }
1196 if (VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int>(
1197 parameters, String8(AudioParameter::keyOffloadCodecChannels),
1198 [&](int value) -> status_t {
1199 if (value > 0) {
1200 audio_channel_mask_t channel_mask = audio_channel_out_mask_from_count(
1201 static_cast<uint32_t>(value));
1202 if (channel_mask == AUDIO_CHANNEL_INVALID) return BAD_VALUE;
1203 mOffloadMetadata.channelMask = VALUE_OR_RETURN_STATUS(
1204 ::aidl::android::legacy2aidl_audio_channel_mask_t_AudioChannelLayout(
1205 channel_mask, false /*isInput*/));
1206 return OK;
1207 }
1208 return BAD_VALUE;
1209 }))) {
1210 updateMetadata = true;
1211 }
1212 if (VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int>(
1213 parameters, String8(AudioParameter::keyOffloadCodecDelaySamples),
1214 [&](int value) {
1215 // The legacy keys are misnamed, the value is in frames.
1216 return value >= 0 ? mOffloadMetadata.delayFrames = value, OK : BAD_VALUE;
1217 }))) {
1218 updateMetadata = true;
1219 }
1220 if (VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int>(
1221 parameters, String8(AudioParameter::keyOffloadCodecPaddingSamples),
1222 [&](int value) {
1223 // The legacy keys are misnamed, the value is in frames.
1224 return value >= 0 ? mOffloadMetadata.paddingFrames = value, OK : BAD_VALUE;
1225 }))) {
1226 updateMetadata = true;
1227 }
1228 if (updateMetadata) {
1229 AUGMENT_LOG(D, "set offload metadata %s", mOffloadMetadata.toString().c_str());
1230 if (status_t status = statusTFromBinderStatus(
1231 serializeCall(mStream, &Stream::updateOffloadMetadata, mOffloadMetadata));
1232 status != OK) {
1233 AUGMENT_LOG(E, "updateOffloadMetadata failed %d", status);
1234 return status;
1235 }
1236 }
1237 return OK;
1238 }
1239
status_t StreamOutHalAidl::dump(int fd, const Vector<String16>& args) {
1241 AUGMENT_LOG(D);
1242 TIME_CHECK();
1243 if (!mStream) return NO_INIT;
1244 Vector<String16> newArgs = args;
1245 newArgs.push(String16(kDumpFromAudioServerArgument));
1246 // Do not serialize the dump call with mCallLock
1247 status_t status = mStream->dump(fd, Args(newArgs).args(), newArgs.size());
1248 StreamHalAidl::dump(fd, args);
1249 return status;
1250 }
1251
1252 // static
1253 ConversionResult<::aidl::android::hardware::audio::common::SinkMetadata>
StreamInHalAidl::legacy2aidl_SinkMetadata(const StreamInHalInterface::SinkMetadata& legacy) {
1255 ::aidl::android::hardware::audio::common::SinkMetadata aidl;
1256 aidl.tracks = VALUE_OR_RETURN(
1257 ::aidl::android::convertContainer<std::vector<RecordTrackMetadata>>(
1258 legacy.tracks,
1259 ::aidl::android::legacy2aidl_record_track_metadata_v7_RecordTrackMetadata));
1260 return aidl;
1261 }
1262
StreamInHalAidl::StreamInHalAidl(
        const audio_config& config, StreamContextAidl&& context, int32_t nominalLatency,
        const std::shared_ptr<IStreamIn>& stream,
        const std::shared_ptr<IHalAdapterVendorExtension>& vext,
        const sp<MicrophoneInfoProvider>& micInfoProvider)
1268 : StreamHalAidl("StreamInHalAidl", true /*isInput*/, config, nominalLatency,
1269 std::move(context), getStreamCommon(stream), vext),
1270 mStream(stream), mMicInfoProvider(micInfoProvider) {}
1271
status_t StreamInHalAidl::setGain(float gain) {
1273 TIME_CHECK();
1274 if (!mStream) return NO_INIT;
1275 const size_t channelCount = audio_channel_count_from_in_mask(mConfig.channel_mask);
1276 std::vector<float> gains(channelCount != 0 ? channelCount : 1, gain);
1277 return statusTFromBinderStatus(serializeCall(mStream, &Stream::setHwGain, gains));
1278 }
1279
status_t StreamInHalAidl::read(void *buffer, size_t bytes, size_t *read) {
1281 if (buffer == nullptr || read == nullptr) {
1282 return BAD_VALUE;
1283 }
1284 return transfer(buffer, bytes, read);
1285 }
1286
status_t StreamInHalAidl::getInputFramesLost(uint32_t *framesLost) {
1288 if (framesLost == nullptr) {
1289 return BAD_VALUE;
1290 }
1291 int32_t aidlXruns = 0;
1292 RETURN_STATUS_IF_ERROR(getXruns(&aidlXruns));
1293 *framesLost = std::max<int32_t>(0, aidlXruns);
1294 return OK;
1295 }
1296
status_t StreamInHalAidl::getCapturePosition(int64_t *frames, int64_t *time) {
1298 if (frames == nullptr || time == nullptr) {
1299 return BAD_VALUE;
1300 }
1301 return getObservablePosition(frames, time);
1302 }
1303
status_t StreamInHalAidl::getActiveMicrophones(std::vector<media::MicrophoneInfoFw> *microphones) {
1305 if (!microphones) {
1306 return BAD_VALUE;
1307 }
1308 TIME_CHECK();
1309 if (!mStream) return NO_INIT;
1310 sp<MicrophoneInfoProvider> micInfoProvider = mMicInfoProvider.promote();
1311 if (!micInfoProvider) return NO_INIT;
1312 auto staticInfo = micInfoProvider->getMicrophoneInfo();
1313 if (!staticInfo) return INVALID_OPERATION;
1314 std::vector<MicrophoneDynamicInfo> dynamicInfo;
1315 RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
1316 serializeCall(mStream, &Stream::getActiveMicrophones, &dynamicInfo)));
1317 std::vector<media::MicrophoneInfoFw> result;
1318 result.reserve(dynamicInfo.size());
1319 for (const auto& d : dynamicInfo) {
1320 const auto staticInfoIt = std::find_if(staticInfo->begin(), staticInfo->end(),
1321 [&](const auto& s) { return s.id == d.id; });
1322 if (staticInfoIt != staticInfo->end()) {
1323 // Convert into the c++ backend type from the ndk backend type via the legacy structure.
1324 audio_microphone_characteristic_t legacy = VALUE_OR_RETURN_STATUS(
1325 ::aidl::android::aidl2legacy_MicrophoneInfos_audio_microphone_characteristic_t(
1326 *staticInfoIt, d));
1327 media::MicrophoneInfoFw info = VALUE_OR_RETURN_STATUS(
1328 ::android::legacy2aidl_audio_microphone_characteristic_t_MicrophoneInfoFw(
1329 legacy));
1330 // Note: info.portId is not filled because it's a bit of framework info.
1331 result.push_back(std::move(info));
1332 } else {
1333 AUGMENT_LOG(E, "no static info for active microphone with id '%s'", d.id.c_str());
1334 }
1335 }
1336 *microphones = std::move(result);
1337 return OK;
1338 }
1339
status_t StreamInHalAidl::updateSinkMetadata(
        const StreamInHalInterface::SinkMetadata& sinkMetadata) {
1342 TIME_CHECK();
1343 if (!mStream) return NO_INIT;
1344 ::aidl::android::hardware::audio::common::SinkMetadata aidlMetadata =
1345 VALUE_OR_RETURN_STATUS(legacy2aidl_SinkMetadata(sinkMetadata));
1346 return statusTFromBinderStatus(
1347 serializeCall(mStream, &Stream::updateMetadata, aidlMetadata));
1348 }
1349
status_t StreamInHalAidl::setPreferredMicrophoneDirection(audio_microphone_direction_t direction) {
1351 TIME_CHECK();
1352 if (!mStream) return NO_INIT;
1353 ::aidl::android::hardware::audio::core::IStreamIn::MicrophoneDirection aidlDirection =
1354 VALUE_OR_RETURN_STATUS(
1355 ::aidl::android::legacy2aidl_audio_microphone_direction_t_MicrophoneDirection(
1356 direction));
1357 return statusTFromBinderStatus(
1358 serializeCall(mStream, &Stream::setMicrophoneDirection, aidlDirection));
1359 }
1360
status_t StreamInHalAidl::setPreferredMicrophoneFieldDimension(float zoom) {
1362 TIME_CHECK();
1363 if (!mStream) return NO_INIT;
1364 return statusTFromBinderStatus(
1365 serializeCall(mStream, &Stream::setMicrophoneFieldDimension, zoom));
1366 }
1367
status_t StreamInHalAidl::dump(int fd, const Vector<String16>& args) {
1369 AUGMENT_LOG(D);
1370 TIME_CHECK();
1371 if (!mStream) return NO_INIT;
1372 Vector<String16> newArgs = args;
1373 newArgs.push(String16(kDumpFromAudioServerArgument));
1374 // Do not serialize the dump call with mCallLock
1375 status_t status = mStream->dump(fd, Args(newArgs).args(), newArgs.size());
1376 StreamHalAidl::dump(fd, args);
1377 return status;
1378 }
1379
1380 } // namespace android
1381