/*
 * Copyright 2021, The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "SpatializerPoseController.h"
#include <android-base/stringprintf.h>
#include <chrono>
#include <cstdint>
#include <string>

#define LOG_TAG "SpatializerPoseController"
//#define LOG_NDEBUG 0
#include <cutils/properties.h>
#include <sensor/Sensor.h>
#include <media/MediaMetricsItem.h>
#include <media/QuaternionUtil.h>
#include <utils/Log.h>
#include <utils/SystemClock.h>

namespace android {

using media::createHeadTrackingProcessor;
using media::HeadTrackingMode;
using media::HeadTrackingProcessor;
using media::Pose3f;
using media::SensorPoseProvider;
using media::Twist3f;

using namespace std::chrono_literals;

namespace {

// This is how fast, in m/s, we allow position to shift during rate-limiting.
constexpr float kMaxTranslationalVelocity = 2;

// This is how fast, in rad/s, we allow rotation angle to shift during rate-limiting.
constexpr float kMaxRotationalVelocity = 0.8f;

// This is how far into the future we predict the head pose.
// The prediction duration should be based on the actual latency from
// head-tracker to audio output, though setting the prediction duration too
// high may result in higher prediction errors when the head accelerates or
// decelerates (changes velocity).
//
// The head tracking predictor will do a best effort to achieve the requested
// prediction duration.  If the duration is too far in the future based on
// current sensor variance, the predictor may internally restrict duration to what
// is achievable with reasonable confidence as the "best prediction".
constexpr auto kPredictionDuration = 120ms;
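// Note: the kPredictionDuration default above can be overridden at runtime through the
// "audio.spatializer.prediction_duration_ms" system property, which the constructor below
// reads. For experimentation one might set it with, e.g.,
// "adb shell setprop audio.spatializer.prediction_duration_ms 50" (an illustrative value,
// not a recommendation).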

// After not getting a pose sample for this long, we would treat the measurement as stale.
// The max connection interval is 50ms, and HT sensor event interval can differ depending on the
// sampling rate, scheduling, sensor eventQ FIFO etc. 120 (2 * 50 + 20) ms seems reasonable for now.
constexpr auto kFreshnessTimeout = 120ms;

// Auto-recenter kicks in after the head has been still for this long.
constexpr auto kAutoRecenterWindowDuration = 6s;

// Auto-recenter considers head not still if translated by this much (in meters, approx).
constexpr float kAutoRecenterTranslationThreshold = 0.1f;

// Auto-recenter considers head not still if rotated by this much (in radians, approx).
constexpr float kAutoRecenterRotationThreshold = 10.5f / 180 * M_PI;

// Screen is considered to be unstable (not still) if it has moved significantly within the last
// time window of this duration.
constexpr auto kScreenStillnessWindowDuration = 750ms;

// Screen is considered to have moved significantly if translated by this much (in meters, approx).
constexpr float kScreenStillnessTranslationThreshold = 0.1f;

// Screen is considered to have moved significantly if rotated by this much (in radians, approx).
constexpr float kScreenStillnessRotationThreshold = 15.0f / 180 * M_PI;

// Time units for system clock ticks. This is what the Sensor Framework timestamps represent and
// what we use for pose filtering.
using Ticks = std::chrono::nanoseconds;

// How many ticks in a second.
constexpr auto kTicksPerSecond = Ticks::period::den;
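// The velocity limits above are specified per second, but the HeadTrackingProcessor is
// constructed below with per-tick values (divided by kTicksPerSecond) and with durations
// converted to tick counts. For example, kMaxTranslationalVelocity of 2 m/s becomes
// 2 / 1e9 = 2e-9 m per tick (per nanosecond).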

std::string getSensorMetricsId(int32_t sensorId) {
    return std::string(AMEDIAMETRICS_KEY_PREFIX_AUDIO_SENSOR).append(std::to_string(sensorId));
}

}  // namespace

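// Rough usage sketch (illustrative only; the exact call sequence is determined by the owning
// Spatializer, and the handles and periods below are hypothetical):
//
//   SpatializerPoseController controller(listener, 10ms /* sensorPeriod */,
//                                        50ms /* maxUpdatePeriod */);
//   controller.setHeadSensor(headSensorHandle);      // starts head tracking
//   controller.setScreenSensor(screenSensorHandle);  // optional, for screen-relative modes
//   controller.calculateAsync();                     // request an asynchronous pose update, or
//   controller.waitUntilCalculated();                // block until at least one pose is ready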
SpatializerPoseController::SpatializerPoseController(Listener* listener,
                                        std::chrono::microseconds sensorPeriod,
                                        std::optional<std::chrono::microseconds> maxUpdatePeriod)
    : mListener(listener),
      mSensorPeriod(sensorPeriod),
      mProcessor(createHeadTrackingProcessor(HeadTrackingProcessor::Options{
              .maxTranslationalVelocity = kMaxTranslationalVelocity / kTicksPerSecond,
              .maxRotationalVelocity = kMaxRotationalVelocity / kTicksPerSecond,
              .freshnessTimeout = Ticks(kFreshnessTimeout).count(),
              .predictionDuration = []() -> float {
                  const int duration_ms =
                          property_get_int32("audio.spatializer.prediction_duration_ms", -1);
                  if (duration_ms >= 0) {
                      return duration_ms * 1'000'000LL;
                  } else {
                      return Ticks(kPredictionDuration).count();
                  }
              }(),
              .autoRecenterWindowDuration = Ticks(kAutoRecenterWindowDuration).count(),
              .autoRecenterTranslationalThreshold = kAutoRecenterTranslationThreshold,
              .autoRecenterRotationalThreshold = kAutoRecenterRotationThreshold,
              .screenStillnessWindowDuration = Ticks(kScreenStillnessWindowDuration).count(),
              .screenStillnessTranslationalThreshold = kScreenStillnessTranslationThreshold,
              .screenStillnessRotationalThreshold = kScreenStillnessRotationThreshold,
      })),
      mPoseProvider(SensorPoseProvider::create("headtracker", this)),
      mThread([this, maxUpdatePeriod] { // It's important that mThread is initialized after
                                        // everything else because it runs a member
                                        // function that may use any member
                                        // of this class.
          while (true) {
              Pose3f headToStage;
              std::optional<HeadTrackingMode> modeIfChanged;
              {
                  std::unique_lock lock(mMutex);
                  if (maxUpdatePeriod.has_value()) {
                      mCondVar.wait_for(lock, maxUpdatePeriod.value(),
                                        [this] { return mShouldExit || mShouldCalculate; });
                  } else {
                      mCondVar.wait(lock, [this] { return mShouldExit || mShouldCalculate; });
                  }
                  if (mShouldExit) {
                      ALOGV("Exiting thread");
                      return;
                  }

                  // Calculate.
                  std::tie(headToStage, modeIfChanged) = calculate_l();
              }

              // Invoke the callbacks outside the lock.
              mListener->onHeadToStagePose(headToStage);
              if (modeIfChanged) {
                  mListener->onActualModeChange(modeIfChanged.value());
              }

              {
                  std::lock_guard lock(mMutex);
                  if (!mCalculated) {
                      mCalculated = true;
                      mCondVar.notify_all();
                  }
                  mShouldCalculate = false;
              }
          }
      }) {
          const media::PosePredictorType posePredictorType =
                  (media::PosePredictorType)
                  property_get_int32("audio.spatializer.pose_predictor_type", -1);
          if (isValidPosePredictorType(posePredictorType)) {
              mProcessor->setPosePredictorType(posePredictorType);
          }
      }

SpatializerPoseController::~SpatializerPoseController() {
    {
        std::unique_lock lock(mMutex);
        mShouldExit = true;
        mCondVar.notify_all();
    }
    mThread.join();
}

void SpatializerPoseController::setHeadSensor(int32_t sensor) {
    std::lock_guard lock(mMutex);
    if (sensor == mHeadSensor) return;
    ALOGV("%s: new sensor:%d  mHeadSensor:%d  mScreenSensor:%d",
            __func__, sensor, mHeadSensor, mScreenSensor);

    // Stop current sensor, if valid and different from the other sensor.
    if (mHeadSensor != INVALID_SENSOR && mHeadSensor != mScreenSensor) {
        mPoseProvider->stopSensor(mHeadSensor);
        mediametrics::LogItem(getSensorMetricsId(mHeadSensor))
            .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_STOP)
            .record();
    }

    if (sensor != INVALID_SENSOR) {
        if (sensor != mScreenSensor) {
            // Start new sensor.
            mHeadSensor =
                    mPoseProvider->startSensor(sensor, mSensorPeriod) ? sensor : INVALID_SENSOR;
            if (mHeadSensor != INVALID_SENSOR) {
                auto sensor = mPoseProvider->getSensorByHandle(mHeadSensor);
                std::string stringType = sensor ? sensor->getStringType().c_str() : "";
                mediametrics::LogItem(getSensorMetricsId(mHeadSensor))
                    .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_START)
                    .set(AMEDIAMETRICS_PROP_MODE, AMEDIAMETRICS_PROP_MODE_VALUE_HEAD)
                    .set(AMEDIAMETRICS_PROP_TYPE, stringType)
                    .record();
            }
        } else {
            // Sensor is already enabled.
            mHeadSensor = mScreenSensor;
        }
    } else {
        mHeadSensor = INVALID_SENSOR;
    }

    mProcessor->recenter(true /* recenterHead */, false /* recenterScreen */, __func__);
}

void SpatializerPoseController::setScreenSensor(int32_t sensor) {
    std::lock_guard lock(mMutex);
    if (sensor == mScreenSensor) return;
    ALOGV("%s: new sensor:%d  mHeadSensor:%d  mScreenSensor:%d",
            __func__, sensor, mHeadSensor, mScreenSensor);

    // Stop current sensor, if valid and different from the other sensor.
    if (mScreenSensor != INVALID_SENSOR && mScreenSensor != mHeadSensor) {
        mPoseProvider->stopSensor(mScreenSensor);
        mediametrics::LogItem(getSensorMetricsId(mScreenSensor))
            .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_STOP)
            .record();
    }

    if (sensor != INVALID_SENSOR) {
        if (sensor != mHeadSensor) {
            // Start new sensor.
            mScreenSensor =
                    mPoseProvider->startSensor(sensor, mSensorPeriod) ? sensor : INVALID_SENSOR;
            auto sensor = mPoseProvider->getSensorByHandle(mScreenSensor);
            std::string stringType = sensor ? sensor->getStringType().c_str() : "";
            mediametrics::LogItem(getSensorMetricsId(mScreenSensor))
                .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_START)
                .set(AMEDIAMETRICS_PROP_MODE, AMEDIAMETRICS_PROP_MODE_VALUE_SCREEN)
                .set(AMEDIAMETRICS_PROP_TYPE, stringType)
                .record();
        } else {
            // Sensor is already enabled.
            mScreenSensor = mHeadSensor;
        }
    } else {
        mScreenSensor = INVALID_SENSOR;
    }

    mProcessor->recenter(false /* recenterHead */, true /* recenterScreen */, __func__);
}
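// Note: both setters above recenter the corresponding side (head-only or screen-only) whenever
// the sensor handle actually changes, so switching sensors resets that reference frame.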

void SpatializerPoseController::setDesiredMode(HeadTrackingMode mode) {
    std::lock_guard lock(mMutex);
    mProcessor->setDesiredMode(mode);
}

void SpatializerPoseController::setScreenToStagePose(const Pose3f& screenToStage) {
    std::lock_guard lock(mMutex);
    mProcessor->setScreenToStagePose(screenToStage);
}

void SpatializerPoseController::setDisplayOrientation(float physicalToLogicalAngle) {
    std::lock_guard lock(mMutex);
    mProcessor->setDisplayOrientation(physicalToLogicalAngle);
}

void SpatializerPoseController::calculateAsync() {
    std::lock_guard lock(mMutex);
    mShouldCalculate = true;
    mCondVar.notify_all();
}

// Blocks until the worker thread has computed at least one pose. mCalculated is set once and
// never cleared, so subsequent calls return immediately.
void SpatializerPoseController::waitUntilCalculated() {
    std::unique_lock lock(mMutex);
    mCondVar.wait(lock, [this] { return mCalculated; });
}

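// calculate_l() must be called with mMutex held (the "_l" suffix denotes lock-held methods);
// the worker thread invokes it under the lock and then delivers the results to the listener
// outside the lock.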
std::tuple<media::Pose3f, std::optional<media::HeadTrackingMode>>
SpatializerPoseController::calculate_l() {
    Pose3f headToStage;
    HeadTrackingMode mode;
    std::optional<media::HeadTrackingMode> modeIfChanged;

    mProcessor->calculate(elapsedRealtimeNano());
    headToStage = mProcessor->getHeadToStagePose();
    mode = mProcessor->getActualMode();
    if (!mActualMode.has_value() || mActualMode.value() != mode) {
        mActualMode = mode;
        modeIfChanged = mode;
    }
    return std::make_tuple(headToStage, modeIfChanged);
}

void SpatializerPoseController::recenter() {
    std::lock_guard lock(mMutex);
    mProcessor->recenter(true /* recenterHead */, true /* recenterScreen */, __func__);
}

void SpatializerPoseController::onPose(int64_t timestamp, int32_t sensor, const Pose3f& pose,
                                       const std::optional<Twist3f>& twist, bool isNewReference) {
    std::lock_guard lock(mMutex);
    constexpr float NANOS_TO_MILLIS = 1e-6;
    constexpr float RAD_TO_DEGREE = 180.f / M_PI;

    const float delayMs = (elapsedRealtimeNano() - timestamp) * NANOS_TO_MILLIS; // CLOCK_BOOTTIME

    if (sensor == mHeadSensor) {
        std::vector<float> pryprydt(8);  // pitch, roll, yaw, d_pitch, d_roll, d_yaw,
                                         // discontinuity, timestamp_delay
        media::quaternionToAngles(pose.rotation(), &pryprydt[0], &pryprydt[1], &pryprydt[2]);
        if (twist) {
            const auto rotationalVelocity = twist->rotationalVelocity();
            // The rotational velocity is an intrinsic transform (i.e. based on the head
            // coordinate system, not the world coordinate system).  It is a 3 element vector:
            // axis (d theta / dt).
            //
            // We leave rotational velocity relative to the head coordinate system,
            // as the initial head tracking sensor's world frame is arbitrary.
            media::quaternionToAngles(media::rotationVectorToQuaternion(rotationalVelocity),
                    &pryprydt[3], &pryprydt[4], &pryprydt[5]);
        }
        pryprydt[6] = isNewReference;
        pryprydt[7] = delayMs;
        for (size_t i = 0; i < 6; ++i) {
            // pitch, roll, yaw in degrees, referenced to the world frame;
            // d_pitch, d_roll, d_yaw rotational velocity in degrees/s, relative to the head frame
            // (head-relative, as noted above).
            pryprydt[i] *= RAD_TO_DEGREE;
        }
        mHeadSensorRecorder.record(pryprydt);
        mHeadSensorDurableRecorder.record(pryprydt);

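        // The twist (velocity) from the sensor is expressed per second; dividing by
        // kTicksPerSecond converts it to per-tick (per-nanosecond) units, matching the
        // velocity limits configured in the constructor.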
        mProcessor->setWorldToHeadPose(timestamp, pose,
                                       twist.value_or(Twist3f()) / kTicksPerSecond);
        if (isNewReference) {
            mProcessor->recenter(true, false, __func__);
        }
    }
    if (sensor == mScreenSensor) {
        std::vector<float> pryt{ 0.f, 0.f, 0.f, delayMs}; // pitch, roll, yaw, timestamp_delay
        media::quaternionToAngles(pose.rotation(), &pryt[0], &pryt[1], &pryt[2]);
        for (size_t i = 0; i < 3; ++i) {
            pryt[i] *= RAD_TO_DEGREE;
        }
        mScreenSensorRecorder.record(pryt);
        mScreenSensorDurableRecorder.record(pryt);

        mProcessor->setWorldToScreenPose(timestamp, pose);
        if (isNewReference) {
            mProcessor->recenter(false, true, __func__);
        }
    }
}

std::string SpatializerPoseController::toString(unsigned level) const {
    std::string prefixSpace(level, ' ');
    std::string ss = prefixSpace + "SpatializerPoseController:\n";
    bool needUnlock = false;

    prefixSpace += ' ';
    auto now = std::chrono::steady_clock::now();
    if (!mMutex.try_lock_until(now + media::kSpatializerDumpSysTimeOutInSecond)) {
        ss.append(prefixSpace).append("try_lock failed, dumpsys maybe INACCURATE!\n");
    } else {
        needUnlock = true;
    }

    ss += prefixSpace;
    if (mHeadSensor == INVALID_SENSOR) {
        ss += "HeadSensor: INVALID\n";
    } else {
        base::StringAppendF(&ss, "HeadSensor: 0x%08x "
            "(active world-to-head : head-relative velocity) "
            "[ pitch, roll, yaw : d_pitch, d_roll, d_yaw : disc : delay ] "
            "(degrees, degrees/s, bool, ms)\n", mHeadSensor);
        ss.append(prefixSpace)
            .append(" PerMinuteHistory:\n")
            .append(mHeadSensorDurableRecorder.toString(level + 3))
            .append(prefixSpace)
            .append(" PerSecondHistory:\n")
            .append(mHeadSensorRecorder.toString(level + 3));
    }

    ss += prefixSpace;
    if (mScreenSensor == INVALID_SENSOR) {
        ss += "ScreenSensor: INVALID\n";
    } else {
        base::StringAppendF(&ss, "ScreenSensor: 0x%08x (active world-to-screen) "
            "[ pitch, roll, yaw : delay ] "
            "(degrees, ms)\n", mScreenSensor);
        ss.append(prefixSpace)
            .append(" PerMinuteHistory:\n")
            .append(mScreenSensorDurableRecorder.toString(level + 3))
            .append(prefixSpace)
            .append(" PerSecondHistory:\n")
            .append(mScreenSensorRecorder.toString(level + 3));
    }

    ss += prefixSpace;
    if (mActualMode.has_value()) {
        base::StringAppendF(&ss, "ActualMode: %s\n", media::toString(mActualMode.value()).c_str());
    } else {
        ss += "ActualMode NOTEXIST\n";
    }

    if (mProcessor) {
        ss += mProcessor->toString_l(level + 1);
    } else {
        ss.append(prefixSpace.c_str()).append("HeadTrackingProcessor not exist\n");
    }

    if (mPoseProvider) {
        ss += mPoseProvider->toString(level + 1);
    } else {
        ss.append(prefixSpace.c_str()).append("SensorPoseProvider not exist\n");
    }

    if (needUnlock) {
        mMutex.unlock();
    }
    // TODO: 233092747 add history sensor info with SimpleLog.
    return ss;
}

}  // namespace android