/*
 * Copyright (C) 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "chre/platform/platform_sensor_manager.h"

#include "sns_std_sensor.pb.h"
#include "stringl.h"

#include <cmath>

#include "chre/core/event_loop_manager.h"
#include "chre/core/sensor.h"
#include "chre/core/sensor_type_helpers.h"
#include "chre/platform/log.h"
#include "chre/platform/slpi/power_control_util.h"
#include "chre/platform/system_time.h"
#include "chre/util/nested_data_ptr.h"
#include "chre_api/chre/sensor.h"

#ifdef CHREX_SENSOR_SUPPORT
#include "chre/extensions/platform/slpi/see/vendor_data_types.h"
#endif  // CHREX_SENSOR_SUPPORT

#ifdef CHRE_VARIANT_SUPPLIES_SEE_SENSORS_LIST
#include "see_sensors.h"
#endif  // CHRE_VARIANT_SUPPLIES_SEE_SENSORS_LIST

#ifndef CHRE_SEE_NUM_TEMP_SENSORS
// There is usually more than one 'sensor_temperature' sensor in SEE. Define
// this in the variant-specific makefile to avoid missing sensors during
// sensor discovery.
#error "CHRE_SEE_NUM_TEMP_SENSORS is not defined"
#endif

namespace chre {
namespace {

//! A struct to facilitate sensor discovery
struct SuidAttr {
  sns_std_suid suid;
  SeeAttributes attr;
};

#ifndef CHRE_VARIANT_SUPPLIES_SEE_SENSORS_LIST

//! The list of SEE platform sensor data types that CHRE intends to support.
//! The standardized strings are defined in sns_xxx.proto.
const char *kSeeDataTypes[] = {
    "accel",
    "gyro",
    "mag",
    "pressure",
    "ambient_light",
    "proximity",
#ifdef CHRE_SLPI_DEFAULT_BUILD
    // Both instant motion and stationary detect share the same data type.
    "amd",
    "amd",
#else
    "motion_detect", "stationary_detect",
#endif
};

#endif  // CHRE_VARIANT_SUPPLIES_SEE_SENSORS_LIST

#ifdef CHRE_SLPI_UIMG_ENABLED
#ifndef CHREX_SENSOR_SUPPORT
// The current implementation uses vendor sensor type 3 to remap into accel,
// with requests made through QMI instead of QSockets, as SEE does not support
// micro-image batching in QCM.
#error "CHRE extensions are required for micro-image SEE support"
#endif  // CHREX_SENSOR_SUPPORT

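//! Returns true if the given sensor instance is exposed to big-image
//! nanoapps, i.e. its target group mask is NanoappGroupIds::BigImage.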
bool isBigImageSensor(const Sensor &sensor) {
  return sensor.getTargetGroupMask() == NanoappGroupIds::BigImage;
}

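//! Returns true if the given CHRE sensor type has a big-image variant.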
bool sensorTypeSupportsBigImage(uint8_t sensorType) {
  return (sensorType == CHRE_SENSOR_TYPE_ACCELEROMETER ||
          sensorType == CHRE_SENSOR_TYPE_UNCALIBRATED_ACCELEROMETER ||
          sensorType == CHRE_SENSOR_TYPE_UNCALIBRATED_GYROSCOPE ||
          sensorType == CHRE_SENSOR_TYPE_UNCALIBRATED_GEOMAGNETIC_FIELD ||
          sensorType == CHRE_SENSOR_TYPE_LIGHT);
}

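//! Returns true if the given sensor type is one of the vendor-defined
//! big-image sensor types.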
bool isBigImageSensorType(uint8_t sensorType) {
  return (sensorType == CHRE_SLPI_SENSOR_TYPE_BIG_IMAGE_ACCEL ||
          sensorType == CHRE_SLPI_SENSOR_TYPE_BIG_IMAGE_UNCAL_ACCEL ||
          sensorType == CHRE_SLPI_SENSOR_TYPE_BIG_IMAGE_UNCAL_GYRO ||
          sensorType == CHRE_SLPI_SENSOR_TYPE_BIG_IMAGE_UNCAL_MAG ||
          sensorType == CHRE_SLPI_SENSOR_TYPE_BIG_IMAGE_LIGHT);
}

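/**
 * Obtains the big-image sensor type given the specified sensor type.
 *
 * @param sensorType The sensor type to convert from.
 * @return The associated big-image sensor type, or the input sensor type
 *     if not associated with one.
 */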
uint8_t getBigImageSensorType(uint8_t sensorType) {
  switch (sensorType) {
    case CHRE_SENSOR_TYPE_ACCELEROMETER:
      return CHRE_SLPI_SENSOR_TYPE_BIG_IMAGE_ACCEL;
    case CHRE_SENSOR_TYPE_UNCALIBRATED_ACCELEROMETER:
      return CHRE_SLPI_SENSOR_TYPE_BIG_IMAGE_UNCAL_ACCEL;
    case CHRE_SENSOR_TYPE_UNCALIBRATED_GYROSCOPE:
      return CHRE_SLPI_SENSOR_TYPE_BIG_IMAGE_UNCAL_GYRO;
    case CHRE_SENSOR_TYPE_UNCALIBRATED_GEOMAGNETIC_FIELD:
      return CHRE_SLPI_SENSOR_TYPE_BIG_IMAGE_UNCAL_MAG;
    case CHRE_SENSOR_TYPE_LIGHT:
      return CHRE_SLPI_SENSOR_TYPE_BIG_IMAGE_LIGHT;
    default:
      return sensorType;
  }
}

/**
 * Obtains the micro-image sensor type given the specified sensor type.
 *
 * @param sensorType The sensor type to convert from.
 * @return The associated micro-image sensor type, or the input sensor type
 *     if not associated with one.
 */
uint8_t getUimgSensorType(uint8_t sensorType) {
  switch (sensorType) {
    case CHRE_SLPI_SENSOR_TYPE_BIG_IMAGE_ACCEL:
      return CHRE_SENSOR_TYPE_ACCELEROMETER;
    case CHRE_SLPI_SENSOR_TYPE_BIG_IMAGE_UNCAL_ACCEL:
      return CHRE_SENSOR_TYPE_UNCALIBRATED_ACCELEROMETER;
    case CHRE_SLPI_SENSOR_TYPE_BIG_IMAGE_UNCAL_GYRO:
      return CHRE_SENSOR_TYPE_UNCALIBRATED_GYROSCOPE;
    case CHRE_SLPI_SENSOR_TYPE_BIG_IMAGE_UNCAL_MAG:
      return CHRE_SENSOR_TYPE_UNCALIBRATED_GEOMAGNETIC_FIELD;
    case CHRE_SLPI_SENSOR_TYPE_BIG_IMAGE_LIGHT:
      return CHRE_SENSOR_TYPE_LIGHT;
    default:
      return sensorType;
  }
}

#endif  // CHRE_SLPI_UIMG_ENABLED

#ifndef CHRE_LOG_ONLY_NO_SENSOR
/**
 * Callback function which will run after a delay if a required sensor is not
 * found.
 */
void crashAfterSensorNotFoundCallback(uint16_t /* eventType */,
                                      void * /* data */,
                                      void * /* extraData */) {
  FATAL_ERROR("Missing required sensor(s)");
}
#endif

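/**
 * Logs an error when a required sensor is missing and, if the failure occurs
 * shortly after boot, schedules a delayed fatal error to force a recovery
 * reboot.
 */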
void handleMissingSensor() {
  // Try rebooting if a sensor is missing, which might help recover from a
  // transient failure/race condition at startup. But to avoid endless crashes,
  // only do this within 15 seconds of the timeout on initializing SEE - we rely
  // on knowledge that getMonotonicTime() maps into QTimer here, and QTimer only
  // resets when the entire system is rebooted (it continues increasing after
  // SLPI SSR).
#ifndef CHRE_LOG_ONLY_NO_SENSOR
  if (SystemTime::getMonotonicTime() < (kDefaultSeeWaitTimeout + Seconds(15))) {
    Nanoseconds delay(5 * Seconds(60).toRawNanoseconds());  // 5 minutes
    EventLoopManagerSingleton::get()->setDelayedCallback(
        SystemCallbackType::DelayedFatalError, nullptr,
        crashAfterSensorNotFoundCallback, delay);
  }
#endif
  LOGE("Missing required sensor(s)");
}

180 /**
181  * Obtains the sensor type given the specified data type and whether the sensor
182  * is runtime-calibrated or not.
183  *
184  * @return Whether a sensor type was found for the given data type.
185  */
getSensorTypeFromDataType(const char * dataType,bool calibrated,uint8_t * sensorType,bool bigImage=false)186 bool getSensorTypeFromDataType(const char *dataType, bool calibrated,
187                                uint8_t *sensorType, bool bigImage = false) {
188   bool success = true;
189   if (strcmp(dataType, "accel") == 0) {
190     if (calibrated) {
191       *sensorType = CHRE_SENSOR_TYPE_ACCELEROMETER;
192     } else {
193       *sensorType = CHRE_SENSOR_TYPE_UNCALIBRATED_ACCELEROMETER;
194     }
195   } else if (strcmp(dataType, "gyro") == 0) {
196     if (calibrated) {
197       *sensorType = CHRE_SENSOR_TYPE_GYROSCOPE;
198     } else {
199       *sensorType = CHRE_SENSOR_TYPE_UNCALIBRATED_GYROSCOPE;
200     }
201   } else if (strcmp(dataType, "mag") == 0) {
202     if (calibrated) {
203       *sensorType = CHRE_SENSOR_TYPE_GEOMAGNETIC_FIELD;
204     } else {
205       *sensorType = CHRE_SENSOR_TYPE_UNCALIBRATED_GEOMAGNETIC_FIELD;
206     }
207   } else if (strcmp(dataType, "pressure") == 0) {
208     *sensorType = CHRE_SENSOR_TYPE_PRESSURE;
209   } else if (strcmp(dataType, "ambient_light") == 0) {
210     *sensorType = CHRE_SENSOR_TYPE_LIGHT;
211   } else if (strcmp(dataType, "proximity") == 0) {
212     *sensorType = CHRE_SENSOR_TYPE_PROXIMITY;
213   } else if (strcmp(dataType, "motion_detect") == 0 ||
214              strcmp(dataType, "amd") == 0) {
215     *sensorType = CHRE_SENSOR_TYPE_INSTANT_MOTION_DETECT;
216   } else if (strcmp(dataType, "stationary_detect") == 0) {
217     *sensorType = CHRE_SENSOR_TYPE_STATIONARY_DETECT;
218   } else if (strcmp(dataType, "step_detect") == 0) {
219     *sensorType = CHRE_SENSOR_TYPE_STEP_DETECT;
220   } else {
221 #ifdef CHREX_SENSOR_SUPPORT
222     success = extension::vendorSensorTypeFromDataType(dataType, calibrated,
223                                                       sensorType);
224   }
225 #else
226     success = false;
227   }
228 #endif
229 
230 #ifdef CHRE_SLPI_UIMG_ENABLED
231   if (bigImage && !sensorTypeSupportsBigImage(*sensorType)) {
232     success = false;
233   }
234 #endif
235 
236   return success;
237 }
238 
/**
 * Merges a status update with an existing sampling status.
 */
void mergeUpdatedStatus(
    uint32_t sensorHandle,
    const SeeHelperCallbackInterface::SamplingStatusData &update,
    struct chreSensorSamplingStatus *mergedStatus) {
  Sensor *sensor = getSensorRequestManager().getSensor(sensorHandle);
  sensor->getSamplingStatus(mergedStatus);

  if (update.enabledValid) {
    mergedStatus->enabled = update.status.enabled;
  }
  if (update.intervalValid) {
    mergedStatus->interval = update.status.interval;
  }
  if (update.latencyValid) {
    mergedStatus->latency = update.status.latency;
  }
}

/**
 * Helper function to post a bias event given the bias data.
 *
 * @param sensorType The sensor type to post the event for.
 * @param targetGroupMask The target group mask of the sensor to post the
 *     event for.
 * @param bias The bias data.
 */
void postSensorBiasEvent(uint8_t sensorType, uint16_t targetGroupMask,
                         const chreSensorThreeAxisData &bias) {
  uint32_t sensorHandle;
  if (getSensorRequestManager().getSensorHandle(
          sensorType, 0 /* sensorIndex */, targetGroupMask, &sensorHandle)) {
    auto *event = memoryAlloc<struct chreSensorThreeAxisData>();
    if (event == nullptr) {
      LOG_OOM();
    } else {
      *event = bias;
      event->header.sensorHandle = sensorHandle;
      getSensorRequestManager().handleBiasEvent(sensorHandle, event);
    }
  }
}

/**
 * Compares the given status updates and returns true if they are the same.
 *
 * A simple memcmp cannot be used because fields that are marked invalid may
 * differ between updates without indicating a meaningful change.
 */
bool isSameStatusUpdate(
    const SeeHelperCallbackInterface::SamplingStatusData &status1,
    const SeeHelperCallbackInterface::SamplingStatusData &status2) {
  bool sameStatus = status1.enabledValid == status2.enabledValid;
  if (sameStatus && status1.enabledValid) {
    sameStatus &= status1.status.enabled == status2.status.enabled;
  }
  // Only check interval / latency fields if both status updates say the sensor
  // is enabled since CHRE doesn't care what the fields are set to if the sensor
  // is disabled.
  if (sameStatus && status1.status.enabled) {
    sameStatus &= status1.intervalValid == status2.intervalValid;
    if (sameStatus && status1.intervalValid) {
      sameStatus &= status1.status.interval == status2.status.interval;
    }
    sameStatus &= status1.latencyValid == status2.latencyValid;
    if (sameStatus && status1.latencyValid) {
      sameStatus &= status1.status.latency == status2.status.latency;
    }
  }
  return sameStatus;
}

/**
 * Constructs and initializes a sensor, and adds it to the sensor list.
 *
 * @param seeHelper SeeHelper instance to register the sensor with.
 * @param sensorType The sensor type of the sensor.
 * @param targetGroupMask The mask of target groups this sensor supports.
 * @param suid The SUID of the sensor as provided by SEE.
 * @param attr A reference to SeeAttributes.
 * @param sensors The sensor list.
 */
void addSensor(SeeHelper &seeHelper, uint8_t sensorType,
               uint16_t targetGroupMask, const sns_std_suid &suid,
               const SeeAttributes &attr, DynamicVector<Sensor> *sensors) {
  // Concatenate vendor and name with a space in between.
  char sensorName[kSensorNameMaxLen];
  strlcpy(sensorName, attr.vendor, sizeof(sensorName));
  strlcat(sensorName, " ", sizeof(sensorName));
  strlcat(sensorName, attr.name, sizeof(sensorName));

  // Some sensors report a max sample rate of 0, which would make ceilf()
  // return infinity for on-change or continuous sensors; substitute a default
  // so minInterval stays meaningful.
  float maxSampleRate = (attr.maxSampleRate == 0.0f) ? 10 : attr.maxSampleRate;

  // Override one-shot sensor's minInterval to default.
  uint64_t minInterval =
      SensorTypeHelpers::isOneShot(sensorType)
          ? CHRE_SENSOR_INTERVAL_DEFAULT
          : static_cast<uint64_t>(
                ceilf(Seconds(1).toRawNanoseconds() / maxSampleRate));

  if (!sensors->emplace_back()) {
    FATAL_ERROR("Failed to allocate new sensor: out of memory");
  }

  // The sensor base class must be initialized before the main Sensor init()
  // can be invoked, as init() is allowed to invoke base class methods.
  sensors->back().initBase(sensorType, minInterval, sensorName,
                           attr.passiveRequest, targetGroupMask);
  sensors->back().init();

#ifdef CHRE_SLPI_UIMG_ENABLED
  bool resample = false;
  if (sensorTypeSupportsBigImage(sensorType) &&
      targetGroupMask == NanoappGroupIds::BigImage) {
    // Resample big-image sensors to reduce system load during a software flush.
    resample = true;
    // Use the big-image sensor type so that it's clear which samples are coming
    // from the big-image SEE helper. This type is mapped back to the standard
    // CHRE type before anything is sent to nanoapps.
    sensorType = getBigImageSensorType(sensorType);
  }
#else
  bool resample = false;
#endif
  bool prevRegistered;
  bool registered =
      seeHelper.registerSensor(sensorType, suid, resample, &prevRegistered);
  if (!registered && prevRegistered) {
    LOGW("SUID has been previously registered");
  } else if (!registered) {
    FATAL_ERROR("Failed to register SUID/SensorType mapping.");
  }
}

/**
 * Compare SEE reported stream type attribute to the expected one. Some SEE
 * sensors may support more than one stream type.
 */
bool isStreamTypeCorrect(uint8_t sensorType, uint8_t streamType) {
  bool success = true;
  if ((SensorTypeHelpers::isContinuous(sensorType) &&
       streamType != SNS_STD_SENSOR_STREAM_TYPE_STREAMING) ||
      (SensorTypeHelpers::isOnChange(sensorType) &&
       streamType != SNS_STD_SENSOR_STREAM_TYPE_ON_CHANGE)
// The default SLPI build exposes instant motion / stationary sensors as
// on-change, but CHRE uses them as one-shot.
#ifndef CHRE_SLPI_DEFAULT_BUILD
      || (SensorTypeHelpers::isOneShot(sensorType) &&
          streamType != SNS_STD_SENSOR_STREAM_TYPE_SINGLE_OUTPUT)
#endif
  ) {
    success = false;
    LOGW("Inconsistent sensor type %" PRIu8 " and stream type %" PRIu8,
         static_cast<uint8_t>(sensorType), streamType);
  }
  return success;
}

/**
 * Obtains the list of SUIDs and their attributes that support the specified
 * data type.
 */
bool getSuidAndAttrs(SeeHelper &seeHelper, const char *dataType,
                     DynamicVector<SuidAttr> *suidAttrs, uint8_t minNumSuids) {
  DynamicVector<sns_std_suid> suids;
  bool success = seeHelper.findSuidSync(dataType, &suids, minNumSuids);
  if (!success) {
    LOGE("Failed to find sensor '%s'", dataType);
  } else {
    LOGV("Num of SUIDs found for '%s': %zu", dataType, suids.size());

    for (const auto &suid : suids) {
      SeeAttributes attr;
      if (!seeHelper.getAttributesSync(suid, &attr)) {
        success = false;
        LOGE("Failed to get attributes of SUID 0x%" PRIx64 " %" PRIx64,
             suid.suid_high, suid.suid_low);
      } else {
        LOGV("%s %s, hw id %" PRId64 ", max ODR %f Hz, stream type %" PRIu8
             " passive %d",
             attr.vendor, attr.name, attr.hwId, attr.maxSampleRate,
             attr.streamType, attr.passiveRequest);
        SuidAttr sensor = {
            .suid = suid,
            .attr = attr,
        };
        if (!suidAttrs->push_back(sensor)) {
          success = false;
          LOG_OOM();
        }
      }
    }
  }
  return success;
}

#ifndef CHRE_SLPI_DEFAULT_BUILD
//! Check whether two sensors with the specified attributes belong to the same
//! sensor hardware module.
bool sensorHwMatch(const SeeAttributes &attr0, const SeeAttributes &attr1) {
  // When the HW ID is absent, it defaults to 0 and won't be a factor.
  return ((strncmp(attr0.vendor, attr1.vendor, kSeeAttrStrValLen) == 0) &&
          (strncmp(attr0.name, attr1.name, kSeeAttrStrValLen) == 0) &&
          (attr0.hwId == attr1.hwId));
}
#endif

/**
 * Looks up SUID(s) associated with a given sensor data type string and sensor
 * type enum, registers them with SeeHelper, and adds a Sensor instance to the
 * supplied vector for use in CHRE. When given an uncalibrated sensor type,
 * this also looks for and adds the calibrated sensor type.
 *
 * @param seeHelper SeeHelper instance to use for lookup/registration
 * @param temperatureSensors List of previously discovered temperature sensor
 *        info to use for adding temp sensors associated with this sensor type
 * @param dataType SEE data type string
 * @param sensorType CHRE sensor type associated with dataType
 * @param targetGroupMask Group mask sensors that are added should target
 * @param skipAdditionalTypes if true, don't attempt to add
 *        calibrated/temperature sensor types associated with this sensorType
 * @param sensors Vector to append found sensor(s) to
 */
void findAndAddSensorsForType(SeeHelper &seeHelper,
                              const DynamicVector<SuidAttr> &temperatureSensors,
                              const char *dataType, uint8_t sensorType,
                              uint16_t targetGroupMask,
                              bool skipAdditionalTypes,
                              DynamicVector<Sensor> *sensors) {
  DynamicVector<SuidAttr> primarySensors;
  if (!getSuidAndAttrs(seeHelper, dataType, &primarySensors,
                       1 /* minNumSuids */)) {
    handleMissingSensor();
  }

  for (const auto &primarySensor : primarySensors) {
    sns_std_suid suid = primarySensor.suid;
    SeeAttributes attr = primarySensor.attr;

    // Some sensors support both continuous and on-change streams. If more than
    // one SUID supports the data type, choose the first one that has the
    // expected stream type.
    if (isStreamTypeCorrect(sensorType, attr.streamType)) {
      addSensor(seeHelper, sensorType, targetGroupMask, suid, attr, sensors);

      if (!skipAdditionalTypes) {
        // Check if this sensor has a runtime-calibrated version.
        uint8_t calibratedType;
        if (getSensorTypeFromDataType(dataType, true /* calibrated */,
                                      &calibratedType) &&
            calibratedType != sensorType) {
          uint16_t calTargetGroupMask = targetGroupMask;
#ifdef CHRE_SLPI_UIMG_ENABLED
          if (!sensorTypeSupportsBigImage(calibratedType)) {
            // Override the target group mask if the calibrated type isn't
            // supported in big-image to ensure the calibrated type is still
            // exposed to big-image nanoapps.
            calTargetGroupMask = kDefaultTargetGroupMask;
          }
#endif
          addSensor(seeHelper, calibratedType, calTargetGroupMask, suid, attr,
                    sensors);
        }

        // Check if this sensor has a secondary temperature sensor.
        uint8_t temperatureType =
            PlatformSensorTypeHelpers::getTempSensorType(sensorType);
        if (temperatureType != CHRE_SENSOR_TYPE_INVALID) {
          bool tempFound = false;
          for (const auto &tempSensor : temperatureSensors) {
            sns_std_suid tempSuid = tempSensor.suid;
            SeeAttributes tempAttr = tempSensor.attr;

#ifdef CHRE_SLPI_DEFAULT_BUILD
            // The default build exposes a single temperature sensor that is
            // used for all temperature sensor types and doesn't share the
            // primarySensor's attributes, so accept it unconditionally.
            if (true) {
#else
            if (sensorHwMatch(attr, tempAttr)) {
#endif
              LOGV("Found matching temperature sensor type");
              tempFound = true;
              // Temp sensors aren't currently separated for big-image / uimg
              // so always use the default mask when adding them.
              constexpr uint16_t kTempGroupMask = kDefaultTargetGroupMask;
              addSensor(seeHelper, temperatureType, kTempGroupMask, tempSuid,
                        tempAttr, sensors);
              break;
            }
          }
          if (!tempFound) {
            LOGW("Temperature sensor type %" PRIu8 " not found!",
                 static_cast<uint8_t>(temperatureType));
          }
        }
      }
      break;
    }
  }
}

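/**
 * Posts a sampling status update to the main CHRE thread if it differs from
 * the last status received for the given sensor.
 */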
void postSamplingUpdateForSensor(
    Sensor *sensor, uint32_t sensorHandle,
    UniquePtr<SeeHelperCallbackInterface::SamplingStatusData> &&status) {
  // Ignore the enabled flag from the status update if this sensor doesn't
  // support passive mode, because it may cause the sampling status in CHRE to
  // go out of sync with reality.
  if (!sensor->supportsPassiveMode()) {
    status->status.enabled = sensor->mLastReceivedSamplingStatus.status.enabled;
    status->enabledValid = sensor->mLastReceivedSamplingStatus.enabledValid;
  }
  if (!isSameStatusUpdate(sensor->mLastReceivedSamplingStatus, *status.get())) {
    sensor->mLastReceivedSamplingStatus = *status.get();

    auto callback = [](uint16_t /* type */, void *data, void *extraData) {
      uint32_t sensorHandle = NestedDataPtr<uint32_t>(extraData);
      auto *samplingStatus =
          static_cast<SeeHelperCallbackInterface::SamplingStatusData *>(data);

      // This memory will be freed via releaseSamplingStatusUpdate().
      struct chreSensorSamplingStatus *status =
          memoryAlloc<struct chreSensorSamplingStatus>();
      mergeUpdatedStatus(sensorHandle, *samplingStatus, status);

      getSensorRequestManager().handleSamplingStatusUpdate(sensorHandle,
                                                           status);
      memoryFree(samplingStatus);
    };
    // Schedule a deferred callback to handle the sensor status change on the
    // main thread.
    EventLoopManagerSingleton::get()->deferCallback(
        SystemCallbackType::SensorStatusUpdate, status.release(), callback,
        NestedDataPtr<uint32_t>(sensorHandle));
  }
}

}  // namespace

PlatformSensorManager::~PlatformSensorManager() {}

void PlatformSensorManager::init() {
  if (!mSeeHelper.init(this)) {
    FATAL_ERROR("Failed to initialize SEE helper");
  }

#ifdef CHRE_SLPI_UIMG_ENABLED
  if (!mBigImageSeeHelper.init(this, kDefaultSeeWaitTimeout,
                               true /* skipDefaultSensorInit */)) {
    FATAL_ERROR("Failed to init bimg SEE helper");
  }
#endif  // CHRE_SLPI_UIMG_ENABLED
}

uint16_t PlatformSensorManager::getTargetGroupId(
    const Nanoapp &nanoapp) const {
#ifdef CHRE_SLPI_UIMG_ENABLED
  return (nanoapp.isUimgApp()) ? NanoappGroupIds::MicroImage
                               : NanoappGroupIds::BigImage;
#else
  return NanoappGroupIds::BigImage;
#endif
}

SeeHelper &PlatformSensorManagerBase::getSeeHelperForSensor(
    const Sensor &sensor) {
#ifdef CHRE_SLPI_UIMG_ENABLED
  if (isBigImageSensor(sensor)) {
    slpiForceBigImage();
    return mBigImageSeeHelper;
  } else
#endif
  {
    return mSeeHelper;
  }
}

#ifdef CHRE_SLPI_UIMG_ENABLED
void PlatformSensorManagerBase::getBigImageSensors(
    DynamicVector<Sensor> *sensors) {
  CHRE_ASSERT(sensors);

  // Currently, only calibrated accel, uncalibrated accel/gyro/mag, and ambient
  // light are added, as these are the ones big-image nanoapps are known to
  // need at a different batching rate compared to uimg.
  const char *kBigImageDataTypes[] = {
      "accel",
      "gyro",
      "mag",
      "ambient_light",
  };

  DynamicVector<SuidAttr> nullTemperatureSensorList;
  constexpr uint16_t kTargetGroupMask = NanoappGroupIds::BigImage;

  for (size_t i = 0; i < ARRAY_SIZE(kBigImageDataTypes); i++) {
    const char *dataType = kBigImageDataTypes[i];
    // Loop through potential cal/uncal sensors.
    for (size_t j = 0; j < 2; j++) {
      uint8_t sensorType;
      if (getSensorTypeFromDataType(dataType, (j == 0) /* calibrated */,
                                    &sensorType, true /* bigImage */)) {
        findAndAddSensorsForType(mBigImageSeeHelper, nullTemperatureSensorList,
                                 dataType, sensorType, kTargetGroupMask,
                                 true /* skipAdditionalTypes */, sensors);
      }
    }
  }
}
#endif  // CHRE_SLPI_UIMG_ENABLED

DynamicVector<Sensor> PlatformSensorManager::getSensors() {
  DynamicVector<Sensor> sensors;
  DynamicVector<SuidAttr> tempSensors;
  if (!getSuidAndAttrs(mSeeHelper, "sensor_temperature", &tempSensors,
                       CHRE_SEE_NUM_TEMP_SENSORS)) {
    handleMissingSensor();
  }

#ifndef CHREX_SENSOR_SUPPORT
  const char *kVendorDataTypes[] = {};
#endif  // CHREX_SENSOR_SUPPORT
  constexpr size_t kNumSeeTypes = ARRAY_SIZE(kSeeDataTypes);
  constexpr size_t kNumVendorTypes = ARRAY_SIZE(kVendorDataTypes);
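  // Walk the built-in SEE data types first, then any vendor-supplied types,
  // resolving each data type string to a CHRE sensor type before discovery.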
  for (size_t i = 0; i < kNumSeeTypes + kNumVendorTypes; i++) {
    const char *dataType = (i < kNumSeeTypes)
                               ? kSeeDataTypes[i]
                               : kVendorDataTypes[i - kNumSeeTypes];

    uint8_t sensorType;
    if (!getSensorTypeFromDataType(dataType, false /* calibrated */,
                                   &sensorType)) {
      LOGE("Unknown sensor type found for '%s'", dataType);
      continue;
    }

    bool skipAdditionalTypes = false;

#ifdef CHRE_SLPI_DEFAULT_BUILD
    // Stationary and motion detect share the same dataType on the default build
    if (sensorType == CHRE_SENSOR_TYPE_INSTANT_MOTION_DETECT &&
        i == kNumSeeTypes - 1) {
      sensorType = CHRE_SENSOR_TYPE_STATIONARY_DETECT;
      // Skip additional types or InstantMotion will be added to the sensor list
      // twice.
      skipAdditionalTypes = true;
    }
#endif

    uint16_t targetGroupMask = kDefaultTargetGroupMask;
#ifdef CHRE_SLPI_UIMG_ENABLED
    if (sensorTypeSupportsBigImage(sensorType)) {
      targetGroupMask = NanoappGroupIds::MicroImage;
    }
#endif

    findAndAddSensorsForType(mSeeHelper, tempSensors, dataType, sensorType,
                             targetGroupMask, skipAdditionalTypes, &sensors);
  }

#ifdef CHRE_SLPI_UIMG_ENABLED
  getBigImageSensors(&sensors);
#endif

  return sensors;
}

bool PlatformSensorManager::configureSensor(Sensor &sensor,
                                            const SensorRequest &request) {
  uint8_t sensorType = sensor.getSensorType();
#ifdef CHRE_SLPI_UIMG_ENABLED
  if (isBigImageSensor(sensor)) {
    sensorType = getBigImageSensorType(sensorType);
  }
#endif

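  // Translate the CHRE request into a SEE request: the requested interval in
  // nanoseconds becomes a sampling rate in Hz, and the requested latency in
  // nanoseconds becomes a batch period in microseconds.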
  SeeSensorRequest req = {
      .sensorType = sensorType,
      .enable = (request.getMode() != SensorMode::Off),
      .passive = sensorModeIsPassive(request.getMode()),
      .samplingRateHz = static_cast<float>(
          kOneSecondInNanoseconds / request.getInterval().toRawNanoseconds()),
      // Override batch period to 0 for micro-image non-continuous sensors to
      // ensure one sample per batch so that nanoapps do not miss state changes.
      .batchPeriodUs =
#ifdef CHRE_SLPI_UIMG_ENABLED
          (!sensor.isContinuous() && !isBigImageSensor(sensor))
              ? 0
              :
#endif
              static_cast<uint32_t>(request.getLatency().toRawNanoseconds() /
                                    kOneMicrosecondInNanoseconds),
  };

  SeeHelper &seeHelper = getSeeHelperForSensor(sensor);
  bool wasInUImage = slpiInUImage();

  bool success = seeHelper.makeRequest(req);

  // If we dropped into micro-image during that blocking call to SEE, go back
  // to big image. This won't happen if the calling nanoapp is a big image one,
  // but other code paths currently assume that we will only transition from big
  // image to micro-image from CHRE's perspective while it's waiting for an
  // event to arrive in its empty queue.
  // TODO: transition back to big image only when needed, at the point of
  // invoking a nanoapp's free event/message callback
  if (!wasInUImage && slpiInUImage()) {
    LOGD("Restoring big image operating mode");
    slpiForceBigImage();
  }

  if (success) {
    // TODO: remove setSamplingStatus when .latency is available in status
    // update from SEE.
    struct chreSensorSamplingStatus status;
    if (sensor.getSamplingStatus(&status)) {
      // If passive request is not supported by this SEE sensor, it won't be
      // dynamically enabled/disabled and its status stays the same as set here.
      if (!sensor.supportsPassiveMode()) {
        status.enabled = req.enable;
      }
      status.latency = req.batchPeriodUs * kOneMicrosecondInNanoseconds;
      sensor.setSamplingStatus(status);
    }
  }
  return success;
}

bool PlatformSensorManager::configureBiasEvents(const Sensor &sensor,
                                                bool enable,
                                                uint64_t /* latencyNs */) {
  // Big-image sensor types will be mapped into micro-image sensors so assume
  // using mSeeHelper is OK.
  SeeCalHelper *calHelper = mSeeHelper.getCalHelper();

  // Make sure it's the calibrated sensor type since SeeCalHelper only deals
  // with calibrated types.
  uint8_t calibratedType =
      PlatformSensorTypeHelpers::toCalibratedSensorType(sensor.getSensorType());

  const sns_std_suid *suid =
      calHelper->getCalSuidFromSensorType(calibratedType);
  bool success = false;
  if (suid != nullptr) {
    if (enable != calHelper->areCalUpdatesEnabled(*suid)) {
      success = calHelper->configureCalUpdates(*suid, enable, mSeeHelper);
    } else {
      // Return true since updates are already configured to the right state.
      // This can happen when configuring big-image sensors since they currently
      // map to the micro-image sensor type which may already be enabled.
      success = true;
    }
  }
  return success;
}

bool PlatformSensorManager::getThreeAxisBias(
    const Sensor &sensor, struct chreSensorThreeAxisData *bias) const {
  SeeCalHelper *calHelper = getSeeHelperForSensor(sensor).getCalHelper();

  bool success = sensor.reportsBiasEvents();
  if (success) {
    uint8_t sensorType = sensor.getSensorType();

    // We use the runtime-calibrated sensor type here, per documentation
    // of SeeCalHelper::getBias(), but overwrite the sensorHandle to that of
    // the current sensor, because the calibration data itself is equivalent
    // for both calibrated/uncalibrated sensor types.
    uint8_t calSensorType =
        PlatformSensorTypeHelpers::toCalibratedSensorType(sensorType);
    if (!calHelper->getBias(calSensorType, bias)) {
      // Set to zero bias + unknown accuracy per CHRE API requirements.
      memset(bias, 0, sizeof(chreSensorThreeAxisData));
      bias->header.readingCount = 1;
      bias->header.accuracy = CHRE_SENSOR_ACCURACY_UNKNOWN;
    }

    // Overwrite sensorHandle to match the request type.
    getSensorRequestManager().getSensorHandle(sensorType, 0 /* sensorIndex */,
                                              sensor.getTargetGroupMask(),
                                              &bias->header.sensorHandle);
  }

  return success;
}

bool PlatformSensorManager::flush(const Sensor &sensor,
                                  uint32_t *flushRequestId) {
  uint8_t sensorType = sensor.getSensorType();
#ifdef CHRE_SLPI_UIMG_ENABLED
  if (isBigImageSensor(sensor)) {
    sensorType = getBigImageSensorType(sensorType);
  }
#endif
  return getSeeHelperForSensor(sensor).flush(sensorType);
}

void PlatformSensorManager::releaseSamplingStatusUpdate(
    struct chreSensorSamplingStatus *status) {
  memoryFree(status);
}

void PlatformSensorManager::releaseSensorDataEvent(void *data) {
  memoryFree(data);
}

void PlatformSensorManager::releaseBiasEvent(void *biasData) {
  memoryFree(biasData);
}

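// SEE reports status updates using its own sensor type; map big-image types
// back to the standard CHRE types and look up the matching sensor handle
// before forwarding the update.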
void PlatformSensorManagerBase::onSamplingStatusUpdate(
    UniquePtr<SeeHelperCallbackInterface::SamplingStatusData> &&status) {
  uint32_t sensorHandle;
#ifdef CHRE_SLPI_UIMG_ENABLED
  uint16_t targetGroupMask = NanoappGroupIds::MicroImage;
  if (isBigImageSensorType(status->sensorType)) {
    status->sensorType = getUimgSensorType(status->sensorType);
    targetGroupMask = NanoappGroupIds::BigImage;
  }
#else
  uint16_t targetGroupMask = NanoappGroupIds::BigImage;
#endif
  getSensorRequestManager().getSensorHandle(
      status->sensorType, 0 /* sensorIndex */, targetGroupMask, &sensorHandle);
  Sensor *sensor = getSensorRequestManager().getSensor(sensorHandle);

  if (sensor != nullptr) {
    postSamplingUpdateForSensor(sensor, sensorHandle, std::move(status));
  }
}

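// Data events also arrive tagged with SEE's sensor type; translate the type,
// stamp the resolved sensor handle into the event header, and hand the event
// off to the sensor request manager, which takes ownership of the memory.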
void PlatformSensorManagerBase::onSensorDataEvent(
    uint8_t sensorType, UniquePtr<uint8_t> &&eventData) {
  uint32_t sensorHandle;
#ifdef CHRE_SLPI_UIMG_ENABLED
  uint16_t targetGroupMask = NanoappGroupIds::MicroImage;
  if (isBigImageSensorType(sensorType)) {
    sensorType = getUimgSensorType(sensorType);
    targetGroupMask = NanoappGroupIds::BigImage;
  }
#else
  uint16_t targetGroupMask = NanoappGroupIds::BigImage;
#endif

  getSensorRequestManager().getSensorHandle(sensorType, 0 /* sensorIndex */,
                                            targetGroupMask, &sensorHandle);
  auto *header =
      reinterpret_cast<struct chreSensorDataHeader *>(eventData.get());
  header->sensorHandle = sensorHandle;

  getSensorRequestManager().handleSensorDataEvent(sensorHandle,
                                                  eventData.release());
}

void PlatformSensorManagerBase::onHostWakeSuspendEvent(bool awake) {
  // Host wake events are sent as soon as SEE is up so verify the event loop is
  // up before attempting to post the event to avoid a crash.
  if (EventLoopManagerSingleton::isInitialized()) {
    EventLoopManagerSingleton::get()
        ->getEventLoop()
        .getPowerControlManager()
        .onHostWakeSuspendEvent(awake);
  }
}

void PlatformSensorManagerBase::onSensorBiasEvent(
    uint8_t sensorType, UniquePtr<struct chreSensorThreeAxisData> &&biasData) {
  // A single bias update is sent for both uncal / cal types and also needs to
  // be sent for any big-image calibrated sensors. Currently, this requires that
  // we post up to 4 separate events for a single invocation of this method.

  uint16_t targetGroupMask;
  uint8_t uncalSensorType =
      SensorTypeHelpers::toUncalibratedSensorType(sensorType);
#ifdef CHRE_SLPI_UIMG_ENABLED
  targetGroupMask = NanoappGroupIds::BigImage;
  if (sensorTypeSupportsBigImage(sensorType)) {
    postSensorBiasEvent(sensorType, targetGroupMask, *biasData);
  }
  if (sensorTypeSupportsBigImage(uncalSensorType)) {
    postSensorBiasEvent(uncalSensorType, targetGroupMask, *biasData);
  }
#endif

  targetGroupMask =
#ifdef CHRE_SLPI_UIMG_ENABLED
      NanoappGroupIds::MicroImage;
#else
      NanoappGroupIds::BigImage;
#endif
  postSensorBiasEvent(sensorType, targetGroupMask, *biasData);
  postSensorBiasEvent(uncalSensorType, targetGroupMask, *biasData);
}

void PlatformSensorManagerBase::onFlushCompleteEvent(uint8_t sensorType) {
  // Flush complete events are sent after any batch delivery so verify the
  // event loop is up before attempting to post the event to avoid a crash.
  if (EventLoopManagerSingleton::isInitialized()) {
    // TODO: Have SEE pass flush request IDs through the flush complete event
    uint32_t sensorHandle;
#ifdef CHRE_SLPI_UIMG_ENABLED
    uint16_t targetGroupMask = NanoappGroupIds::MicroImage;
    if (isBigImageSensorType(sensorType)) {
      targetGroupMask = NanoappGroupIds::BigImage;
      sensorType = getUimgSensorType(sensorType);
    }
#else
    uint16_t targetGroupMask = NanoappGroupIds::BigImage;
#endif

    getSensorRequestManager().getSensorHandle(sensorType, 0 /* sensorIndex */,
                                              targetGroupMask, &sensorHandle);
    getSensorRequestManager().handleFlushCompleteEvent(
        sensorHandle, UINT32_MAX, /* invalid flush request ID */
        CHRE_ERROR_NONE);
  }
}

}  // namespace chre