• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (C) 2019 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #define LOG_TAG "EmulatedRequestProcessor"
18 #define ATRACE_TAG ATRACE_TAG_CAMERA
19 
20 #include "EmulatedRequestProcessor.h"
21 
22 #include <HandleImporter.h>
23 #include <hardware/gralloc.h>
24 #include <log/log.h>
25 #include <sync/sync.h>
26 #include <utils/Timers.h>
27 #include <utils/Trace.h>
28 
29 #include <memory>
30 
31 #include "GrallocSensorBuffer.h"
32 
33 namespace android {
34 
35 using ::android::frameworks::sensorservice::V1_0::ISensorManager;
36 using ::android::frameworks::sensorservice::V1_0::Result;
37 using android::hardware::camera::common::V1_0::helper::HandleImporter;
38 using ::android::hardware::sensors::V1_0::SensorInfo;
39 using ::android::hardware::sensors::V1_0::SensorType;
40 using google_camera_hal::ErrorCode;
41 using google_camera_hal::HwlPipelineResult;
42 using google_camera_hal::MessageType;
43 using google_camera_hal::NotifyMessage;
44 
EmulatedRequestProcessor(uint32_t camera_id,sp<EmulatedSensor> sensor,const HwlSessionCallback & session_callback)45 EmulatedRequestProcessor::EmulatedRequestProcessor(
46     uint32_t camera_id, sp<EmulatedSensor> sensor,
47     const HwlSessionCallback& session_callback)
48     : camera_id_(camera_id),
49       sensor_(sensor),
50       session_callback_(session_callback),
51       request_state_(std::make_unique<EmulatedLogicalRequestState>(camera_id)) {
52   ATRACE_CALL();
53   request_thread_ = std::thread([this] { this->RequestProcessorLoop(); });
54   importer_ = std::make_shared<HandleImporter>();
55 }
56 
// Stops the request worker thread, shuts down the sensor and disables any
// active accelerometer event queue.
EmulatedRequestProcessor::~EmulatedRequestProcessor() {
  ATRACE_CALL();
  // Signal the worker loop to exit and wait for it to finish before tearing
  // down any state it may touch.
  // NOTE(review): processor_done_ is read by the request thread without a
  // lock — confirm it is declared std::atomic<bool> in the header.
  processor_done_ = true;
  request_thread_.join();

  auto ret = sensor_->ShutDown();
  if (ret != OK) {
    ALOGE("%s: Failed during sensor shutdown %s (%d)", __FUNCTION__,
          strerror(-ret), ret);
  }

  // Stop accelerometer delivery if the sensor queue was ever initialized.
  if (sensor_event_queue_.get() != nullptr) {
    sensor_event_queue_->disableSensor(sensor_handle_);
    // clear() already drops the strong reference; the extra null assignment
    // below is redundant but harmless.
    sensor_event_queue_.clear();
    sensor_event_queue_ = nullptr;
  }
}
74 
// Validates and enqueues a batch of pipeline requests for |frame_number|.
//
// For each request this:
//   1. Verifies the pipeline id references a known pipeline.
//   2. Applies back-pressure: blocks until the pending queue drops to the
//      sensor pipeline depth, bounded by the maximum frame duration.
//   3. Remaps dynamic streams and allocates sensor input/output buffers.
//
// Returns OK on success, BAD_VALUE for an invalid pipeline id, TIMED_OUT
// when no request slot frees up in time, NO_MEMORY when output buffers
// cannot be created, or the error from the dynamic stream update.
status_t EmulatedRequestProcessor::ProcessPipelineRequests(
    uint32_t frame_number, std::vector<HwlPipelineRequest>& requests,
    const std::vector<EmulatedPipeline>& pipelines,
    const DynamicStreamIdMapType& dynamic_stream_id_map,
    bool use_default_physical_camera) {
  ATRACE_CALL();
  status_t res = OK;

  std::unique_lock<std::mutex> lock(process_mutex_);

  for (auto& request : requests) {
    if (request.pipeline_id >= pipelines.size()) {
      ALOGE("%s: Pipeline request with invalid pipeline id: %u", __FUNCTION__,
            request.pipeline_id);
      return BAD_VALUE;
    }

    // Wait for the request thread to drain the queue below the sensor
    // pipeline depth; RequestProcessorLoop notifies after each pop.
    while (pending_requests_.size() > EmulatedSensor::kPipelineDepth) {
      auto result = request_condition_.wait_for(
          lock, std::chrono::nanoseconds(
                    EmulatedSensor::kSupportedFrameDurationRange[1]));
      if (result == std::cv_status::timeout) {
        ALOGE("%s: Timed out waiting for a pending request slot", __FUNCTION__);
        return TIMED_OUT;
      }
    }

    res = request_state_->UpdateRequestForDynamicStreams(
        &request, pipelines, dynamic_stream_id_map, use_default_physical_camera);
    if (res != OK) {
      ALOGE("%s: Failed to update request for dynamic streams: %s(%d)",
            __FUNCTION__, strerror(-res), res);
      return res;
    }

    // Output buffers are mandatory; fail the whole batch if they cannot be
    // created.
    auto output_buffers = CreateSensorBuffers(
        frame_number, request.output_buffers,
        pipelines[request.pipeline_id].streams, request.pipeline_id,
        pipelines[request.pipeline_id].cb, /*override_width*/ 0,
        /*override_height*/ 0);
    if (output_buffers == nullptr) {
      return NO_MEMORY;
    }

    // Input buffers are optional; nullptr here simply means a regular
    // (non-reprocessing) request.
    auto input_buffers = CreateSensorBuffers(
        frame_number, request.input_buffers,
        pipelines[request.pipeline_id].streams, request.pipeline_id,
        pipelines[request.pipeline_id].cb, request.input_width,
        request.input_height);

    // Clone the settings so the pending entry owns its own copy.
    pending_requests_.push(
        {.settings = HalCameraMetadata::Clone(request.settings.get()),
         .input_buffers = std::move(input_buffers),
         .output_buffers = std::move(output_buffers)});
  }

  return OK;
}
133 
CreateSensorBuffers(uint32_t frame_number,const std::vector<StreamBuffer> & buffers,const std::unordered_map<uint32_t,EmulatedStream> & streams,uint32_t pipeline_id,HwlPipelineCallback cb,int32_t override_width,int32_t override_height)134 std::unique_ptr<Buffers> EmulatedRequestProcessor::CreateSensorBuffers(
135     uint32_t frame_number, const std::vector<StreamBuffer>& buffers,
136     const std::unordered_map<uint32_t, EmulatedStream>& streams,
137     uint32_t pipeline_id, HwlPipelineCallback cb, int32_t override_width,
138     int32_t override_height) {
139   if (buffers.empty()) {
140     return nullptr;
141   }
142 
143   std::vector<StreamBuffer> requested_buffers;
144   for (auto& buffer : buffers) {
145     if (buffer.buffer != nullptr) {
146       requested_buffers.push_back(buffer);
147       continue;
148     }
149 
150     if (session_callback_.request_stream_buffers != nullptr) {
151       std::vector<StreamBuffer> one_requested_buffer;
152       status_t res = session_callback_.request_stream_buffers(
153           buffer.stream_id, 1, &one_requested_buffer, frame_number);
154       if (res != OK) {
155         ALOGE("%s: request_stream_buffers failed: %s(%d)", __FUNCTION__,
156               strerror(-res), res);
157         continue;
158       }
159       if (one_requested_buffer.size() != 1 ||
160           one_requested_buffer[0].buffer == nullptr) {
161         ALOGE("%s: request_stream_buffers failed to return a valid buffer",
162               __FUNCTION__);
163         continue;
164       }
165       requested_buffers.push_back(one_requested_buffer[0]);
166     }
167   }
168 
169   if (requested_buffers.size() < buffers.size()) {
170     ALOGE(
171         "%s: Failed to acquire all sensor buffers: %zu acquired, %zu requested",
172         __FUNCTION__, requested_buffers.size(), buffers.size());
173     // This only happens for HAL buffer manager use case.
174     if (session_callback_.return_stream_buffers != nullptr) {
175       session_callback_.return_stream_buffers(requested_buffers);
176     }
177     return nullptr;
178   }
179 
180   auto sensor_buffers = std::make_unique<Buffers>();
181   sensor_buffers->reserve(requested_buffers.size());
182   for (auto& buffer : requested_buffers) {
183     auto sensor_buffer = CreateSensorBuffer(
184         frame_number, streams.at(buffer.stream_id), pipeline_id, cb, buffer,
185         override_width, override_height);
186     if (sensor_buffer.get() != nullptr) {
187       sensor_buffers->push_back(std::move(sensor_buffer));
188     }
189   }
190 
191   return sensor_buffers;
192 }
193 
NotifyFailedRequest(const PendingRequest & request)194 void EmulatedRequestProcessor::NotifyFailedRequest(const PendingRequest& request) {
195   if (request.output_buffers->at(0)->callback.notify != nullptr) {
196     // Mark all output buffers for this request in order not to send
197     // ERROR_BUFFER for them.
198     for (auto& output_buffer : *(request.output_buffers)) {
199       output_buffer->is_failed_request = true;
200     }
201 
202     auto output_buffer = std::move(request.output_buffers->at(0));
203     NotifyMessage msg = {
204         .type = MessageType::kError,
205         .message.error = {.frame_number = output_buffer->frame_number,
206                           .error_stream_id = -1,
207                           .error_code = ErrorCode::kErrorRequest}};
208     output_buffer->callback.notify(output_buffer->pipeline_id, msg);
209   }
210 }
211 
Flush()212 status_t EmulatedRequestProcessor::Flush() {
213   std::lock_guard<std::mutex> lock(process_mutex_);
214   // First flush in-flight requests
215   auto ret = sensor_->Flush();
216 
217   // Then the rest of the pending requests
218   while (!pending_requests_.empty()) {
219     const auto& request = pending_requests_.front();
220     NotifyFailedRequest(request);
221     pending_requests_.pop();
222   }
223 
224   return ret;
225 }
226 
GetBufferSizeAndStride(const EmulatedStream & stream,buffer_handle_t buffer,uint32_t * size,uint32_t * stride)227 status_t EmulatedRequestProcessor::GetBufferSizeAndStride(
228     const EmulatedStream& stream, buffer_handle_t buffer,
229     uint32_t* size /*out*/, uint32_t* stride /*out*/) {
230   if (size == nullptr) {
231     return BAD_VALUE;
232   }
233 
234   switch (stream.override_format) {
235     case HAL_PIXEL_FORMAT_RGB_888:
236       *stride = stream.width * 3;
237       *size = (*stride) * stream.height;
238       break;
239     case HAL_PIXEL_FORMAT_RGBA_8888:
240       *stride = stream.width * 4;
241       *size = (*stride) * stream.height;
242       break;
243     case HAL_PIXEL_FORMAT_Y16:
244       if (stream.override_data_space == HAL_DATASPACE_DEPTH) {
245         *stride = AlignTo(AlignTo(stream.width, 2) * 2, 16);
246         *size = (*stride) * AlignTo(stream.height, 2);
247       } else {
248         return BAD_VALUE;
249       }
250       break;
251     case HAL_PIXEL_FORMAT_BLOB:
252       if (stream.override_data_space == HAL_DATASPACE_V0_JFIF) {
253         *size = stream.buffer_size;
254         *stride = *size;
255       } else {
256         return BAD_VALUE;
257       }
258       break;
259     case HAL_PIXEL_FORMAT_RAW16:
260       if (importer_->getMonoPlanarStrideBytes(buffer, stride) != NO_ERROR) {
261         *stride = stream.width * 2;
262       }
263       *size = (*stride) * stream.height;
264       break;
265     default:
266       return BAD_VALUE;
267   }
268 
269   return OK;
270 }
271 
// Maps |buffer| into CPU-accessible memory and records the plane layout in
// |sensor_buffer|. YUV formats are locked via lockYCbCr(); everything else
// goes through the single-plane lock() path.
//
// Returns OK on success, BAD_VALUE when |sensor_buffer| is null, the format
// is unsupported, the gralloc lock fails, or the YUV layout cannot be
// handled by the emulated sensor.
status_t EmulatedRequestProcessor::LockSensorBuffer(
    const EmulatedStream& stream, buffer_handle_t buffer, int32_t width,
    int32_t height, SensorBuffer* sensor_buffer /*out*/) {
  if (sensor_buffer == nullptr) {
    return BAD_VALUE;
  }

  auto usage = GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN;
  bool isYUV_420_888 = stream.override_format == HAL_PIXEL_FORMAT_YCBCR_420_888;
  bool isP010 = static_cast<android_pixel_format_v1_1_t>(
                    stream.override_format) == HAL_PIXEL_FORMAT_YCBCR_P010;
  if ((isYUV_420_888) || (isP010)) {
    // Multi-planar YUV path: lock all three planes at once.
    IMapper::Rect map_rect = {0, 0, width, height};
    auto yuv_layout = importer_->lockYCbCr(buffer, usage, map_rect);
    if ((yuv_layout.y != nullptr) && (yuv_layout.cb != nullptr) &&
        (yuv_layout.cr != nullptr)) {
      sensor_buffer->plane.img_y_crcb.img_y =
          static_cast<uint8_t*>(yuv_layout.y);
      sensor_buffer->plane.img_y_crcb.img_cb =
          static_cast<uint8_t*>(yuv_layout.cb);
      sensor_buffer->plane.img_y_crcb.img_cr =
          static_cast<uint8_t*>(yuv_layout.cr);
      sensor_buffer->plane.img_y_crcb.y_stride = yuv_layout.yStride;
      sensor_buffer->plane.img_y_crcb.cbcr_stride = yuv_layout.cStride;
      sensor_buffer->plane.img_y_crcb.cbcr_step = yuv_layout.chromaStep;
      // Semi-planar YUV420 is only supported when the Cb/Cr planes are
      // interleaved exactly one byte apart.
      if (isYUV_420_888 && (yuv_layout.chromaStep == 2) &&
          std::abs(sensor_buffer->plane.img_y_crcb.img_cb -
                   sensor_buffer->plane.img_y_crcb.img_cr) != 1) {
        ALOGE("%s: Unsupported YUV layout, chroma step: %u U/V plane delta: %u",
              __FUNCTION__, yuv_layout.chromaStep,
              static_cast<unsigned>(
                  std::abs(sensor_buffer->plane.img_y_crcb.img_cb -
                           sensor_buffer->plane.img_y_crcb.img_cr)));
        return BAD_VALUE;
      }
      sensor_buffer->plane.img_y_crcb.bytesPerPixel = isP010 ? 2 : 1;
    } else {
      ALOGE("%s: Failed to lock output buffer!", __FUNCTION__);
      return BAD_VALUE;
    }
  } else {
    // Single-plane path: derive size/stride first, then lock.
    uint32_t buffer_size = 0, stride = 0;
    auto ret = GetBufferSizeAndStride(stream, buffer, &buffer_size, &stride);
    if (ret != OK) {
      ALOGE("%s: Unsupported pixel format: 0x%x", __FUNCTION__,
            stream.override_format);
      return BAD_VALUE;
    }
    // BLOB buffers are locked by total size; pixel buffers by region.
    if (stream.override_format == HAL_PIXEL_FORMAT_BLOB) {
      sensor_buffer->plane.img.img =
          static_cast<uint8_t*>(importer_->lock(buffer, usage, buffer_size));
    } else {
      IMapper::Rect region{0, 0, width, height};
      sensor_buffer->plane.img.img =
          static_cast<uint8_t*>(importer_->lock(buffer, usage, region));
    }
    if (sensor_buffer->plane.img.img == nullptr) {
      ALOGE("%s: Failed to lock output buffer!", __FUNCTION__);
      return BAD_VALUE;
    }
    sensor_buffer->plane.img.stride_in_bytes = stride;
    sensor_buffer->plane.img.buffer_size = buffer_size;
  }

  return OK;
}
338 
// Wraps one framework stream buffer into a GrallocSensorBuffer: copies the
// stream/request metadata, locks the gralloc handle for CPU access and
// imports the acquire fence. Returns nullptr when locking or fence import
// fails.
std::unique_ptr<SensorBuffer> EmulatedRequestProcessor::CreateSensorBuffer(
    uint32_t frame_number, const EmulatedStream& emulated_stream,
    uint32_t pipeline_id, HwlPipelineCallback callback,
    StreamBuffer stream_buffer, int32_t override_width,
    int32_t override_height) {
  auto buffer = std::make_unique<GrallocSensorBuffer>(importer_);

  auto stream = emulated_stream;
  // Make sure input stream formats are correctly mapped here
  if (stream.is_input) {
    stream.override_format = EmulatedSensor::OverrideFormat(
        stream.override_format,
        ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD);
  }
  // Overrides are used for reprocessing inputs whose dimensions can differ
  // from the configured stream.
  if (override_width > 0 && override_height > 0) {
    buffer->width = override_width;
    buffer->height = override_height;
  } else {
    buffer->width = stream.width;
    buffer->height = stream.height;
  }
  buffer->format = static_cast<PixelFormat>(stream.override_format);
  buffer->dataSpace = stream.override_data_space;
  buffer->stream_buffer = stream_buffer;
  buffer->pipeline_id = pipeline_id;
  buffer->callback = callback;
  buffer->frame_number = frame_number;
  // Physical streams report results under their own camera id.
  buffer->camera_id = emulated_stream.is_physical_camera_stream
                          ? emulated_stream.physical_camera_id
                          : camera_id_;
  buffer->is_input = stream.is_input;
  // In case buffer processing is successful, flip this flag accordingly
  buffer->stream_buffer.status = BufferStatus::kError;

  if (buffer->stream_buffer.buffer != nullptr) {
    auto ret = LockSensorBuffer(stream, buffer->stream_buffer.buffer,
                                buffer->width, buffer->height, buffer.get());
    if (ret != OK) {
      // NOTE(review): release() relinquishes ownership without running the
      // GrallocSensorBuffer destructor, which leaks the object unless the
      // destructor has side effects that must be skipped here — confirm
      // against GrallocSensorBuffer; reset()/plain assignment may be the
      // intended call.
      buffer.release();
      buffer = nullptr;
    }
  }

  if ((buffer.get() != nullptr) && (stream_buffer.acquire_fence != nullptr)) {
    // Duplicate the acquire fence so AcquireBuffers() can wait on it later.
    auto fence_status = importer_->importFence(stream_buffer.acquire_fence,
                                               buffer->acquire_fence_fd);
    if (!fence_status) {
      ALOGE("%s: Failed importing acquire fence!", __FUNCTION__);
      // NOTE(review): same release()-without-delete pattern as above.
      buffer.release();
      buffer = nullptr;
    }
  }

  return buffer;
}
394 
AcquireBuffers(Buffers * buffers)395 std::unique_ptr<Buffers> EmulatedRequestProcessor::AcquireBuffers(
396     Buffers* buffers) {
397   if ((buffers == nullptr) || (buffers->empty())) {
398     return nullptr;
399   }
400 
401   auto acquired_buffers = std::make_unique<Buffers>();
402   acquired_buffers->reserve(buffers->size());
403   auto output_buffer = buffers->begin();
404   while (output_buffer != buffers->end()) {
405     status_t ret = OK;
406     if ((*output_buffer)->acquire_fence_fd >= 0) {
407       ret = sync_wait((*output_buffer)->acquire_fence_fd,
408                       ns2ms(EmulatedSensor::kSupportedFrameDurationRange[1]));
409       if (ret != OK) {
410         ALOGE("%s: Fence sync failed: %s, (%d)", __FUNCTION__, strerror(-ret),
411               ret);
412       }
413     }
414 
415     if (ret == OK) {
416       acquired_buffers->push_back(std::move(*output_buffer));
417     }
418 
419     output_buffer = buffers->erase(output_buffer);
420   }
421 
422   return acquired_buffers;
423 }
424 
// Worker thread body: once per sensor vsync, pops the oldest pending
// request, acquires its buffers, resolves the logical/physical settings and
// hands everything to the emulated sensor. Runs until the destructor sets
// processor_done_ or the sensor stops reporting vsync.
void EmulatedRequestProcessor::RequestProcessorLoop() {
  ATRACE_CALL();

  bool vsync_status_ = true;
  while (!processor_done_ && vsync_status_) {
    {
      std::lock_guard<std::mutex> lock(process_mutex_);
      if (!pending_requests_.empty()) {
        status_t ret;
        const auto& request = pending_requests_.front();
        // Pending requests always carry at least one output buffer —
        // ProcessPipelineRequests() rejects requests without them.
        auto frame_number = request.output_buffers->at(0)->frame_number;
        auto notify_callback = request.output_buffers->at(0)->callback;
        auto pipeline_id = request.output_buffers->at(0)->pipeline_id;

        auto output_buffers = AcquireBuffers(request.output_buffers.get());
        auto input_buffers = AcquireBuffers(request.input_buffers.get());
        if (!output_buffers->empty()) {
          std::unique_ptr<EmulatedSensor::LogicalCameraSettings> logical_settings =
              std::make_unique<EmulatedSensor::LogicalCameraSettings>();

          // Collect the ids of all physical cameras that produce output for
          // this request.
          std::unique_ptr<std::set<uint32_t>> physical_camera_output_ids =
              std::make_unique<std::set<uint32_t>>();
          for (const auto& it : *output_buffers) {
            if (it->camera_id != camera_id_) {
              physical_camera_output_ids->emplace(it->camera_id);
            }
          }

          // Repeating requests usually include valid settings only during the
          // initial call. Afterwards an invalid settings pointer means that
          // there are no changes in the parameters and Hal should re-use the
          // last valid values.
          // TODO: Add support for individual physical camera requests.
          if (request.settings.get() != nullptr) {
            ret = request_state_->InitializeLogicalSettings(
                HalCameraMetadata::Clone(request.settings.get()),
                std::move(physical_camera_output_ids), logical_settings.get());
            last_settings_ = HalCameraMetadata::Clone(request.settings.get());
          } else {
            ret = request_state_->InitializeLogicalSettings(
                HalCameraMetadata::Clone(last_settings_.get()),
                std::move(physical_camera_output_ids), logical_settings.get());
          }

          if (ret == OK) {
            auto result = request_state_->InitializeLogicalResult(pipeline_id,
                                                                  frame_number);
            // The screen rotation will be the same for all logical and physical devices
            uint32_t screen_rotation = screen_rotation_;
            for (auto it = logical_settings->begin();
                 it != logical_settings->end(); it++) {
              it->second.screen_rotation = screen_rotation;
            }

            sensor_->SetCurrentRequest(
                std::move(logical_settings), std::move(result),
                std::move(input_buffers), std::move(output_buffers));
          } else {
            // Settings initialization failed — report an ERROR_RESULT so the
            // framework can complete this frame.
            NotifyMessage msg{.type = MessageType::kError,
                              .message.error = {
                                  .frame_number = frame_number,
                                  .error_stream_id = -1,
                                  .error_code = ErrorCode::kErrorResult,
                              }};

            notify_callback.notify(pipeline_id, msg);
          }
        } else {
          // No further processing is needed, just fail the result which will
          // complete this request.
          NotifyMessage msg{.type = MessageType::kError,
                            .message.error = {
                                .frame_number = frame_number,
                                .error_stream_id = -1,
                                .error_code = ErrorCode::kErrorResult,
                            }};

          notify_callback.notify(pipeline_id, msg);
        }

        pending_requests_.pop();
        // Wake any producer blocked in ProcessPipelineRequests() waiting for
        // a free request slot.
        request_condition_.notify_one();
      }
    }

    // Pace the loop on the sensor vsync, bounded by the maximum frame
    // duration.
    vsync_status_ =
        sensor_->WaitForVSync(EmulatedSensor::kSupportedFrameDurationRange[1]);
  }
}
514 
Initialize(std::unique_ptr<HalCameraMetadata> static_meta,PhysicalDeviceMapPtr physical_devices)515 status_t EmulatedRequestProcessor::Initialize(
516     std::unique_ptr<HalCameraMetadata> static_meta,
517     PhysicalDeviceMapPtr physical_devices) {
518   std::lock_guard<std::mutex> lock(process_mutex_);
519   return request_state_->Initialize(std::move(static_meta),
520                                     std::move(physical_devices));
521 }
522 
SetSessionCallback(const HwlSessionCallback & hwl_session_callback)523 void EmulatedRequestProcessor::SetSessionCallback(
524     const HwlSessionCallback& hwl_session_callback) {
525   std::lock_guard<std::mutex> lock(process_mutex_);
526   session_callback_ = hwl_session_callback;
527 }
528 
GetDefaultRequest(RequestTemplate type,std::unique_ptr<HalCameraMetadata> * default_settings)529 status_t EmulatedRequestProcessor::GetDefaultRequest(
530     RequestTemplate type, std::unique_ptr<HalCameraMetadata>* default_settings) {
531   std::lock_guard<std::mutex> lock(process_mutex_);
532   return request_state_->GetDefaultRequest(type, default_settings);
533 }
534 
onEvent(const Event & e)535 Return<void> EmulatedRequestProcessor::SensorHandler::onEvent(const Event& e) {
536   auto processor = processor_.lock();
537   if (processor.get() == nullptr) {
538     return Void();
539   }
540 
541   if (e.sensorType == SensorType::ACCELEROMETER) {
542     // Heuristic approach for deducing the screen
543     // rotation depending on the reported
544     // accelerometer readings. We switch
545     // the screen rotation when one of the
546     // x/y axis gets close enough to the earth
547     // acceleration.
548     const uint32_t earth_accel = 9;  // Switch threshold [m/s^2]
549     uint32_t x_accel = e.u.vec3.x;
550     uint32_t y_accel = e.u.vec3.y;
551     if (x_accel == earth_accel) {
552       processor->screen_rotation_ = 270;
553     } else if (x_accel == -earth_accel) {
554       processor->screen_rotation_ = 90;
555     } else if (y_accel == -earth_accel) {
556       processor->screen_rotation_ = 180;
557     } else {
558       processor->screen_rotation_ = 0;
559     }
560   } else {
561     ALOGE("%s: unexpected event received type: %d", __func__, e.sensorType);
562   }
563   return Void();
564 }
565 
InitializeSensorQueue(std::weak_ptr<EmulatedRequestProcessor> processor)566 void EmulatedRequestProcessor::InitializeSensorQueue(
567     std::weak_ptr<EmulatedRequestProcessor> processor) {
568   if (sensor_event_queue_.get() != nullptr) {
569     return;
570   }
571 
572   sp<ISensorManager> manager = ISensorManager::getService();
573   if (manager == nullptr) {
574     ALOGE("%s: Cannot get ISensorManager", __func__);
575   } else {
576     bool sensor_found = false;
577     manager->getSensorList([&](const auto& list, auto result) {
578       if (result != Result::OK) {
579         ALOGE("%s: Failed to retrieve sensor list!", __func__);
580       } else {
581         for (const SensorInfo& it : list) {
582           if (it.type == SensorType::ACCELEROMETER) {
583             sensor_found = true;
584             sensor_handle_ = it.sensorHandle;
585           }
586         }
587       }
588     });
589     if (sensor_found) {
590       manager->createEventQueue(
591           new SensorHandler(processor), [&](const auto& q, auto result) {
592             if (result != Result::OK) {
593               ALOGE("%s: Cannot create event queue", __func__);
594               return;
595             }
596             sensor_event_queue_ = q;
597           });
598 
599       if (sensor_event_queue_.get() != nullptr) {
600         auto res = sensor_event_queue_->enableSensor(
601             sensor_handle_,
602             ns2us(EmulatedSensor::kSupportedFrameDurationRange[0]),
603             0 /*maxBatchReportLatencyUs*/);
604         if (res.isOk()) {
605         } else {
606           ALOGE("%s: Failed to enable sensor", __func__);
607         }
608       } else {
609         ALOGE("%s: Failed to create event queue", __func__);
610       }
611     }
612   }
613 }
614 
615 }  // namespace android
616