/*
 * Copyright (C) 2023 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "EvsMockCamera.h"

#include <aidl/android/hardware/automotive/evs/EvsResult.h>

#include <aidlcommonsupport/NativeHandle.h>
#include <android-base/logging.h>
#include <ui/GraphicBufferAllocator.h>
#include <ui/GraphicBufferMapper.h>
#include <utils/SystemClock.h>

#include <cstddef>
#include <cstdint>
#include <memory>
#include <tuple>

namespace {

using ::aidl::android::hardware::graphics::common::BufferUsage;
using ::ndk::ScopedAStatus;

// Colors for the colorbar test pattern in ABGR format
constexpr uint32_t kColors[] = {
        0xFFFFFFFF,  // white
        0xFF00FFFF,  // yellow
        0xFFFFFF00,  // cyan
        0xFF00FF00,  // green
        0xFFFF00FF,  // fuchsia
        0xFF0000FF,  // red
        0xFFFF0000,  // blue
        0xFF000000,  // black
};
constexpr size_t kNumColors = sizeof(kColors) / sizeof(kColors[0]);

}  // namespace

namespace aidl::android::hardware::automotive::evs::implementation {

EvsMockCamera::EvsMockCamera([[maybe_unused]] Sigil sigil, const char* id,
                             std::unique_ptr<ConfigManager::CameraInfo>& camInfo)
    : mCameraInfo(camInfo) {
    LOG(DEBUG) << __FUNCTION__;

    /* set a camera id */
    mDescription.id = id;

    /* set camera metadata */
    if (camInfo) {
        uint8_t* ptr = reinterpret_cast<uint8_t*>(camInfo->characteristics);
        const size_t len = get_camera_metadata_size(camInfo->characteristics);
        mDescription.metadata.insert(mDescription.metadata.end(), ptr, ptr + len);
    }

    // Initialize parameters.
    initializeParameters();
}

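// Registers the controls this mock camera exposes (brightness, contrast, and
// sharpness), each with a 0-255 range, a step of 1, and a default value of 255.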
void EvsMockCamera::initializeParameters() {
    mParams.emplace(
            CameraParam::BRIGHTNESS,
            new CameraParameterDesc(/* min= */ 0, /* max= */ 255, /* step= */ 1, /* value= */ 255));
    mParams.emplace(
            CameraParam::CONTRAST,
            new CameraParameterDesc(/* min= */ 0, /* max= */ 255, /* step= */ 1, /* value= */ 255));
    mParams.emplace(
            CameraParam::SHARPNESS,
            new CameraParameterDesc(/* min= */ 0, /* max= */ 255, /* step= */ 1, /* value= */ 255));
}

// Methods from ::aidl::android::hardware::automotive::evs::IEvsCamera follow.
ScopedAStatus EvsMockCamera::getCameraInfo(CameraDesc* _aidl_return) {
    LOG(DEBUG) << __FUNCTION__;

    // Send back our self description
    *_aidl_return = mDescription;
    return ScopedAStatus::ok();
}

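// Returns the opaque blob previously stored under opaqueIdentifier, or
// EvsResult::INVALID_ARG if no such entry exists.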
ScopedAStatus EvsMockCamera::getExtendedInfo(int32_t opaqueIdentifier,
                                             std::vector<uint8_t>* opaqueValue) {
    const auto it = mExtInfo.find(opaqueIdentifier);
    if (it == mExtInfo.end()) {
        return ScopedAStatus::fromServiceSpecificError(static_cast<int>(EvsResult::INVALID_ARG));
    }
    *opaqueValue = it->second;

    return ScopedAStatus::ok();
}

ScopedAStatus EvsMockCamera::setExtendedInfo(int32_t opaqueIdentifier,
                                             const std::vector<uint8_t>& opaqueValue) {
    mExtInfo.insert_or_assign(opaqueIdentifier, opaqueValue);
    return ScopedAStatus::ok();
}

ScopedAStatus EvsMockCamera::getPhysicalCameraInfo([[maybe_unused]] const std::string& id,
                                                   CameraDesc* _aidl_return) {
    LOG(DEBUG) << __FUNCTION__;

    // This method works exactly the same as getCameraInfo() in the EVS HW module.
    *_aidl_return = mDescription;
    return ScopedAStatus::ok();
}

ScopedAStatus EvsMockCamera::setPrimaryClient() {
    /* Because the EVS HW module reference implementation expects a single client
     * at a time, this always returns a success code.
     */
    return ScopedAStatus::ok();
}

ScopedAStatus EvsMockCamera::forcePrimaryClient(const std::shared_ptr<IEvsDisplay>&) {
    /* Because the EVS HW module reference implementation expects a single client
     * at a time, this always returns a success code.
     */
    return ScopedAStatus::ok();
}

ScopedAStatus EvsMockCamera::unsetPrimaryClient() {
    /* Because the EVS HW module reference implementation expects a single client
     * at a time, there is no chance that this is called by a secondary client, so
     * it always returns a success code.
     */
    return ScopedAStatus::ok();
}

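// Reports the camera controls advertised by the configuration; the output list
// is left unchanged when no configuration is available.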
ScopedAStatus EvsMockCamera::getParameterList(std::vector<CameraParam>* _aidl_return) {
    if (mCameraInfo) {
        _aidl_return->resize(mCameraInfo->controls.size());
        auto idx = 0;
        for (auto& [name, range] : mCameraInfo->controls) {
            (*_aidl_return)[idx++] = name;
        }
    }

    return ScopedAStatus::ok();
}

ScopedAStatus EvsMockCamera::getIntParameterRange(CameraParam id, ParameterRange* _aidl_return) {
    auto it = mParams.find(id);
    if (it == mParams.end()) {
        return ScopedAStatus::fromServiceSpecificError(static_cast<int>(EvsResult::NOT_SUPPORTED));
    }

    _aidl_return->min = it->second->range.min;
    _aidl_return->max = it->second->range.max;
    _aidl_return->step = it->second->range.step;
    return ScopedAStatus::ok();
}

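// Applies a new value to a supported control after rounding it down to the
// nearest multiple of the control's step; values outside the advertised range
// are rejected with EvsResult::INVALID_ARG.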
ScopedAStatus EvsMockCamera::setIntParameter(CameraParam id, int32_t value,
                                             std::vector<int32_t>* effectiveValue) {
    auto it = mParams.find(id);
    if (it == mParams.end()) {
        return ScopedAStatus::fromServiceSpecificError(static_cast<int>(EvsResult::NOT_SUPPORTED));
    }

    // Rounding down to the closest value.
    int32_t candidate = value / it->second->range.step * it->second->range.step;
    if (candidate < it->second->range.min || candidate > it->second->range.max) {
        return ScopedAStatus::fromServiceSpecificError(static_cast<int>(EvsResult::INVALID_ARG));
    }

    it->second->value = candidate;
    effectiveValue->push_back(candidate);
    return ScopedAStatus::ok();
}

ScopedAStatus EvsMockCamera::getIntParameter(CameraParam id, std::vector<int32_t>* value) {
    auto it = mParams.find(id);
    if (it == mParams.end()) {
        return ScopedAStatus::fromServiceSpecificError(static_cast<int>(EvsResult::NOT_SUPPORTED));
    }

    value->push_back(it->second->value);
    return ScopedAStatus::ok();
}

// This is the asynchronous frame generation thread that runs in parallel with the
// main serving thread.  There is one for each active camera instance.
void EvsMockCamera::generateFrames() {
    LOG(DEBUG) << "Frame generation loop started.";

    while (true) {
        const nsecs_t startTime = systemTime(SYSTEM_TIME_MONOTONIC);
        std::size_t bufferId = kInvalidBufferID;
        buffer_handle_t bufferHandle = nullptr;
        {
            std::lock_guard lock(mMutex);
            if (mStreamState != StreamState::RUNNING) {
                break;
            }
            std::tie(bufferId, bufferHandle) = useBuffer_unsafe();
        }

        if (bufferHandle != nullptr) {
            using AidlPixelFormat = ::aidl::android::hardware::graphics::common::PixelFormat;

            // Assemble the buffer description we'll transmit below
            BufferDesc newBuffer = {
                    .buffer =
                            {
                                    .description =
                                            {
                                                    .width = static_cast<int32_t>(mWidth),
                                                    .height = static_cast<int32_t>(mHeight),
                                                    .layers = 1,
                                                    .format = static_cast<AidlPixelFormat>(mFormat),
                                                    .usage = static_cast<BufferUsage>(mUsage),
                                                    .stride = static_cast<int32_t>(mStride),
                                            },
                                    .handle = ::android::dupToAidl(bufferHandle),
                            },
                    .bufferId = static_cast<int32_t>(bufferId),
                    .deviceId = mDescription.id,
                    .timestamp = static_cast<int64_t>(::android::elapsedRealtimeNano() /
                                                      1e+3),  // timestamp is in microseconds
            };

            // Write test data into the image buffer
            fillMockFrame(bufferHandle, reinterpret_cast<const AHardwareBuffer_Desc*>(
                                                &newBuffer.buffer.description));

            std::vector<BufferDesc> frames;
            frames.push_back(std::move(newBuffer));

            // Issue the (asynchronous) callback to the client -- can't be holding the lock
            if (mStream && mStream->deliverFrame(frames).isOk()) {
                LOG(DEBUG) << "Delivered " << bufferHandle << ", id = " << bufferId;
            } else {
                // This can happen if the client dies and is likely unrecoverable.
                // To avoid consuming resources generating failing calls, we stop sending
                // frames.  Note, however, that the stream remains in the "STREAMING" state
                // until cleaned up on the main thread.
                LOG(ERROR) << "Frame delivery call failed in the transport layer.";
                doneWithFrame(frames);
            }
        }

        // We arbitrarily choose to generate frames at 15 fps to ensure we pass the 10 fps test
        // requirement
        static const int kTargetFrameRate = 15;
        static const nsecs_t kTargetFrameIntervalUs = 1000 * 1000 / kTargetFrameRate;
        const nsecs_t now = systemTime(SYSTEM_TIME_MONOTONIC);
        const nsecs_t elapsedTimeUs = (now - startTime) / 1000;
        const nsecs_t sleepDurationUs = kTargetFrameIntervalUs - elapsedTimeUs;
        if (sleepDurationUs > 0) {
            usleep(sleepDurationUs);
        }
    }

    // If we've been asked to stop, send an event to signal the actual end of stream
    EvsEventDesc event = {
            .aType = EvsEventType::STREAM_STOPPED,
    };
    if (!mStream->notify(event).isOk()) {
        ALOGE("Error delivering end of stream marker");
    }
}

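// Paints vertical color bars into the supplied buffer, one kColors entry per
// band, advancing by the gralloc stride (in pixels) between rows.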
void EvsMockCamera::fillMockFrame(buffer_handle_t handle, const AHardwareBuffer_Desc* pDesc) {
    // Lock our output buffer for writing
    uint32_t* pixels = nullptr;
    ::android::GraphicBufferMapper& mapper = ::android::GraphicBufferMapper::get();
    mapper.lock(handle, GRALLOC_USAGE_SW_WRITE_OFTEN | GRALLOC_USAGE_SW_READ_NEVER,
                ::android::Rect(pDesc->width, pDesc->height), (void**)&pixels);

    // If we failed to lock the pixel buffer, log the failure and bail out without drawing
    if (!pixels) {
        ALOGE("Camera failed to gain access to image buffer for writing");
        return;
    }

    // Fill in the test pixels; the colorbar in ABGR format
    for (unsigned row = 0; row < pDesc->height; row++) {
        for (unsigned col = 0; col < pDesc->width; col++) {
            const uint32_t index = col * kNumColors / pDesc->width;
            pixels[col] = kColors[index];
        }
        // Point to the next row
        // NOTE:  stride retrieved from gralloc is in units of pixels
        pixels = pixels + pDesc->stride;
    }

    // Release our output buffer
    mapper.unlock(handle);
}

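// Allocates a single graphics buffer with the camera's current geometry and
// usage flags, and records the stride (in pixels) reported by gralloc.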
::android::status_t EvsMockCamera::allocateOneFrame(buffer_handle_t* handle) {
    static auto& alloc = ::android::GraphicBufferAllocator::get();
    unsigned pixelsPerLine = 0;
    const auto result = alloc.allocate(mWidth, mHeight, mFormat, 1, mUsage, handle, &pixelsPerLine,
                                       0, "EvsMockCamera");
    if (mStride < mWidth) {
        // Gralloc defines stride in terms of pixels per line
        mStride = pixelsPerLine;
    } else if (mStride != pixelsPerLine) {
        LOG(ERROR) << "We did not expect to get buffers with different strides!";
    }
    return result;
}

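// Remembers the receiving stream and launches the frame generation thread.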
bool EvsMockCamera::startVideoStreamImpl_locked(
        const std::shared_ptr<evs::IEvsCameraStream>& receiver, ndk::ScopedAStatus& /* status */,
        std::unique_lock<std::mutex>& /* lck */) {
    mStream = receiver;
    mCaptureThread = std::thread([this]() { generateFrames(); });
    return true;
}

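// Joins the frame generation thread; the lock is dropped while waiting so the
// generator is not blocked on the mutex during shutdown.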
bool EvsMockCamera::stopVideoStreamImpl_locked(ndk::ScopedAStatus& /* status */,
                                               std::unique_lock<std::mutex>& lck) {
    lck.unlock();
    if (mCaptureThread.joinable()) {
        mCaptureThread.join();
    }
    lck.lock();
    return true;
}

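// Runs the base-class teardown and then drops our reference to the client's
// stream callback.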
bool EvsMockCamera::postVideoStreamStop_locked(ndk::ScopedAStatus& status,
                                               std::unique_lock<std::mutex>& lck) {
    if (!Base::postVideoStreamStop_locked(status, lck)) {
        return false;
    }
    mStream = nullptr;
    return true;
}

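// Convenience factory for creating a mock camera without any configuration data.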
std::shared_ptr<EvsMockCamera> EvsMockCamera::Create(const char* deviceName) {
    std::unique_ptr<ConfigManager::CameraInfo> nullCamInfo = nullptr;

    return Create(deviceName, nullCamInfo);
}

std::shared_ptr<EvsMockCamera> EvsMockCamera::Create(
        const char* deviceName, std::unique_ptr<ConfigManager::CameraInfo>& camInfo,
        [[maybe_unused]] const Stream* streamCfg) {
    std::shared_ptr<EvsMockCamera> c =
            ndk::SharedRefBase::make<EvsMockCamera>(Sigil{}, deviceName, camInfo);
    if (!c) {
        LOG(ERROR) << "Failed to instantiate EvsMockCamera.";
        return nullptr;
    }

    // Use the first resolution from the list for testing.
    // TODO(b/214835237): Use the given Stream configuration to choose the best
    // stream configuration.
    auto it = camInfo->streamConfigurations.begin();
    c->mWidth = it->second.width;
    c->mHeight = it->second.height;
    c->mDescription.vendorFlags = 0xFFFFFFFF;  // Arbitrary test value

    c->mFormat = HAL_PIXEL_FORMAT_RGBA_8888;
    c->mUsage = GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_CAMERA_WRITE |
                GRALLOC_USAGE_SW_READ_RARELY | GRALLOC_USAGE_SW_WRITE_RARELY;

    return c;
}

}  // namespace aidl::android::hardware::automotive::evs::implementation