/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
 * Copyright (C) 2019, Google Inc.
 *
 * camera_device.cpp - libcamera Android Camera Device
 */

#include "camera_device.h"

#include <system/camera_metadata.h>

#include "log.h"

#include "thread_rpc.h"

using namespace libcamera;

LOG_DECLARE_CATEGORY(HAL);
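
/*
 * METADATA_ASSERT() is used by the metadata construction helpers below but is
 * not shown in this excerpt. A minimal definition, assuming it is not already
 * provided by camera_device.h, could log the failing call and bail out of the
 * pointer-returning caller whenever add_camera_metadata_entry() reports an
 * error.
 */
#ifndef METADATA_ASSERT
#define METADATA_ASSERT(_r)						\
	do {								\
		if (!(_r))						\
			break;						\
		LOG(HAL, Error) << "Error: " << __func__ << ":" << __LINE__; \
		return nullptr;						\
	} while (0)
#endif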

/*
 * \struct Camera3RequestDescriptor
 *
 * A utility structure that groups information about a capture request to be
 * later reused at request completion time to notify the framework.
 */

CameraDevice::Camera3RequestDescriptor::Camera3RequestDescriptor(
		unsigned int frameNumber, unsigned int numBuffers)
	: frameNumber(frameNumber), numBuffers(numBuffers)
{
	buffers = new camera3_stream_buffer_t[numBuffers];
}

CameraDevice::Camera3RequestDescriptor::~Camera3RequestDescriptor()
{
	delete[] buffers;
}

/*
 * \class CameraDevice
 *
 * The CameraDevice class wraps a libcamera::Camera instance, and implements
 * the camera_device_t interface by handling RPC requests received from its
 * associated CameraProxy.
 *
 * It translates parameters and operations from the Camera HALv3 API to the
 * libcamera ones to provide static information for a Camera, create request
 * templates for it, process capture requests and then deliver capture results
 * back to the framework using the designated callbacks.
 */
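
/*
 * A rough sketch of the expected call order, as seen from this class (the
 * framework reaches these methods through the associated CameraProxy; in
 * particular processCaptureRequest() and close() arrive as ThreadRpc messages
 * handled by call()):
 *
 *   open()                                   acquire the libcamera::Camera
 *   setCallbacks(ops)                        register framework callbacks
 *   getStaticMetadata()                      report static capabilities
 *   constructDefaultRequestSettings(type)    build a request template
 *   configureStreams(stream_list)            apply the stream configuration
 *   processCaptureRequest(request)           one call per capture request
 *   close()                                  stop and release the camera
 *
 * This is an orientation aid, not a contract enforced by this class.
 */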

CameraDevice::CameraDevice(unsigned int id, std::shared_ptr<Camera> &camera)
	: running_(false), camera_(camera), staticMetadata_(nullptr),
	  requestTemplate_(nullptr)
{
	camera_->requestCompleted.connect(this, &CameraDevice::requestComplete);
}

CameraDevice::~CameraDevice()
{
	if (staticMetadata_)
		free_camera_metadata(staticMetadata_);
	staticMetadata_ = nullptr;

	if (requestTemplate_)
		free_camera_metadata(requestTemplate_);
	requestTemplate_ = nullptr;
}

/*
 * Handle RPC request received from the associated proxy.
 */
void CameraDevice::call(ThreadRpc *rpc)
{
	switch (rpc->tag) {
	case ThreadRpc::ProcessCaptureRequest:
		processCaptureRequest(rpc->request);
		break;
	case ThreadRpc::Close:
		close();
		break;
	default:
		LOG(HAL, Error) << "Unknown RPC operation: " << rpc->tag;
	}

	rpc->notifyReception();
}

int CameraDevice::open()
{
	int ret = camera_->acquire();
	if (ret) {
		LOG(HAL, Error) << "Failed to acquire the camera";
		return ret;
	}

	return 0;
}

void CameraDevice::close()
{
	camera_->stop();

	camera_->freeBuffers();
	camera_->release();

	running_ = false;
}

void CameraDevice::setCallbacks(const camera3_callback_ops_t *callbacks)
{
	callbacks_ = callbacks;
}

/*
 * Return static information for the camera.
 */
camera_metadata_t *CameraDevice::getStaticMetadata()
{
	int ret;

	if (staticMetadata_)
		return staticMetadata_;

	/*
	 * The metadata reported here is sufficient to implement a basic
	 * capture example application, but a real camera implementation
	 * will require more.
	 */

	/* \todo Use correct sizes */
	#define STATIC_ENTRY_CAP 256
	#define STATIC_DATA_CAP 6688
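	/*
	 * A more precise sizing could sum, for each tag added below, one
	 * entry plus calculate_camera_metadata_entry_data_size(type, count)
	 * bytes of payload (see system/camera_metadata.h); the fixed
	 * capacities above are a placeholder, as the \todo notes.
	 */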
	camera_metadata_t *staticMetadata =
		allocate_camera_metadata(STATIC_ENTRY_CAP, STATIC_DATA_CAP);

	/* Sensor static metadata. */
	int32_t pixelArraySize[] = {
		2592, 1944,
	};
	ret = add_camera_metadata_entry(staticMetadata,
			ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
			&pixelArraySize, 2);
	METADATA_ASSERT(ret);

	int32_t sensorSizes[] = {
		0, 0, 2560, 1920,
	};
	ret = add_camera_metadata_entry(staticMetadata,
			ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
			&sensorSizes, 4);
	METADATA_ASSERT(ret);

	int32_t sensitivityRange[] = {
		32, 2400,
	};
	ret = add_camera_metadata_entry(staticMetadata,
			ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
			&sensitivityRange, 2);
	METADATA_ASSERT(ret);

	/* ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT is a byte-typed tag. */
	uint8_t filterArr = ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG;
	ret = add_camera_metadata_entry(staticMetadata,
			ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
			&filterArr, 1);
	METADATA_ASSERT(ret);

	int64_t exposureTimeRange[] = {
		100000, 200000000,
	};
	ret = add_camera_metadata_entry(staticMetadata,
			ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
			&exposureTimeRange, 2);
	METADATA_ASSERT(ret);

	int32_t orientation = 0;
	ret = add_camera_metadata_entry(staticMetadata,
			ANDROID_SENSOR_ORIENTATION,
			&orientation, 1);
	METADATA_ASSERT(ret);

	/* Flash static metadata. */
	char flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
	ret = add_camera_metadata_entry(staticMetadata,
			ANDROID_FLASH_INFO_AVAILABLE,
			&flashAvailable, 1);
	METADATA_ASSERT(ret);

	/* Lens static metadata. */
	float fn = 2.53 / 100;
	ret = add_camera_metadata_entry(staticMetadata,
			ANDROID_LENS_INFO_AVAILABLE_APERTURES, &fn, 1);
	METADATA_ASSERT(ret);

	/* Control metadata. */
	char controlMetadata = ANDROID_CONTROL_MODE_AUTO;
	ret = add_camera_metadata_entry(staticMetadata,
			ANDROID_CONTROL_AVAILABLE_MODES,
			&controlMetadata, 1);
	METADATA_ASSERT(ret);

	char availableAntiBandingModes[] = {
		ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,
		ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ,
		ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ,
		ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO,
	};
	ret = add_camera_metadata_entry(staticMetadata,
			ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
			availableAntiBandingModes, 4);
	METADATA_ASSERT(ret);

	char aeAvailableModes[] = {
		ANDROID_CONTROL_AE_MODE_ON,
		ANDROID_CONTROL_AE_MODE_OFF,
	};
	ret = add_camera_metadata_entry(staticMetadata,
			ANDROID_CONTROL_AE_AVAILABLE_MODES,
			aeAvailableModes, 2);
	METADATA_ASSERT(ret);

	controlMetadata = ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE;
	ret = add_camera_metadata_entry(staticMetadata,
			ANDROID_CONTROL_AE_LOCK_AVAILABLE,
			&controlMetadata, 1);
	METADATA_ASSERT(ret);

	uint8_t awbLockAvailable = ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
	ret = add_camera_metadata_entry(staticMetadata,
			ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
			&awbLockAvailable, 1);
	METADATA_ASSERT(ret);

	/* Scaler static metadata. */
	std::vector<uint32_t> availableStreamFormats = {
		ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
		ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
		ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED,
	};
	ret = add_camera_metadata_entry(staticMetadata,
			ANDROID_SCALER_AVAILABLE_FORMATS,
			availableStreamFormats.data(),
			availableStreamFormats.size());
	METADATA_ASSERT(ret);

	std::vector<uint32_t> availableStreamConfigurations = {
		ANDROID_SCALER_AVAILABLE_FORMATS_BLOB, 2560, 1920,
		ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
		ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888, 2560, 1920,
		ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
		ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED, 2560, 1920,
		ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
	};
	ret = add_camera_metadata_entry(staticMetadata,
			ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
			availableStreamConfigurations.data(),
			availableStreamConfigurations.size());
	METADATA_ASSERT(ret);

	std::vector<int64_t> availableStallDurations = {
		ANDROID_SCALER_AVAILABLE_FORMATS_BLOB, 2560, 1920, 33333333,
	};
	ret = add_camera_metadata_entry(staticMetadata,
			ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
			availableStallDurations.data(),
			availableStallDurations.size());
	METADATA_ASSERT(ret);

	std::vector<int64_t> minFrameDurations = {
		ANDROID_SCALER_AVAILABLE_FORMATS_BLOB, 2560, 1920, 33333333,
		ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED, 2560, 1920, 33333333,
		ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888, 2560, 1920, 33333333,
	};
	ret = add_camera_metadata_entry(staticMetadata,
			ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
			minFrameDurations.data(), minFrameDurations.size());
	METADATA_ASSERT(ret);

	/* Info static metadata. */
	uint8_t supportedHWLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
	ret = add_camera_metadata_entry(staticMetadata,
			ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
			&supportedHWLevel, 1);
	METADATA_ASSERT(ret);

	/* Cache the metadata so the early-return above and the destructor work. */
	staticMetadata_ = staticMetadata;

	return staticMetadata_;
}

/*
 * Produce a metadata pack to be used as template for a capture request.
 */
const camera_metadata_t *CameraDevice::constructDefaultRequestSettings(int type)
{
	int ret;

	/*
	 * \todo Inspect type and pick the right metadata pack.
	 * As of now just use a single one for all templates.
	 */
	uint8_t captureIntent;
	switch (type) {
	case CAMERA3_TEMPLATE_PREVIEW:
		captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
		break;
	case CAMERA3_TEMPLATE_STILL_CAPTURE:
		captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
		break;
	case CAMERA3_TEMPLATE_VIDEO_RECORD:
		captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
		break;
	case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
		captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
		break;
	case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
		captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
		break;
	case CAMERA3_TEMPLATE_MANUAL:
		captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
		break;
	default:
		LOG(HAL, Error) << "Invalid template request type: " << type;
		return nullptr;
	}

	if (requestTemplate_)
		return requestTemplate_;

	/* \todo Use correct sizes */
	#define REQUEST_TEMPLATE_ENTRIES 30
	#define REQUEST_TEMPLATE_DATA 2048
	requestTemplate_ = allocate_camera_metadata(REQUEST_TEMPLATE_ENTRIES,
			REQUEST_TEMPLATE_DATA);
	if (!requestTemplate_) {
		LOG(HAL, Error) << "Failed to allocate template metadata";
		return nullptr;
	}

	/*
	 * Report the number of output streams as { raw, processed
	 * non-stalling, processed and stalling (i.e. JPEG) } = { 0, 2, 0 }.
	 */
	int32_t maxOutStream[] = { 0, 2, 0 };
	ret = add_camera_metadata_entry(requestTemplate_,
			ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
			maxOutStream, 3);
	METADATA_ASSERT(ret);

	uint8_t maxPipelineDepth = 5;
	ret = add_camera_metadata_entry(requestTemplate_,
			ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
			&maxPipelineDepth, 1);
	METADATA_ASSERT(ret);

	int32_t inputStreams = 0;
	ret = add_camera_metadata_entry(requestTemplate_,
			ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
			&inputStreams, 1);
	METADATA_ASSERT(ret);

	int32_t partialResultCount = 1;
	ret = add_camera_metadata_entry(requestTemplate_,
			ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
			&partialResultCount, 1);
	METADATA_ASSERT(ret);

	uint8_t availableCapabilities[] = {
		ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE,
	};
	ret = add_camera_metadata_entry(requestTemplate_,
			ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
			availableCapabilities, 1);
	METADATA_ASSERT(ret);

	uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
	ret = add_camera_metadata_entry(requestTemplate_,
			ANDROID_CONTROL_AE_MODE,
			&aeMode, 1);
	METADATA_ASSERT(ret);

	int32_t aeExposureCompensation = 0;
	ret = add_camera_metadata_entry(requestTemplate_,
			ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
			&aeExposureCompensation, 1);
	METADATA_ASSERT(ret);

	uint8_t aePrecaptureTrigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
	ret = add_camera_metadata_entry(requestTemplate_,
			ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
			&aePrecaptureTrigger, 1);
	METADATA_ASSERT(ret);

	uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
	ret = add_camera_metadata_entry(requestTemplate_,
			ANDROID_CONTROL_AE_LOCK,
			&aeLock, 1);
	METADATA_ASSERT(ret);

	uint8_t afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
	ret = add_camera_metadata_entry(requestTemplate_,
			ANDROID_CONTROL_AF_TRIGGER,
			&afTrigger, 1);
	METADATA_ASSERT(ret);

	uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
	ret = add_camera_metadata_entry(requestTemplate_,
			ANDROID_CONTROL_AWB_MODE,
			&awbMode, 1);
	METADATA_ASSERT(ret);

	uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
	ret = add_camera_metadata_entry(requestTemplate_,
			ANDROID_CONTROL_AWB_LOCK,
			&awbLock, 1);
	METADATA_ASSERT(ret);

	uint8_t awbLockAvailable = ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
	ret = add_camera_metadata_entry(requestTemplate_,
			ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
			&awbLockAvailable, 1);
	METADATA_ASSERT(ret);

	uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
	ret = add_camera_metadata_entry(requestTemplate_,
			ANDROID_FLASH_MODE,
			&flashMode, 1);
	METADATA_ASSERT(ret);

	uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
	ret = add_camera_metadata_entry(requestTemplate_,
			ANDROID_STATISTICS_FACE_DETECT_MODE,
			&faceDetectMode, 1);
	METADATA_ASSERT(ret);

	ret = add_camera_metadata_entry(requestTemplate_,
			ANDROID_CONTROL_CAPTURE_INTENT,
			&captureIntent, 1);
	METADATA_ASSERT(ret);

	/*
	 * This is quite hard to list at the moment without knowing what
	 * we could control.
	 *
	 * For now, just list in the available request keys and in the
	 * available result keys the control and reporting of the AE
	 * algorithm.
	 */
	std::vector<int32_t> availableRequestKeys = {
		ANDROID_CONTROL_AE_MODE,
		ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
		ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
		ANDROID_CONTROL_AE_LOCK,
		ANDROID_CONTROL_AF_TRIGGER,
		ANDROID_CONTROL_AWB_MODE,
		ANDROID_CONTROL_AWB_LOCK,
		ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
		ANDROID_CONTROL_CAPTURE_INTENT,
		ANDROID_FLASH_MODE,
		ANDROID_STATISTICS_FACE_DETECT_MODE,
	};

	ret = add_camera_metadata_entry(requestTemplate_,
			ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
			availableRequestKeys.data(),
			availableRequestKeys.size());
	METADATA_ASSERT(ret);

	std::vector<int32_t> availableResultKeys = {
		ANDROID_CONTROL_AE_MODE,
		ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
		ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
		ANDROID_CONTROL_AE_LOCK,
		ANDROID_CONTROL_AF_TRIGGER,
		ANDROID_CONTROL_AWB_MODE,
		ANDROID_CONTROL_AWB_LOCK,
		ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
		ANDROID_CONTROL_CAPTURE_INTENT,
		ANDROID_FLASH_MODE,
		ANDROID_STATISTICS_FACE_DETECT_MODE,
	};
	ret = add_camera_metadata_entry(requestTemplate_,
			ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
			availableResultKeys.data(),
			availableResultKeys.size());
	METADATA_ASSERT(ret);

	/*
	 * \todo The available characteristics are the tags reported as part
	 * of the static metadata reported at hal_get_camera_info() time.
	 * As of now, report an empty list.
	 */
	std::vector<int32_t> availableCharacteristicsKeys = {};
	ret = add_camera_metadata_entry(requestTemplate_,
			ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
			availableCharacteristicsKeys.data(),
			availableCharacteristicsKeys.size());
	METADATA_ASSERT(ret);

	return requestTemplate_;
}

/*
 * Inspect the stream_list to produce a list of StreamConfiguration to be
 * used to configure the Camera.
 */
int CameraDevice::configureStreams(camera3_stream_configuration_t *stream_list)
{
	for (unsigned int i = 0; i < stream_list->num_streams; ++i) {
		camera3_stream_t *stream = stream_list->streams[i];

		LOG(HAL, Info) << "Stream #" << i
			<< ", direction: " << stream->stream_type
			<< ", width: " << stream->width
			<< ", height: " << stream->height
			<< ", format: " << std::hex << stream->format;
	}

	/* Hardcode the viewfinder role, collecting sizes from the stream config. */
	if (stream_list->num_streams != 1) {
		LOG(HAL, Error) << "Only one stream supported";
		return -EINVAL;
	}

	StreamRoles roles = { StreamRole::Viewfinder };
	config_ = camera_->generateConfiguration(roles);
	if (!config_ || config_->empty()) {
		LOG(HAL, Error) << "Failed to generate camera configuration";
		return -EINVAL;
	}

	/* Only one stream is supported. */
	camera3_stream_t *camera3Stream = stream_list->streams[0];
	StreamConfiguration *streamConfiguration = &config_->at(0);
	streamConfiguration->size.width = camera3Stream->width;
	streamConfiguration->size.height = camera3Stream->height;
	streamConfiguration->memoryType = ExternalMemory;

	/*
	 * \todo We'll need to translate from Android defined pixel format codes
	 * to the libcamera image format codes. For now, do not change the
	 * format returned from Camera::generateConfiguration().
	 */

	switch (config_->validate()) {
	case CameraConfiguration::Valid:
		break;
	case CameraConfiguration::Adjusted:
		LOG(HAL, Info) << "Camera configuration adjusted";
		config_.reset();
		return -EINVAL;
	case CameraConfiguration::Invalid:
		LOG(HAL, Info) << "Camera configuration invalid";
		config_.reset();
		return -EINVAL;
	}

	camera3Stream->max_buffers = streamConfiguration->bufferCount;

	/*
	 * Once the CameraConfiguration has been adjusted/validated
	 * it can be applied to the camera.
	 */
	int ret = camera_->configure(config_.get());
	if (ret) {
		LOG(HAL, Error) << "Failed to configure camera '"
			<< camera_->name() << "'";
		return ret;
	}

	return 0;
}

int CameraDevice::processCaptureRequest(camera3_capture_request_t *camera3Request)
{
	StreamConfiguration *streamConfiguration = &config_->at(0);
	Stream *stream = streamConfiguration->stream();

	if (camera3Request->num_output_buffers != 1) {
		LOG(HAL, Error) << "Invalid number of output buffers: "
			<< camera3Request->num_output_buffers;
		return -EINVAL;
	}

	/* Start the camera if that's the first request we handle. */
	if (!running_) {
		int ret = camera_->allocateBuffers();
		if (ret) {
			LOG(HAL, Error) << "Failed to allocate buffers";
			return ret;
		}

		ret = camera_->start();
		if (ret) {
			LOG(HAL, Error) << "Failed to start camera";
			camera_->freeBuffers();
			return ret;
		}

		running_ = true;
	}

	/*
	 * Queue a request for the Camera with the provided dmabuf file
	 * descriptors.
	 */
	const camera3_stream_buffer_t *camera3Buffers =
		camera3Request->output_buffers;

	/*
	 * Save the request descriptors for use at completion time.
	 * The descriptor and the associated memory reserved here are freed
	 * at request complete time.
	 */
	Camera3RequestDescriptor *descriptor =
		new Camera3RequestDescriptor(camera3Request->frame_number,
					     camera3Request->num_output_buffers);
	for (unsigned int i = 0; i < descriptor->numBuffers; ++i) {
		/*
		 * Keep track of which stream the request belongs to and store
		 * the native buffer handles.
		 *
		 * \todo Currently we only support one capture buffer. Copy
		 * all of them to be ready for when we support more.
		 */
		descriptor->buffers[i].stream = camera3Buffers[i].stream;
		descriptor->buffers[i].buffer = camera3Buffers[i].buffer;
	}

	/*
	 * Create a libcamera buffer using the dmabuf descriptors of the first
	 * and (currently) only supported request buffer.
	 */
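	/*
	 * Note: the plane count and fd layout of the native handle are
	 * gralloc-implementation specific; the code below assumes the handle
	 * carries up to three dmabuf file descriptors in data[0..2].
	 */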
	const buffer_handle_t camera3Handle = *camera3Buffers[0].buffer;
	std::array<int, 3> fds = {
		camera3Handle->data[0],
		camera3Handle->data[1],
		camera3Handle->data[2],
	};

	std::unique_ptr<Buffer> buffer = stream->createBuffer(fds);
	if (!buffer) {
		LOG(HAL, Error) << "Failed to create buffer";
		delete descriptor;
		return -EINVAL;
	}

	Request *request =
		camera_->createRequest(reinterpret_cast<uint64_t>(descriptor));
	request->addBuffer(std::move(buffer));

	int ret = camera_->queueRequest(request);
	if (ret) {
		LOG(HAL, Error) << "Failed to queue request";
		goto error;
	}

	return 0;

error:
	delete request;
	delete descriptor;

	return ret;
}

void CameraDevice::requestComplete(Request *request,
				   const std::map<Stream *, Buffer *> &buffers)
{
	Buffer *libcameraBuffer = buffers.begin()->second;
	camera3_buffer_status status = CAMERA3_BUFFER_STATUS_OK;
	camera_metadata_t *resultMetadata = nullptr;

	if (request->status() != Request::RequestComplete) {
		LOG(HAL, Error) << "Request not successfully completed: "
			<< request->status();
		status = CAMERA3_BUFFER_STATUS_ERROR;
	}

	/* Prepare to call back the Android camera stack. */
	Camera3RequestDescriptor *descriptor =
		reinterpret_cast<Camera3RequestDescriptor *>(request->cookie());

	camera3_capture_result_t captureResult = {};
	captureResult.frame_number = descriptor->frameNumber;
	captureResult.num_output_buffers = descriptor->numBuffers;
	for (unsigned int i = 0; i < descriptor->numBuffers; ++i) {
		/*
		 * \todo Currently we only support one capture buffer. Prepare
		 * all of them to be ready for when we support more.
		 */
		descriptor->buffers[i].acquire_fence = -1;
		descriptor->buffers[i].release_fence = -1;
		descriptor->buffers[i].status = status;
	}
	captureResult.output_buffers =
		const_cast<const camera3_stream_buffer_t *>(descriptor->buffers);

	if (status == CAMERA3_BUFFER_STATUS_ERROR) {
		/* \todo Improve error handling. */
		notifyError(descriptor->frameNumber,
			    descriptor->buffers[0].stream);
	} else {
		notifyShutter(descriptor->frameNumber,
			      libcameraBuffer->timestamp());

		captureResult.partial_result = 1;
		resultMetadata = getResultMetadata(descriptor->frameNumber,
						   libcameraBuffer->timestamp());
		captureResult.result = resultMetadata;
	}

	callbacks_->process_capture_result(callbacks_, &captureResult);

	delete descriptor;
	if (resultMetadata)
		free_camera_metadata(resultMetadata);
}

void CameraDevice::notifyShutter(uint32_t frameNumber, uint64_t timestamp)
{
	camera3_notify_msg_t notify = {};

	notify.type = CAMERA3_MSG_SHUTTER;
	notify.message.shutter.frame_number = frameNumber;
	notify.message.shutter.timestamp = timestamp;

	callbacks_->notify(callbacks_, &notify);
}

void CameraDevice::notifyError(uint32_t frameNumber, camera3_stream_t *stream)
{
	camera3_notify_msg_t notify = {};

	notify.type = CAMERA3_MSG_ERROR;
	notify.message.error.error_stream = stream;
	notify.message.error.frame_number = frameNumber;
	notify.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;

	callbacks_->notify(callbacks_, &notify);
}

/*
 * Produce a set of fixed result metadata.
 */
camera_metadata_t *CameraDevice::getResultMetadata(int frame_number,
						   int64_t timestamp)
{
	int ret;

	/* \todo Use correct sizes */
	#define RESULT_ENTRY_CAP 256
	#define RESULT_DATA_CAP 6688
	camera_metadata_t *resultMetadata =
		allocate_camera_metadata(RESULT_ENTRY_CAP, RESULT_DATA_CAP);

	const uint8_t ae_state = ANDROID_CONTROL_AE_STATE_CONVERGED;
	ret = add_camera_metadata_entry(resultMetadata, ANDROID_CONTROL_AE_STATE,
			&ae_state, 1);
	METADATA_ASSERT(ret);

	const uint8_t ae_lock = ANDROID_CONTROL_AE_LOCK_OFF;
	ret = add_camera_metadata_entry(resultMetadata, ANDROID_CONTROL_AE_LOCK,
			&ae_lock, 1);
	METADATA_ASSERT(ret);

	uint8_t af_state = ANDROID_CONTROL_AF_STATE_INACTIVE;
	ret = add_camera_metadata_entry(resultMetadata, ANDROID_CONTROL_AF_STATE,
			&af_state, 1);
	METADATA_ASSERT(ret);

	const uint8_t awb_state = ANDROID_CONTROL_AWB_STATE_CONVERGED;
	ret = add_camera_metadata_entry(resultMetadata,
			ANDROID_CONTROL_AWB_STATE,
			&awb_state, 1);
	METADATA_ASSERT(ret);

	const uint8_t awb_lock = ANDROID_CONTROL_AWB_LOCK_OFF;
	ret = add_camera_metadata_entry(resultMetadata,
			ANDROID_CONTROL_AWB_LOCK,
			&awb_lock, 1);
	METADATA_ASSERT(ret);

	const uint8_t lens_state = ANDROID_LENS_STATE_STATIONARY;
	ret = add_camera_metadata_entry(resultMetadata,
			ANDROID_LENS_STATE,
			&lens_state, 1);
	METADATA_ASSERT(ret);

	int32_t sensorSizes[] = {
		0, 0, 2560, 1920,
	};
	ret = add_camera_metadata_entry(resultMetadata,
			ANDROID_SCALER_CROP_REGION,
			sensorSizes, 4);
	METADATA_ASSERT(ret);

	ret = add_camera_metadata_entry(resultMetadata,
			ANDROID_SENSOR_TIMESTAMP,
			&timestamp, 1);
	METADATA_ASSERT(ret);

	/* 33.3 msec */
	const int64_t rolling_shutter_skew = 33300000;
	ret = add_camera_metadata_entry(resultMetadata,
			ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
			&rolling_shutter_skew, 1);
	METADATA_ASSERT(ret);

	/* 16.6 msec */
	const int64_t exposure_time = 16600000;
	ret = add_camera_metadata_entry(resultMetadata,
			ANDROID_SENSOR_EXPOSURE_TIME,
			&exposure_time, 1);
	METADATA_ASSERT(ret);

	const uint8_t lens_shading_map_mode =
		ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
	ret = add_camera_metadata_entry(resultMetadata,
			ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
			&lens_shading_map_mode, 1);
	METADATA_ASSERT(ret);

	const uint8_t scene_flicker = ANDROID_STATISTICS_SCENE_FLICKER_NONE;
	ret = add_camera_metadata_entry(resultMetadata,
			ANDROID_STATISTICS_SCENE_FLICKER,
			&scene_flicker, 1);
	METADATA_ASSERT(ret);

	return resultMetadata;
}