repos/libcamera/src/android/camera_device.h
/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
 * Copyright (C) 2019, Google Inc.
 *
 * libcamera Android Camera Device
 */

#pragma once

#include <map>
#include <memory>
#include <queue>
#include <vector>

#include <hardware/camera3.h>

#include <libcamera/base/class.h>
#include <libcamera/base/log.h>
#include <libcamera/base/message.h>
#include <libcamera/base/mutex.h>

#include <libcamera/camera.h>
#include <libcamera/framebuffer.h>
#include <libcamera/geometry.h>
#include <libcamera/pixel_format.h>
#include <libcamera/request.h>
#include <libcamera/stream.h>

#include "camera_capabilities.h"
#include "camera_metadata.h"
#include "camera_stream.h"
#include "hal_framebuffer.h"
#include "jpeg/encoder.h"

class Camera3RequestDescriptor;
struct CameraConfigData;

class CameraDevice : protected libcamera::Loggable
{
public:
	static std::unique_ptr<CameraDevice> create(unsigned int id,
						    std::shared_ptr<libcamera::Camera> cam);
	~CameraDevice();

	int initialize(const CameraConfigData *cameraConfigData);

	int open(const hw_module_t *hardwareModule);
	void close();
	void flush();

	unsigned int id() const { return id_; }
	camera3_device_t *camera3Device() { return &camera3Device_; }
	const CameraCapabilities *capabilities() const { return &capabilities_; }
	const std::shared_ptr<libcamera::Camera> &camera() const { return camera_; }

	const std::string &maker() const { return maker_; }
	const std::string &model() const { return model_; }
	int facing() const { return facing_; }
	int orientation() const { return orientation_; }
	unsigned int maxJpegBufferSize() const;

	void setCallbacks(const camera3_callback_ops_t *callbacks);
	const camera_metadata_t *getStaticMetadata();
	const camera_metadata_t *constructDefaultRequestSettings(int type);
	int configureStreams(camera3_stream_configuration_t *stream_list);
	int processCaptureRequest(camera3_capture_request_t *request);
	void requestComplete(libcamera::Request *request);
	void streamProcessingComplete(Camera3RequestDescriptor::StreamBuffer *bufferStream,
				      Camera3RequestDescriptor::Status status);

protected:
	std::string logPrefix() const override;

private:
	LIBCAMERA_DISABLE_COPY_AND_MOVE(CameraDevice)

	CameraDevice(unsigned int id, std::shared_ptr<libcamera::Camera> camera);

	enum class State {
		Stopped,
		Flushing,
		Running,
	};

	void stop() LIBCAMERA_TSA_EXCLUDES(stateMutex_);

	std::unique_ptr<HALFrameBuffer>
	createFrameBuffer(const buffer_handle_t camera3buffer,
			  libcamera::PixelFormat pixelFormat,
			  const libcamera::Size &size);
	void abortRequest(Camera3RequestDescriptor *descriptor) const;
	bool isValidRequest(camera3_capture_request_t *request) const;
	void notifyShutter(uint32_t frameNumber, uint64_t timestamp);
	void notifyError(uint32_t frameNumber, camera3_stream_t *stream,
			 camera3_error_msg_code code) const;
	int processControls(Camera3RequestDescriptor *descriptor);
	void completeDescriptor(Camera3RequestDescriptor *descriptor)
		LIBCAMERA_TSA_EXCLUDES(descriptorsMutex_);
	void sendCaptureResults() LIBCAMERA_TSA_REQUIRES(descriptorsMutex_);
	void setBufferStatus(Camera3RequestDescriptor::StreamBuffer &buffer,
			     Camera3RequestDescriptor::Status status);
	std::unique_ptr<CameraMetadata> getResultMetadata(
		const Camera3RequestDescriptor &descriptor) const;

	unsigned int id_;
	camera3_device_t camera3Device_;

	/* Protects access to the camera state. */
	libcamera::Mutex stateMutex_;
	State state_ LIBCAMERA_TSA_GUARDED_BY(stateMutex_);

	std::shared_ptr<libcamera::Camera> camera_;
	std::unique_ptr<libcamera::CameraConfiguration> config_;
	CameraCapabilities capabilities_;

	std::map<unsigned int, std::unique_ptr<CameraMetadata>> requestTemplates_;
	const camera3_callback_ops_t *callbacks_;

	std::vector<CameraStream> streams_;

	libcamera::Mutex descriptorsMutex_ LIBCAMERA_TSA_ACQUIRED_AFTER(stateMutex_);
	std::queue<std::unique_ptr<Camera3RequestDescriptor>> descriptors_
		LIBCAMERA_TSA_GUARDED_BY(descriptorsMutex_);

	std::string maker_;
	std::string model_;

	int facing_;
	int orientation_;

	CameraMetadata lastSettings_;
};
repos/libcamera/src/android/camera_hal_config.h
/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
 * Copyright (C) 2021, Google Inc.
 *
 * Camera HAL configuration file manager
 */

#pragma once

#include <map>
#include <string>

#include <libcamera/base/class.h>

struct CameraConfigData {
	int facing = -1;
	int rotation = -1;
};

class CameraHalConfig final : public libcamera::Extensible
{
	LIBCAMERA_DECLARE_PRIVATE()

public:
	CameraHalConfig();

	bool exists() const { return exists_; }
	bool isValid() const { return valid_; }

	const CameraConfigData *cameraConfigData(const std::string &cameraId) const;

private:
	bool exists_;
	bool valid_;
	std::map<std::string, CameraConfigData> cameras_;

	int parseConfigurationFile();
};
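A minimal usage sketch of this interface, assuming a made-up camera ID and helper name (neither is part of the tree):

#include <iostream>
#include <string>

#include "camera_hal_config.h"

/* Hypothetical helper: print the overrides recorded for one camera. */
static void printCameraOverrides(const CameraHalConfig &config,
				 const std::string &cameraId)
{
	if (!config.exists() || !config.isValid())
		return;

	const CameraConfigData *data = config.cameraConfigData(cameraId);
	if (!data)
		return;

	/* -1 is the struct default, meaning the property was not set. */
	std::cout << cameraId << ": facing=" << data->facing
		  << " rotation=" << data->rotation << std::endl;
}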
repos/libcamera/src/android/camera_stream.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
 * Copyright (C) 2020, Google Inc.
 *
 * Camera HAL stream
 */

#include "camera_stream.h"

#include <errno.h>
#include <string.h>
#include <sys/mman.h>
#include <sys/poll.h>
#include <unistd.h>

#include <libcamera/formats.h>

#include "jpeg/post_processor_jpeg.h"
#include "yuv/post_processor_yuv.h"

#include "camera_buffer.h"
#include "camera_capabilities.h"
#include "camera_device.h"
#include "camera_metadata.h"
#include "frame_buffer_allocator.h"
#include "post_processor.h"

using namespace libcamera;

LOG_DECLARE_CATEGORY(HAL)

/*
 * \class CameraStream
 * \brief Map a camera3_stream_t to a StreamConfiguration
 *
 * The CameraStream class maps a camera3_stream_t provided by the Android
 * camera framework to a libcamera::StreamConfiguration.
 *
 * The StreamConfiguration is represented by its index as recorded in the
 * CameraConfiguration and not by pointer as StreamConfiguration is subject to
 * relocation.
 *
 * A single StreamConfiguration may be used to deliver one or more streams to
 * the Android framework. The mapping type between a camera3 stream and a
 * StreamConfiguration is described by the CameraStream::Type.
 *
 * CameraStream handles all the aspects of producing a stream with the size
 * and format requested by the camera3 stream from the data produced by
 * the associated libcamera::Stream, including the creation of the encoder
 * and buffer allocation.
 */

CameraStream::CameraStream(CameraDevice *const cameraDevice,
			   CameraConfiguration *config, Type type,
			   camera3_stream_t *camera3Stream,
			   CameraStream *const sourceStream, unsigned int index)
	: cameraDevice_(cameraDevice), config_(config), type_(type),
	  camera3Stream_(camera3Stream), sourceStream_(sourceStream),
	  index_(index)
{
}

CameraStream::CameraStream(CameraStream &&other) = default;

CameraStream::~CameraStream()
{
	/*
	 * Manually delete buffers and then the allocator to make sure buffers
	 * are released while the allocator is still valid.
	 */
	allocatedBuffers_.clear();
	allocator_.reset();
}

const StreamConfiguration &CameraStream::configuration() const
{
	return config_->at(index_);
}

Stream *CameraStream::stream() const
{
	return configuration().stream();
}

int CameraStream::configure()
{
	if (type_ == Type::Internal || type_ == Type::Mapped) {
		const PixelFormat outFormat =
			cameraDevice_->capabilities()->toPixelFormat(camera3Stream_->format);
		StreamConfiguration output = configuration();
		output.pixelFormat = outFormat;
		output.size.width = camera3Stream_->width;
		output.size.height = camera3Stream_->height;

		switch (outFormat) {
		case formats::NV12:
			postProcessor_ = std::make_unique<PostProcessorYuv>();
			break;
		case formats::MJPEG:
			postProcessor_ = std::make_unique<PostProcessorJpeg>(cameraDevice_);
			break;
		default:
			LOG(HAL, Error) << "Unsupported format: " << outFormat;
			return -EINVAL;
		}

		int ret = postProcessor_->configure(configuration(), output);
		if (ret)
			return ret;

		worker_ = std::make_unique<PostProcessorWorker>(postProcessor_.get());
		postProcessor_->processComplete.connect(
			this, [&](Camera3RequestDescriptor::StreamBuffer *streamBuffer,
				  PostProcessor::Status status) {
				Camera3RequestDescriptor::Status bufferStatus;

				if (status == PostProcessor::Status::Success)
					bufferStatus = Camera3RequestDescriptor::Status::Success;
				else
					bufferStatus = Camera3RequestDescriptor::Status::Error;

				cameraDevice_->streamProcessingComplete(streamBuffer,
									bufferStatus);
			});

		worker_->start();
	}

	allocator_ = std::make_unique<PlatformFrameBufferAllocator>(cameraDevice_);
	mutex_ = std::make_unique<Mutex>();

	camera3Stream_->max_buffers = configuration().bufferCount;

	return 0;
}

int CameraStream::waitFence(int fence)
{
	/*
	 * \todo The implementation here is copied from camera_worker.cpp
	 * and both should be removed once libcamera is instrumented to handle
	 * fences waiting in the core.
	 *
	 * \todo Better characterize the timeout. Currently equal to the one
	 * used by the Rockchip Camera HAL on ChromeOS.
	 */
	constexpr unsigned int timeoutMs = 300;
	struct pollfd fds = { fence, POLLIN, 0 };

	do {
		int ret = poll(&fds, 1, timeoutMs);
		if (ret == 0)
			return -ETIME;

		if (ret > 0) {
			if (fds.revents & (POLLERR | POLLNVAL))
				return -EINVAL;

			return 0;
		}
	} while (errno == EINTR || errno == EAGAIN);

	return -errno;
}

int CameraStream::process(Camera3RequestDescriptor::StreamBuffer *streamBuffer)
{
	ASSERT(type_ != Type::Direct);

	/* Handle waiting on fences on the destination buffer. */
	if (streamBuffer->fence.isValid()) {
		int ret = waitFence(streamBuffer->fence.get());
		if (ret < 0) {
			LOG(HAL, Error) << "Failed waiting for fence: "
					<< streamBuffer->fence.get() << ": "
					<< strerror(-ret);
			return ret;
		}

		streamBuffer->fence.reset();
	}

	const StreamConfiguration &output = configuration();
	streamBuffer->dstBuffer = std::make_unique<CameraBuffer>(
		*streamBuffer->camera3Buffer, output.pixelFormat, output.size,
		PROT_READ | PROT_WRITE);
	if (!streamBuffer->dstBuffer->isValid()) {
		LOG(HAL, Error) << "Failed to create destination buffer";
		return -EINVAL;
	}

	worker_->queueRequest(streamBuffer);

	return 0;
}

void CameraStream::flush()
{
	if (!postProcessor_)
		return;

	worker_->flush();
}

FrameBuffer *CameraStream::getBuffer()
{
	if (!allocator_)
		return nullptr;

	MutexLocker locker(*mutex_);

	if (buffers_.empty()) {
		/*
		 * Use HAL_PIXEL_FORMAT_YCBCR_420_888 unconditionally.
		 *
		 * YCBCR_420 is the source format for both the JPEG and the YUV
		 * post-processors.
		 *
		 * \todo Store a reference to the format of the source stream
		 * instead of hardcoding.
		 */
		auto frameBuffer = allocator_->allocate(HAL_PIXEL_FORMAT_YCBCR_420_888,
							configuration().size,
							camera3Stream_->usage);
		allocatedBuffers_.push_back(std::move(frameBuffer));
		buffers_.emplace_back(allocatedBuffers_.back().get());
	}

	FrameBuffer *buffer = buffers_.back();
	buffers_.pop_back();

	return buffer;
}

void CameraStream::putBuffer(FrameBuffer *buffer)
{
	if (!allocator_)
		return;

	MutexLocker locker(*mutex_);

	buffers_.push_back(buffer);
}

/**
 * \class CameraStream::PostProcessorWorker
 * \brief Post-process a CameraStream in an internal thread
 *
 * If the association between CameraStream and camera3_stream_t dictated by
 * CameraStream::Type is internal or mapped, the stream is generated by post
 * processing of a libcamera stream. Such a request is queued to a
 * PostProcessorWorker in CameraStream::process(). A queue of post-processing
 * requests is maintained by the PostProcessorWorker and it will run the
 * post-processing on an internal thread as soon as any request is available on
 * its queue.
 */
CameraStream::PostProcessorWorker::PostProcessorWorker(PostProcessor *postProcessor)
	: postProcessor_(postProcessor)
{
}

CameraStream::PostProcessorWorker::~PostProcessorWorker()
{
	{
		MutexLocker lock(mutex_);
		state_ = State::Stopped;
	}

	cv_.notify_one();
	wait();
}

void CameraStream::PostProcessorWorker::start()
{
	{
		MutexLocker lock(mutex_);
		ASSERT(state_ != State::Running);
		state_ = State::Running;
	}

	Thread::start();
}

void CameraStream::PostProcessorWorker::queueRequest(Camera3RequestDescriptor::StreamBuffer *dest)
{
	{
		MutexLocker lock(mutex_);
		ASSERT(state_ == State::Running);
		requests_.push(dest);
	}

	cv_.notify_one();
}

void CameraStream::PostProcessorWorker::run()
{
	MutexLocker locker(mutex_);

	while (1) {
		cv_.wait(locker, [&]() LIBCAMERA_TSA_REQUIRES(mutex_) {
			return state_ != State::Running || !requests_.empty();
		});

		if (state_ != State::Running)
			break;

		Camera3RequestDescriptor::StreamBuffer *streamBuffer = requests_.front();
		requests_.pop();
		locker.unlock();

		postProcessor_->process(streamBuffer);

		locker.lock();
	}

	if (state_ == State::Flushing) {
		std::queue<Camera3RequestDescriptor::StreamBuffer *> requests =
			std::move(requests_);
		locker.unlock();

		while (!requests.empty()) {
			postProcessor_->processComplete.emit(
				requests.front(), PostProcessor::Status::Error);
			requests.pop();
		}

		locker.lock();
		state_ = State::Stopped;
	}
}

void CameraStream::PostProcessorWorker::flush()
{
	MutexLocker lock(mutex_);
	state_ = State::Flushing;
	lock.unlock();

	cv_.notify_one();
}
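PostProcessorWorker above is an instance of the classic condition-variable worker-queue pattern. For illustration only, a stand-alone sketch of the same pattern built on the standard library instead of libcamera's Thread and Mutex wrappers (all names here are invented):

#include <condition_variable>
#include <functional>
#include <mutex>
#include <queue>
#include <thread>

/* Illustrative stand-alone worker: runs queued jobs on one thread. */
class Worker
{
public:
	Worker() : thread_([this] { run(); }) {}

	~Worker()
	{
		{
			std::lock_guard<std::mutex> lock(mutex_);
			stopped_ = true;
		}
		cv_.notify_one();
		thread_.join();
	}

	void queue(std::function<void()> job)
	{
		{
			std::lock_guard<std::mutex> lock(mutex_);
			jobs_.push(std::move(job));
		}
		cv_.notify_one();
	}

private:
	void run()
	{
		std::unique_lock<std::mutex> lock(mutex_);
		while (true) {
			cv_.wait(lock, [&] { return stopped_ || !jobs_.empty(); });
			if (stopped_)
				break;

			auto job = std::move(jobs_.front());
			jobs_.pop();

			/* Run the job with the lock dropped, as run() above does. */
			lock.unlock();
			job();
			lock.lock();
		}
	}

	std::mutex mutex_;
	std::condition_variable cv_;
	std::queue<std::function<void()>> jobs_;
	bool stopped_ = false;
	std::thread thread_; /* Started last, after the members it uses. */
};

Unlike this sketch, the real worker also drains its queue on flush, emitting processComplete with an Error status for every pending request.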
repos/libcamera/src/android/camera_capabilities.h
/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
 * Copyright (C) 2021, Google Inc.
 *
 * Camera static properties manager
 */

#pragma once

#include <map>
#include <memory>
#include <set>
#include <vector>

#include <libcamera/base/class.h>

#include <libcamera/camera.h>
#include <libcamera/formats.h>
#include <libcamera/geometry.h>

#include "camera_metadata.h"

class CameraCapabilities
{
public:
	CameraCapabilities() = default;

	int initialize(std::shared_ptr<libcamera::Camera> camera,
		       int orientation, int facing);

	CameraMetadata *staticMetadata() const { return staticMetadata_.get(); }
	libcamera::PixelFormat toPixelFormat(int format) const;
	unsigned int maxJpegBufferSize() const { return maxJpegBufferSize_; }

	std::unique_ptr<CameraMetadata> requestTemplateManual() const;
	std::unique_ptr<CameraMetadata> requestTemplatePreview() const;
	std::unique_ptr<CameraMetadata> requestTemplateStill() const;
	std::unique_ptr<CameraMetadata> requestTemplateVideo() const;

private:
	LIBCAMERA_DISABLE_COPY_AND_MOVE(CameraCapabilities)

	struct Camera3StreamConfiguration {
		libcamera::Size resolution;
		int androidFormat;
		int64_t minFrameDurationNsec;
		int64_t maxFrameDurationNsec;
	};

	bool validateManualSensorCapability();
	bool validateManualPostProcessingCapability();
	bool validateBurstCaptureCapability();

	std::set<camera_metadata_enum_android_request_available_capabilities>
	computeCapabilities();

	void computeHwLevel(
		const std::set<camera_metadata_enum_android_request_available_capabilities> &caps);

	std::vector<libcamera::Size>
	initializeYUVResolutions(const libcamera::PixelFormat &pixelFormat,
				 const std::vector<libcamera::Size> &resolutions);
	std::vector<libcamera::Size>
	initializeRawResolutions(const libcamera::PixelFormat &pixelFormat);

	int initializeStreamConfigurations();

	int initializeStaticMetadata();

	std::shared_ptr<libcamera::Camera> camera_;

	int facing_;
	int orientation_;
	bool rawStreamAvailable_;
	int64_t maxFrameDuration_;
	camera_metadata_enum_android_info_supported_hardware_level hwLevel_;
	std::set<camera_metadata_enum_android_request_available_capabilities> capabilities_;

	std::vector<Camera3StreamConfiguration> streamConfigurations_;
	std::map<int, libcamera::PixelFormat> formatsMap_;
	std::unique_ptr<CameraMetadata> staticMetadata_;
	unsigned int maxJpegBufferSize_;

	std::set<int32_t> availableCharacteristicsKeys_;
	std::set<int32_t> availableRequestKeys_;
	std::set<int32_t> availableResultKeys_;
};
repos/libcamera/src/android/camera_hal_config.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
 * Copyright (C) 2021, Google Inc.
 *
 * Camera HAL configuration file manager
 */

#include "camera_hal_config.h"

#include <stdlib.h>
#include <string>

#include <libcamera/base/file.h>
#include <libcamera/base/log.h>

#include "libcamera/internal/yaml_parser.h"

#include <hardware/camera3.h>

using namespace libcamera;

LOG_DEFINE_CATEGORY(HALConfig)

class CameraHalConfig::Private : public Extensible::Private
{
	LIBCAMERA_DECLARE_PUBLIC(CameraHalConfig)

public:
	Private();

	int parseConfigFile(File &file, std::map<std::string, CameraConfigData> *cameras);

private:
	int parseCameraConfigData(const std::string &cameraId, const YamlObject &);
	int parseLocation(const YamlObject &, CameraConfigData &cameraConfigData);
	int parseRotation(const YamlObject &, CameraConfigData &cameraConfigData);

	std::map<std::string, CameraConfigData> *cameras_;
};

CameraHalConfig::Private::Private()
{
}

int CameraHalConfig::Private::parseConfigFile(File &file,
					      std::map<std::string, CameraConfigData> *cameras)
{
	/*
	 * Parse the HAL properties.
	 *
	 * Each camera properties block is a list of properties associated
	 * with the ID (as assembled by CameraSensor::generateId()) of the
	 * camera they refer to.
	 *
	 * cameras:
	 *   "camera0 id":
	 *     location: value
	 *     rotation: value
	 *     ...
	 *
	 *   "camera1 id":
	 *     location: value
	 *     rotation: value
	 *     ...
	 */

	cameras_ = cameras;

	std::unique_ptr<YamlObject> root = YamlParser::parse(file);
	if (!root)
		return -EINVAL;

	if (!root->isDictionary())
		return -EINVAL;

	/* Parse property "cameras" */
	if (!root->contains("cameras"))
		return -EINVAL;

	const YamlObject &yamlObjectCameras = (*root)["cameras"];

	if (!yamlObjectCameras.isDictionary())
		return -EINVAL;

	for (const auto &[cameraId, configData] : yamlObjectCameras.asDict()) {
		if (parseCameraConfigData(cameraId, configData))
			return -EINVAL;
	}

	return 0;
}

int CameraHalConfig::Private::parseCameraConfigData(const std::string &cameraId,
						    const YamlObject &cameraObject)
{
	if (!cameraObject.isDictionary())
		return -EINVAL;

	CameraConfigData &cameraConfigData = (*cameras_)[cameraId];

	/* Parse property "location" */
	if (parseLocation(cameraObject, cameraConfigData))
		return -EINVAL;

	/* Parse property "rotation" */
	if (parseRotation(cameraObject, cameraConfigData))
		return -EINVAL;

	return 0;
}

int CameraHalConfig::Private::parseLocation(const YamlObject &cameraObject,
					    CameraConfigData &cameraConfigData)
{
	if (!cameraObject.contains("location"))
		return -EINVAL;

	std::string location = cameraObject["location"].get<std::string>("");

	if (location == "front")
		cameraConfigData.facing = CAMERA_FACING_FRONT;
	else if (location == "back")
		cameraConfigData.facing = CAMERA_FACING_BACK;
	else
		return -EINVAL;

	return 0;
}

int CameraHalConfig::Private::parseRotation(const YamlObject &cameraObject,
					    CameraConfigData &cameraConfigData)
{
	if (!cameraObject.contains("rotation"))
		return -EINVAL;

	int32_t rotation = cameraObject["rotation"].get<int32_t>(-1);

	if (rotation < 0 || rotation >= 360) {
		LOG(HALConfig, Error) << "Unknown rotation: " << rotation;
		return -EINVAL;
	}

	cameraConfigData.rotation = rotation;
	return 0;
}

CameraHalConfig::CameraHalConfig()
	: Extensible(std::make_unique<Private>()), exists_(false), valid_(false)
{
	parseConfigurationFile();
}

/*
 * Open the HAL configuration file and validate its content.
 * \return 0 on success, a negative error code otherwise
 * \retval -ENOENT The configuration file is not available
 * \retval -EINVAL The configuration file is available but not valid
 */
int CameraHalConfig::parseConfigurationFile()
{
	std::string filePath = LIBCAMERA_SYSCONF_DIR "/camera_hal.yaml";

	File file(filePath);
	if (!file.exists()) {
		LOG(HALConfig, Debug)
			<< "Configuration file: \"" << filePath << "\" not found";
		return -ENOENT;
	}

	if (!file.open(File::OpenModeFlag::ReadOnly)) {
		int ret = file.error();
		LOG(HALConfig, Error) << "Failed to open configuration file "
				      << filePath << ": " << strerror(-ret);
		return ret;
	}

	exists_ = true;

	int ret = _d()->parseConfigFile(file, &cameras_);
	if (ret)
		return -EINVAL;

	valid_ = true;

	for (const auto &c : cameras_) {
		const std::string &cameraId = c.first;
		const CameraConfigData &camera = c.second;
		LOG(HALConfig, Debug)
			<< "'" << cameraId << "' "
			<< "(" << camera.facing << ")["
			<< camera.rotation << "]";
	}

	return 0;
}

const CameraConfigData *CameraHalConfig::cameraConfigData(const std::string &cameraId) const
{
	const auto &it = cameras_.find(cameraId);
	if (it == cameras_.end()) {
		LOG(HALConfig, Error)
			<< "Camera '" << cameraId
			<< "' not described in the HAL configuration file";
		return nullptr;
	}

	return &it->second;
}
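For reference, an illustrative camera_hal.yaml matching the format parsed above. The camera ID shown is invented; real IDs are assembled by CameraSensor::generateId(). location must be "front" or "back", and rotation must lie in the [0, 360) range:

cameras:
  "/base/soc/i2c@0/camera@10":
    location: front
    rotation: 90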
repos/libcamera/src/android/camera_hal_manager.h
/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
 * Copyright (C) 2019, Google Inc.
 *
 * libcamera Android Camera Manager
 */

#pragma once

#include <map>
#include <stddef.h>
#include <tuple>
#include <vector>

#include <hardware/camera_common.h>
#include <hardware/hardware.h>
#include <system/camera_metadata.h>

#include <libcamera/base/class.h>
#include <libcamera/base/mutex.h>

#include <libcamera/camera_manager.h>

#include "camera_hal_config.h"

class CameraDevice;

class CameraHalManager
{
public:
	~CameraHalManager();

	static CameraHalManager *instance();

	int init();

	std::tuple<CameraDevice *, int>
	open(unsigned int id, const hw_module_t *module);

	unsigned int numCameras() const;
	int getCameraInfo(unsigned int id, struct camera_info *info);
	void setCallbacks(const camera_module_callbacks_t *callbacks);

private:
	LIBCAMERA_DISABLE_COPY_AND_MOVE(CameraHalManager)

	static constexpr unsigned int firstExternalCameraId_ = 1000;

	CameraHalManager();

	static int32_t cameraLocation(const libcamera::Camera *cam);

	void cameraAdded(std::shared_ptr<libcamera::Camera> cam);
	void cameraRemoved(std::shared_ptr<libcamera::Camera> cam);

	CameraDevice *cameraDeviceFromHalId(unsigned int id) LIBCAMERA_TSA_REQUIRES(mutex_);

	std::unique_ptr<libcamera::CameraManager> cameraManager_;
	CameraHalConfig halConfig_;

	const camera_module_callbacks_t *callbacks_;

	std::vector<std::unique_ptr<CameraDevice>> cameras_ LIBCAMERA_TSA_GUARDED_BY(mutex_);
	std::map<std::string, unsigned int> cameraIdsMap_ LIBCAMERA_TSA_GUARDED_BY(mutex_);
	libcamera::Mutex mutex_;

	unsigned int numInternalCameras_;
	unsigned int nextExternalCameraId_;
};
repos/libcamera/src/android/camera_buffer.h
/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
 * Copyright (C) 2021, Google Inc.
 *
 * Frame buffer handling interface definition
 */

#pragma once

#include <hardware/camera3.h>

#include <libcamera/base/class.h>
#include <libcamera/base/span.h>

#include <libcamera/geometry.h>
#include <libcamera/pixel_format.h>

class CameraBuffer final : public libcamera::Extensible
{
	LIBCAMERA_DECLARE_PRIVATE()

public:
	CameraBuffer(buffer_handle_t camera3Buffer,
		     libcamera::PixelFormat pixelFormat,
		     const libcamera::Size &size, int flags);
	~CameraBuffer();

	bool isValid() const;

	unsigned int numPlanes() const;
	libcamera::Span<const uint8_t> plane(unsigned int plane) const;
	libcamera::Span<uint8_t> plane(unsigned int plane);

	unsigned int stride(unsigned int plane) const;
	unsigned int offset(unsigned int plane) const;
	unsigned int size(unsigned int plane) const;

	size_t jpegBufferSize(size_t maxJpegBufferSize) const;
};

#define PUBLIC_CAMERA_BUFFER_IMPLEMENTATION				\
CameraBuffer::CameraBuffer(buffer_handle_t camera3Buffer,		\
			   libcamera::PixelFormat pixelFormat,		\
			   const libcamera::Size &size, int flags)	\
	: Extensible(std::make_unique<Private>(this, camera3Buffer,	\
					       pixelFormat, size,	\
					       flags))			\
{									\
}									\
CameraBuffer::~CameraBuffer()						\
{									\
}									\
bool CameraBuffer::isValid() const					\
{									\
	return _d()->isValid();					\
}									\
unsigned int CameraBuffer::numPlanes() const				\
{									\
	return _d()->numPlanes();					\
}									\
Span<const uint8_t> CameraBuffer::plane(unsigned int plane) const	\
{									\
	return const_cast<Private *>(_d())->plane(plane);		\
}									\
Span<uint8_t> CameraBuffer::plane(unsigned int plane)			\
{									\
	return _d()->plane(plane);					\
}									\
unsigned int CameraBuffer::stride(unsigned int plane) const		\
{									\
	return _d()->stride(plane);					\
}									\
unsigned int CameraBuffer::offset(unsigned int plane) const		\
{									\
	return _d()->offset(plane);					\
}									\
unsigned int CameraBuffer::size(unsigned int plane) const		\
{									\
	return _d()->size(plane);					\
}									\
size_t CameraBuffer::jpegBufferSize(size_t maxJpegBufferSize) const	\
{									\
	return _d()->jpegBufferSize(maxJpegBufferSize);		\
}
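A hedged sketch of consuming this interface, in the same spirit as the post-processors that map destination buffers; the helper function is hypothetical and not part of the tree:

#include <stdint.h>

#include <algorithm>

#include <libcamera/base/span.h>

#include "camera_buffer.h"

/* Hypothetical helper: zero every plane of a mapped CameraBuffer. */
static void clearBuffer(CameraBuffer &buffer)
{
	if (!buffer.isValid())
		return;

	for (unsigned int i = 0; i < buffer.numPlanes(); ++i) {
		libcamera::Span<uint8_t> plane = buffer.plane(i);
		std::fill(plane.begin(), plane.end(), 0);
	}
}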
repos/libcamera/src/android/cros_mojo_token.h
/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
 * Copyright (C) 2022, Google Inc.
 *
 * cros-specific mojo token
 */

#pragma once

#include <cros-camera/cros_camera_hal.h>

inline cros::CameraMojoChannelManagerToken *gCrosMojoToken = nullptr;
repos/libcamera/src/android/camera_ops.h
/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
 * Copyright (C) 2019, Google Inc.
 *
 * Android Camera HAL Operations
 */

#pragma once

#include <hardware/camera3.h>

int hal_dev_close(hw_device_t *hw_device);

extern camera3_device_ops hal_dev_ops;
repos/libcamera/src/android/camera3_hal.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
 * Copyright (C) 2019, Google Inc.
 *
 * Android Camera HALv3 module
 */

#include <hardware/camera_common.h>

#include <libcamera/base/log.h>

#include "camera_device.h"
#include "camera_hal_manager.h"

using namespace libcamera;

LOG_DEFINE_CATEGORY(HAL)

/*------------------------------------------------------------------------------
 * Android Camera HAL callbacks
 */

static int hal_get_number_of_cameras()
{
	return CameraHalManager::instance()->numCameras();
}

static int hal_get_camera_info(int id, struct camera_info *info)
{
	return CameraHalManager::instance()->getCameraInfo(id, info);
}

static int hal_set_callbacks(const camera_module_callbacks_t *callbacks)
{
	CameraHalManager::instance()->setCallbacks(callbacks);

	return 0;
}

static int hal_open_legacy([[maybe_unused]] const struct hw_module_t *module,
			   [[maybe_unused]] const char *id,
			   [[maybe_unused]] uint32_t halVersion,
			   [[maybe_unused]] struct hw_device_t **device)
{
	return -ENOSYS;
}

static int hal_set_torch_mode([[maybe_unused]] const char *camera_id,
			      [[maybe_unused]] bool enabled)
{
	return -ENOSYS;
}

/*
 * First entry point of the camera HAL module.
 *
 * Initializes the HAL but does not open any camera device yet
 * (see hal_dev_open).
 */
static int hal_init()
{
	LOG(HAL, Info) << "Initialising Android camera HAL";

	CameraHalManager::instance()->init();

	return 0;
}

/*------------------------------------------------------------------------------
 * Android Camera Device
 */

static int hal_dev_open(const hw_module_t *module, const char *name,
			hw_device_t **device)
{
	LOG(HAL, Debug) << "Open camera " << name;

	int id = atoi(name);

	auto [camera, ret] = CameraHalManager::instance()->open(id, module);
	if (!camera) {
		LOG(HAL, Error)
			<< "Failed to open camera module '" << id << "'";
		return ret == -EBUSY ? -EUSERS : ret;
	}

	*device = &camera->camera3Device()->common;

	return 0;
}

static struct hw_module_methods_t hal_module_methods = {
	.open = hal_dev_open,
};

camera_module_t HAL_MODULE_INFO_SYM = {
	.common = {
		.tag = HARDWARE_MODULE_TAG,
		.module_api_version = CAMERA_MODULE_API_VERSION_2_4,
		.hal_api_version = HARDWARE_HAL_API_VERSION,
		.id = CAMERA_HARDWARE_MODULE_ID,
		.name = "libcamera camera HALv3 module",
		.author = "libcamera",
		.methods = &hal_module_methods,
		.dso = nullptr,
		.reserved = {},
	},

	.get_number_of_cameras = hal_get_number_of_cameras,
	.get_camera_info = hal_get_camera_info,
	.set_callbacks = hal_set_callbacks,
	.get_vendor_tag_ops = nullptr,
	.open_legacy = hal_open_legacy,
	.set_torch_mode = hal_set_torch_mode,
	.init = hal_init,
	.reserved = {},
};
repos/libcamera/src/android/camera_request.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
 * Copyright (C) 2019-2021, Google Inc.
 *
 * libcamera Android Camera Request Descriptor
 */

#include "camera_request.h"

#include <libcamera/base/span.h>

#include "camera_buffer.h"

using namespace libcamera;

/*
 * \class Camera3RequestDescriptor
 *
 * A utility class that groups information about a capture request to be later
 * reused at request completion time to notify the framework.
 *
 * Lifetime of a Camera3RequestDescriptor tracking a capture request placed by
 * the Android framework:
 *
 * - The framework hands a camera3_capture_request_t, with its list of
 *   requested output streams, to CameraDevice::processCaptureRequest(), which
 *   creates a Camera3RequestDescriptor tracking the request, stores the
 *   streams requiring post-processing in the pendingStreamsToProcess map,
 *   adds the descriptor to the CameraDevice::descriptors_ queue, and queues
 *   the capture request to the libcamera core.
 *
 * - The core captures from the Camera and emits Camera::requestComplete.
 *   CameraDevice::requestComplete() checks the completion status: if
 *   pendingStreamsToProcess has entries, they are all queued to the
 *   per-stream CameraStream::PostProcessorWorker instances, each of which
 *   runs PostProcessor::process() on its own internal thread and emits
 *   processComplete; otherwise the descriptor is completed immediately with
 *   completeDescriptor().
 *
 * - CameraDevice::streamProcessingComplete() checks and sets the buffer
 *   status, removes the post-processing entry from pendingStreamsToProcess,
 *   and calls completeDescriptor() once the map is empty.
 */
Camera3RequestDescriptor::Camera3RequestDescriptor(
	Camera *camera, const camera3_capture_request_t *camera3Request)
{
	frameNumber_ = camera3Request->frame_number;

	/* Copy the camera3 request stream information for later access. */
	const Span<const camera3_stream_buffer_t> buffers{
		camera3Request->output_buffers,
		camera3Request->num_output_buffers
	};

	buffers_.reserve(buffers.size());
	for (const camera3_stream_buffer_t &buffer : buffers) {
		CameraStream *stream =
			static_cast<CameraStream *>(buffer.stream->priv);

		buffers_.emplace_back(stream, buffer, this);
	}

	/* Clone the controls associated with the camera3 request. */
	settings_ = CameraMetadata(camera3Request->settings);

	/*
	 * Create the CaptureRequest, stored as a unique_ptr<> to tie its
	 * lifetime to the descriptor.
	 */
	request_ = camera->createRequest(reinterpret_cast<uint64_t>(this));
}

Camera3RequestDescriptor::~Camera3RequestDescriptor() = default;

/**
 * \struct Camera3RequestDescriptor::StreamBuffer
 * \brief Group information for a per-stream buffer of a Camera3RequestDescriptor
 *
 * A capture request placed to the libcamera HAL can contain multiple streams.
 * Each stream will have an associated buffer to be filled. StreamBuffer
 * tracks this buffer with contextual information which aids in the stream's
 * generation. The generation of the stream will depend on its type (refer to
 * the CameraStream::Type documentation).
 *
 * \var Camera3RequestDescriptor::StreamBuffer::stream
 * \brief Pointer to the corresponding CameraStream
 *
 * \var Camera3RequestDescriptor::StreamBuffer::camera3Buffer
 * \brief Native handle to the buffer
 *
 * \var Camera3RequestDescriptor::StreamBuffer::frameBuffer
 * \brief Encapsulate the dmabuf handle inside a libcamera::FrameBuffer for
 * direct streams
 *
 * \var Camera3RequestDescriptor::StreamBuffer::fence
 * \brief Acquire fence of the buffer
 *
 * \var Camera3RequestDescriptor::StreamBuffer::status
 * \brief Track the status of the buffer
 *
 * \var Camera3RequestDescriptor::StreamBuffer::internalBuffer
 * \brief Pointer to a buffer internally handled by CameraStream (if any)
 *
 * \var Camera3RequestDescriptor::StreamBuffer::srcBuffer
 * \brief Pointer to the source frame buffer used for post-processing
 *
 * \var Camera3RequestDescriptor::StreamBuffer::dstBuffer
 * \brief Pointer to the destination frame buffer used for post-processing
 *
 * \var Camera3RequestDescriptor::StreamBuffer::request
 * \brief Back pointer to the Camera3RequestDescriptor to which the
 * StreamBuffer belongs
 */
Camera3RequestDescriptor::StreamBuffer::StreamBuffer(
	CameraStream *cameraStream, const camera3_stream_buffer_t &buffer,
	Camera3RequestDescriptor *requestDescriptor)
	: stream(cameraStream), camera3Buffer(buffer.buffer),
	  fence(buffer.acquire_fence), request(requestDescriptor)
{
}

Camera3RequestDescriptor::StreamBuffer::~StreamBuffer() = default;

Camera3RequestDescriptor::StreamBuffer::StreamBuffer(StreamBuffer &&) = default;

Camera3RequestDescriptor::StreamBuffer &
Camera3RequestDescriptor::StreamBuffer::operator=(Camera3RequestDescriptor::StreamBuffer &&) = default;
repos/libcamera/src/android/camera_ops.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
 * Copyright (C) 2019, Google Inc.
 *
 * Android Camera HAL Operations
 */

#include "camera_ops.h"

#include <system/camera_metadata.h>

#include "camera_device.h"

using namespace libcamera;

/*
 * Translation layer between the Android Camera HAL device operations and the
 * CameraDevice.
 */

static int hal_dev_initialize(const struct camera3_device *dev,
			      const camera3_callback_ops_t *callback_ops)
{
	if (!dev)
		return -EINVAL;

	CameraDevice *camera = reinterpret_cast<CameraDevice *>(dev->priv);
	camera->setCallbacks(callback_ops);

	return 0;
}

static int hal_dev_configure_streams(const struct camera3_device *dev,
				     camera3_stream_configuration_t *stream_list)
{
	if (!dev)
		return -EINVAL;

	CameraDevice *camera = reinterpret_cast<CameraDevice *>(dev->priv);
	return camera->configureStreams(stream_list);
}

static const camera_metadata_t *
hal_dev_construct_default_request_settings(const struct camera3_device *dev,
					   int type)
{
	if (!dev)
		return nullptr;

	CameraDevice *camera = reinterpret_cast<CameraDevice *>(dev->priv);
	return camera->constructDefaultRequestSettings(type);
}

static int hal_dev_process_capture_request(const struct camera3_device *dev,
					   camera3_capture_request_t *request)
{
	if (!dev)
		return -EINVAL;

	CameraDevice *camera = reinterpret_cast<CameraDevice *>(dev->priv);
	return camera->processCaptureRequest(request);
}

static void hal_dev_dump([[maybe_unused]] const struct camera3_device *dev,
			 [[maybe_unused]] int fd)
{
}

static int hal_dev_flush(const struct camera3_device *dev)
{
	if (!dev)
		return -EINVAL;

	CameraDevice *camera = reinterpret_cast<CameraDevice *>(dev->priv);
	camera->flush();

	return 0;
}

int hal_dev_close(hw_device_t *hw_device)
{
	if (!hw_device)
		return -EINVAL;

	camera3_device_t *dev = reinterpret_cast<camera3_device_t *>(hw_device);
	CameraDevice *camera = reinterpret_cast<CameraDevice *>(dev->priv);

	camera->close();

	return 0;
}

camera3_device_ops hal_dev_ops = {
	.initialize = hal_dev_initialize,
	.configure_streams = hal_dev_configure_streams,
	.register_stream_buffers = nullptr,
	.construct_default_request_settings = hal_dev_construct_default_request_settings,
	.process_capture_request = hal_dev_process_capture_request,
	.get_metadata_vendor_tag_ops = nullptr,
	.dump = hal_dev_dump,
	.flush = hal_dev_flush,
	.reserved = { nullptr },
};
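This file is an instance of the common C-to-C++ trampoline pattern: each C callback recovers the owning C++ object from the priv pointer stored next to the operations table. A self-contained illustration of the pattern with invented names:

#include <iostream>

/* A C-style operations table, as a HAL-like ABI would define it. */
struct dev_ops {
	int (*do_work)(struct device *dev);
};

struct device {
	const struct dev_ops *ops;
	void *priv; /* Points back to the owning C++ object. */
};

class Backend
{
public:
	int doWork()
	{
		std::cout << "working" << std::endl;
		return 0;
	}
};

static int trampoline_do_work(struct device *dev)
{
	if (!dev)
		return -1;

	/* Recover the C++ object stored in priv and forward the call. */
	Backend *backend = static_cast<Backend *>(dev->priv);
	return backend->doWork();
}

static const dev_ops ops = { trampoline_do_work };

int main()
{
	Backend backend;
	device dev{ &ops, &backend };

	/* The C caller only sees the function table and the device. */
	return dev.ops->do_work(&dev);
}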
repos/libcamera/src/android/camera_metadata.h
/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
 * Copyright (C) 2019, Google Inc.
 *
 * libcamera Android Camera Metadata Helper
 */

#pragma once

#include <stdint.h>
#include <tuple>
#include <type_traits>
#include <vector>

#include <system/camera_metadata.h>

class CameraMetadata
{
public:
	CameraMetadata();
	CameraMetadata(size_t entryCapacity, size_t dataCapacity);
	CameraMetadata(const camera_metadata_t *metadata);
	CameraMetadata(const CameraMetadata &other);
	~CameraMetadata();

	CameraMetadata &operator=(const CameraMetadata &other);

	std::tuple<size_t, size_t> usage() const;
	bool resized() const { return resized_; }

	bool isValid() const { return valid_; }
	bool getEntry(uint32_t tag, camera_metadata_ro_entry_t *entry) const;

	template<typename T>
	bool entryContains(uint32_t tag, T value) const;

	bool hasEntry(uint32_t tag) const;

	template<typename T,
		 std::enable_if_t<std::is_arithmetic_v<T> ||
				  std::is_enum_v<T>> * = nullptr>
	bool setEntry(uint32_t tag, const T &data)
	{
		if (hasEntry(tag))
			return updateEntry(tag, &data, 1, sizeof(T));
		else
			return addEntry(tag, &data, 1, sizeof(T));
	}

	template<typename T,
		 std::enable_if_t<std::is_arithmetic_v<T> ||
				  std::is_enum_v<T>> * = nullptr>
	bool addEntry(uint32_t tag, const T &data)
	{
		return addEntry(tag, &data, 1, sizeof(T));
	}

	template<typename T, size_t size>
	bool addEntry(uint32_t tag, const T (&data)[size])
	{
		return addEntry(tag, data, size, sizeof(T));
	}

	template<typename S,
		 typename T = typename S::value_type>
	bool addEntry(uint32_t tag, const S &data)
	{
		return addEntry(tag, data.data(), data.size(), sizeof(T));
	}

	template<typename T>
	bool addEntry(uint32_t tag, const T *data, size_t count)
	{
		return addEntry(tag, data, count, sizeof(T));
	}

	template<typename T>
	bool updateEntry(uint32_t tag, const T &data)
	{
		return updateEntry(tag, &data, 1, sizeof(T));
	}

	template<typename T, size_t size>
	bool updateEntry(uint32_t tag, const T (&data)[size])
	{
		return updateEntry(tag, data, size, sizeof(T));
	}

	template<typename S,
		 typename T = typename S::value_type>
	bool updateEntry(uint32_t tag, const S &data)
	{
		return updateEntry(tag, data.data(), data.size(), sizeof(T));
	}

	template<typename T>
	bool updateEntry(uint32_t tag, const T *data, size_t count)
	{
		return updateEntry(tag, data, count, sizeof(T));
	}

	camera_metadata_t *getMetadata();
	const camera_metadata_t *getMetadata() const;

private:
	bool resize(size_t count, size_t size);
	bool addEntry(uint32_t tag, const void *data, size_t count,
		      size_t elementSize);
	bool updateEntry(uint32_t tag, const void *data, size_t count,
			 size_t elementSize);

	camera_metadata_t *metadata_;
	bool valid_;
	bool resized_;
};
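A usage sketch of the helpers above, assuming only standard Android metadata tags; the helper function itself is hypothetical:

#include <stdint.h>

#include <system/camera_metadata.h>

#include "camera_metadata.h"

/* Hypothetical helper: build a small metadata pack with the API above. */
static void buildExampleMetadata()
{
	/* The capacities are only hints; the container grows when exceeded. */
	CameraMetadata metadata(10, 100);

	uint8_t intent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
	metadata.addEntry(ANDROID_CONTROL_CAPTURE_INTENT, intent);

	int32_t fpsRange[] = { 15, 30 };
	metadata.addEntry(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fpsRange);

	/* setEntry() adds the tag, or updates it if already present. */
	uint8_t still = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
	metadata.setEntry(ANDROID_CONTROL_CAPTURE_INTENT, still);

	camera_metadata_ro_entry_t entry;
	if (metadata.getEntry(ANDROID_CONTROL_CAPTURE_INTENT, &entry) &&
	    entry.count == 1)
		(void)entry.data.u8[0]; /* Now the still-capture intent. */
}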
repos/libcamera/src/android/hal_framebuffer.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
 * Copyright (C) 2022, Google Inc.
 *
 * HAL Frame Buffer Handling
 */

#include "hal_framebuffer.h"

#include <hardware/camera3.h>

HALFrameBuffer::HALFrameBuffer(std::unique_ptr<Private> d,
			       buffer_handle_t handle)
	: FrameBuffer(std::move(d)), handle_(handle)
{
}

HALFrameBuffer::HALFrameBuffer(const std::vector<Plane> &planes,
			       buffer_handle_t handle)
	: FrameBuffer(planes), handle_(handle)
{
}
repos/libcamera/src/android/camera_metadata.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
 * Copyright (C) 2019, Google Inc.
 *
 * libcamera Android Camera Metadata Helper
 */

#include "camera_metadata.h"

#include <libcamera/base/log.h>

using namespace libcamera;

LOG_DEFINE_CATEGORY(CameraMetadata)

CameraMetadata::CameraMetadata()
	: metadata_(nullptr), valid_(false), resized_(false)
{
}

CameraMetadata::CameraMetadata(size_t entryCapacity, size_t dataCapacity)
	: resized_(false)
{
	metadata_ = allocate_camera_metadata(entryCapacity, dataCapacity);
	valid_ = metadata_ != nullptr;
}

CameraMetadata::CameraMetadata(const camera_metadata_t *metadata)
	: resized_(false)
{
	metadata_ = clone_camera_metadata(metadata);
	valid_ = metadata_ != nullptr;
}

CameraMetadata::CameraMetadata(const CameraMetadata &other)
	: CameraMetadata(other.getMetadata())
{
}

CameraMetadata::~CameraMetadata()
{
	if (metadata_)
		free_camera_metadata(metadata_);
}

CameraMetadata &CameraMetadata::operator=(const CameraMetadata &other)
{
	if (this == &other)
		return *this;

	if (metadata_)
		free_camera_metadata(metadata_);

	metadata_ = clone_camera_metadata(other.getMetadata());
	valid_ = metadata_ != nullptr;

	return *this;
}

std::tuple<size_t, size_t> CameraMetadata::usage() const
{
	size_t currentEntryCount = get_camera_metadata_entry_count(metadata_);
	size_t currentDataCount = get_camera_metadata_data_count(metadata_);

	return { currentEntryCount, currentDataCount };
}

bool CameraMetadata::getEntry(uint32_t tag, camera_metadata_ro_entry_t *entry) const
{
	if (find_camera_metadata_ro_entry(metadata_, tag, entry))
		return false;

	return true;
}

/*
 * \brief Resize the metadata container, if necessary
 * \param[in] count Number of entries to add to the container
 * \param[in] size Total size of entries to add, in bytes
 * \return True if resize was successful or unnecessary, false otherwise
 */
bool CameraMetadata::resize(size_t count, size_t size)
{
	if (!valid_)
		return false;

	if (!count && !size)
		return true;

	size_t currentEntryCount = get_camera_metadata_entry_count(metadata_);
	size_t currentEntryCapacity = get_camera_metadata_entry_capacity(metadata_);
	size_t newEntryCapacity = currentEntryCapacity < currentEntryCount + count ?
				  currentEntryCapacity * 2 : currentEntryCapacity;

	size_t currentDataCount = get_camera_metadata_data_count(metadata_);
	size_t currentDataCapacity = get_camera_metadata_data_capacity(metadata_);
	size_t newDataCapacity = currentDataCapacity < currentDataCount + size ?
				 currentDataCapacity * 2 : currentDataCapacity;

	if (newEntryCapacity > currentEntryCapacity ||
	    newDataCapacity > currentDataCapacity) {
		camera_metadata_t *oldMetadata = metadata_;
		metadata_ = allocate_camera_metadata(newEntryCapacity, newDataCapacity);
		if (!metadata_) {
			metadata_ = oldMetadata;
			return false;
		}

		LOG(CameraMetadata, Info)
			<< "Resized: old entry capacity " << currentEntryCapacity
			<< ", old data capacity " << currentDataCapacity
			<< ", new entry capacity " << newEntryCapacity
			<< ", new data capacity " << newDataCapacity;

		append_camera_metadata(metadata_, oldMetadata);
		free_camera_metadata(oldMetadata);

		resized_ = true;
	}

	return true;
}

template<>
bool CameraMetadata::entryContains(uint32_t tag, uint8_t value) const
{
	camera_metadata_ro_entry_t entry;
	if (!getEntry(tag, &entry))
		return false;

	for (unsigned int i = 0; i < entry.count; i++) {
		if (entry.data.u8[i] == value)
			return true;
	}

	return false;
}

bool CameraMetadata::hasEntry(uint32_t tag) const
{
	camera_metadata_ro_entry_t entry;
	return getEntry(tag, &entry);
}

bool CameraMetadata::addEntry(uint32_t tag, const void *data, size_t count,
			      size_t elementSize)
{
	if (!valid_)
		return false;

	if (!resize(1, count * elementSize)) {
		LOG(CameraMetadata, Error) << "Failed to resize";
		valid_ = false;
		return false;
	}

	if (!add_camera_metadata_entry(metadata_, tag, data, count))
		return true;

	const char *name = get_camera_metadata_tag_name(tag);
	if (name)
		LOG(CameraMetadata, Error)
			<< "Failed to add tag " << name;
	else
		LOG(CameraMetadata, Error)
			<< "Failed to add unknown tag " << tag;

	valid_ = false;

	return false;
}

bool CameraMetadata::updateEntry(uint32_t tag, const void *data, size_t count,
				 size_t elementSize)
{
	if (!valid_)
		return false;

	camera_metadata_entry_t entry;
	int ret = find_camera_metadata_entry(metadata_, tag, &entry);
	if (ret) {
		const char *name = get_camera_metadata_tag_name(tag);
		LOG(CameraMetadata, Error)
			<< "Failed to update tag "
			<< (name ? name : "<unknown>") << ": not present";
		return false;
	}

	if (camera_metadata_type_size[entry.type] != elementSize) {
		const char *name = get_camera_metadata_tag_name(tag);
		LOG(CameraMetadata, Fatal)
			<< "Invalid element size for tag "
			<< (name ? name : "<unknown>");
		return false;
	}

	size_t oldSize =
		calculate_camera_metadata_entry_data_size(entry.type,
							  entry.count);
	size_t newSize =
		calculate_camera_metadata_entry_data_size(entry.type,
							  count);
	/*
	 * Compute the increment with an explicit comparison: the operands are
	 * unsigned, so "newSize - oldSize > 0" would wrap around whenever the
	 * entry shrinks.
	 */
	size_t sizeIncrement = newSize > oldSize ? newSize - oldSize : 0;
	if (!resize(0, sizeIncrement)) {
		LOG(CameraMetadata, Error) << "Failed to resize";
		valid_ = false;
		return false;
	}

	ret = update_camera_metadata_entry(metadata_, entry.index, data,
					   count, nullptr);
	if (!ret)
		return true;

	const char *name = get_camera_metadata_tag_name(tag);
	LOG(CameraMetadata, Error)
		<< "Failed to update tag " << (name ? name : "<unknown>");

	valid_ = false;

	return false;
}

camera_metadata_t *CameraMetadata::getMetadata()
{
	return valid_ ? metadata_ : nullptr;
}

const camera_metadata_t *CameraMetadata::getMetadata() const
{
	return valid_ ? metadata_ : nullptr;
}
repos/libcamera/src/android/frame_buffer_allocator.h
/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
 * Copyright (C) 2021, Google Inc.
 *
 * Interface definition to allocate Frame buffer in
 * platform dependent way.
 */
#ifndef __ANDROID_FRAME_BUFFER_ALLOCATOR_H__
#define __ANDROID_FRAME_BUFFER_ALLOCATOR_H__

#include <memory>

#include <libcamera/base/class.h>

#include <libcamera/camera.h>
#include <libcamera/geometry.h>

#include "hal_framebuffer.h"

class CameraDevice;

class PlatformFrameBufferAllocator : libcamera::Extensible
{
	LIBCAMERA_DECLARE_PRIVATE()

public:
	explicit PlatformFrameBufferAllocator(CameraDevice *const cameraDevice);
	~PlatformFrameBufferAllocator();

	/*
	 * FrameBuffer owns the underlying buffer. Returns nullptr on failure.
	 * Note: The returned FrameBuffer needs to be destroyed before
	 * PlatformFrameBufferAllocator is destroyed.
	 */
	std::unique_ptr<HALFrameBuffer> allocate(
		int halPixelFormat, const libcamera::Size &size, uint32_t usage);
};

#define PUBLIC_FRAME_BUFFER_ALLOCATOR_IMPLEMENTATION			\
PlatformFrameBufferAllocator::PlatformFrameBufferAllocator(		\
	CameraDevice *const cameraDevice)				\
	: Extensible(std::make_unique<Private>(cameraDevice))		\
{									\
}									\
PlatformFrameBufferAllocator::~PlatformFrameBufferAllocator()		\
{									\
}									\
std::unique_ptr<HALFrameBuffer>					\
PlatformFrameBufferAllocator::allocate(int halPixelFormat,		\
				       const libcamera::Size &size,	\
				       uint32_t usage)			\
{									\
	return _d()->allocate(halPixelFormat, size, usage);		\
}

#endif /* __ANDROID_FRAME_BUFFER_ALLOCATOR_H__ */
repos/libcamera/src/android/hal_framebuffer.h
/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
 * Copyright (C) 2022, Google Inc.
 *
 * HAL Frame Buffer Handling
 */

#pragma once

#include "libcamera/internal/framebuffer.h"

#include <hardware/camera3.h>

class HALFrameBuffer final : public libcamera::FrameBuffer
{
public:
	HALFrameBuffer(std::unique_ptr<Private> d, buffer_handle_t handle);
	HALFrameBuffer(const std::vector<Plane> &planes, buffer_handle_t handle);

	buffer_handle_t handle() const { return handle_; }

private:
	buffer_handle_t handle_;
};
repos/libcamera/src/android/camera_device.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2019, Google Inc. * * libcamera Android Camera Device */ #include "camera_device.h" #include <algorithm> #include <fstream> #include <set> #include <sys/mman.h> #include <unistd.h> #include <vector> #include <libcamera/base/log.h> #include <libcamera/base/unique_fd.h> #include <libcamera/base/utils.h> #include <libcamera/control_ids.h> #include <libcamera/controls.h> #include <libcamera/fence.h> #include <libcamera/formats.h> #include <libcamera/property_ids.h> #include "system/graphics.h" #include "camera_buffer.h" #include "camera_hal_config.h" #include "camera_ops.h" #include "camera_request.h" #include "hal_framebuffer.h" using namespace libcamera; LOG_DECLARE_CATEGORY(HAL) namespace { /* * \struct Camera3StreamConfig * \brief Data to store StreamConfiguration associated with camera3_stream(s) * \var streams List of the pairs of a stream requested by Android HAL client * and CameraStream::Type associated with the stream * \var config StreamConfiguration for streams */ struct Camera3StreamConfig { struct Camera3Stream { camera3_stream_t *stream; CameraStream::Type type; }; std::vector<Camera3Stream> streams; StreamConfiguration config; }; /* * Reorder the configurations so that libcamera::Camera can accept them as much * as possible. The sort rule is as follows. * 1.) The configuration for NV12 request whose resolution is the largest. * 2.) The configuration for JPEG request. * 3.) Others. Larger resolutions and different formats are put earlier. */ void sortCamera3StreamConfigs(std::vector<Camera3StreamConfig> &unsortedConfigs, const camera3_stream_t *jpegStream) { const Camera3StreamConfig *jpegConfig = nullptr; std::map<PixelFormat, std::vector<const Camera3StreamConfig *>> formatToConfigs; for (const auto &streamConfig : unsortedConfigs) { if (jpegStream && !jpegConfig) { const auto &streams = streamConfig.streams; if (std::find_if(streams.begin(), streams.end(), [jpegStream](const auto &stream) { return stream.stream == jpegStream; }) != streams.end()) { jpegConfig = &streamConfig; continue; } } formatToConfigs[streamConfig.config.pixelFormat].push_back(&streamConfig); } if (jpegStream && !jpegConfig) LOG(HAL, Fatal) << "No Camera3StreamConfig is found for JPEG"; for (auto &fmt : formatToConfigs) { auto &streamConfigs = fmt.second; /* Sorted by resolution. Smaller is put first. */ std::sort(streamConfigs.begin(), streamConfigs.end(), [](const auto *streamConfigA, const auto *streamConfigB) { const Size &sizeA = streamConfigA->config.size; const Size &sizeB = streamConfigB->config.size; return sizeA < sizeB; }); } std::vector<Camera3StreamConfig> sortedConfigs; sortedConfigs.reserve(unsortedConfigs.size()); /* * NV12 is the most prioritized format. Put the configuration with NV12 * and the largest resolution first. */ const auto nv12It = formatToConfigs.find(formats::NV12); if (nv12It != formatToConfigs.end()) { auto &nv12Configs = nv12It->second; const Camera3StreamConfig *nv12Largest = nv12Configs.back(); /* * If JPEG will be created from NV12 and the size is larger than * the largest NV12 configurations, then put the NV12 * configuration for JPEG first. 
*/ if (jpegConfig && jpegConfig->config.pixelFormat == formats::NV12) { const Size &nv12SizeForJpeg = jpegConfig->config.size; const Size &nv12LargestSize = nv12Largest->config.size; if (nv12LargestSize < nv12SizeForJpeg) { LOG(HAL, Debug) << "Insert " << jpegConfig->config.toString(); sortedConfigs.push_back(std::move(*jpegConfig)); jpegConfig = nullptr; } } LOG(HAL, Debug) << "Insert " << nv12Largest->config.toString(); sortedConfigs.push_back(*nv12Largest); nv12Configs.pop_back(); if (nv12Configs.empty()) formatToConfigs.erase(nv12It); } /* If the configuration for JPEG is there, then put it. */ if (jpegConfig) { LOG(HAL, Debug) << "Insert " << jpegConfig->config.toString(); sortedConfigs.push_back(std::move(*jpegConfig)); jpegConfig = nullptr; } /* * Put configurations with different formats and larger resolutions * earlier. */ while (!formatToConfigs.empty()) { for (auto it = formatToConfigs.begin(); it != formatToConfigs.end();) { auto &configs = it->second; LOG(HAL, Debug) << "Insert " << configs.back()->config.toString(); sortedConfigs.push_back(*configs.back()); configs.pop_back(); if (configs.empty()) it = formatToConfigs.erase(it); else it++; } } ASSERT(sortedConfigs.size() == unsortedConfigs.size()); unsortedConfigs = sortedConfigs; } const char *rotationToString(int rotation) { switch (rotation) { case CAMERA3_STREAM_ROTATION_0: return "0"; case CAMERA3_STREAM_ROTATION_90: return "90"; case CAMERA3_STREAM_ROTATION_180: return "180"; case CAMERA3_STREAM_ROTATION_270: return "270"; } return "INVALID"; } const char *directionToString(int stream_type) { switch (stream_type) { case CAMERA3_STREAM_OUTPUT: return "Output"; case CAMERA3_STREAM_INPUT: return "Input"; case CAMERA3_STREAM_BIDIRECTIONAL: return "Bidirectional"; default: LOG(HAL, Warning) << "Unknown stream type: " << stream_type; return "Unknown"; } } #if defined(OS_CHROMEOS) /* * Check whether the crop_rotate_scale_degrees values for all streams in * the list are valid according to the Chrome OS camera HAL API. */ bool validateCropRotate(const camera3_stream_configuration_t &streamList) { ASSERT(streamList.num_streams > 0); const int cropRotateScaleDegrees = streamList.streams[0]->crop_rotate_scale_degrees; for (unsigned int i = 0; i < streamList.num_streams; ++i) { const camera3_stream_t &stream = *streamList.streams[i]; switch (stream.crop_rotate_scale_degrees) { case CAMERA3_STREAM_ROTATION_0: case CAMERA3_STREAM_ROTATION_90: case CAMERA3_STREAM_ROTATION_270: break; /* 180Β° rotation is specified by Chrome OS as invalid. */ case CAMERA3_STREAM_ROTATION_180: default: LOG(HAL, Error) << "Invalid crop_rotate_scale_degrees: " << stream.crop_rotate_scale_degrees; return false; } if (cropRotateScaleDegrees != stream.crop_rotate_scale_degrees) { LOG(HAL, Error) << "crop_rotate_scale_degrees in all " << "streams are not identical"; return false; } } return true; } #endif } /* namespace */ /* * \class CameraDevice * * The CameraDevice class wraps a libcamera::Camera instance, and implements * the camera3_device_t interface, bridging calls received from the Android * camera service to the CameraDevice. * * The class translates parameters and operations from the Camera HALv3 API to * the libcamera API to provide static information for a Camera, create request * templates for it, process capture requests and then deliver capture results * back to the framework using the designated callbacks. 
*/ CameraDevice::CameraDevice(unsigned int id, std::shared_ptr<Camera> camera) : id_(id), state_(State::Stopped), camera_(std::move(camera)), facing_(CAMERA_FACING_FRONT), orientation_(0) { camera_->requestCompleted.connect(this, &CameraDevice::requestComplete); maker_ = "libcamera"; model_ = "cameraModel"; /* \todo Support getting properties on Android */ std::ifstream fstream("/var/cache/camera/camera.prop"); if (!fstream.is_open()) return; std::string line; while (std::getline(fstream, line)) { std::string::size_type delimPos = line.find("="); if (delimPos == std::string::npos) continue; std::string key = line.substr(0, delimPos); std::string val = line.substr(delimPos + 1); if (!key.compare("ro.product.model")) model_ = val; else if (!key.compare("ro.product.manufacturer")) maker_ = val; } } CameraDevice::~CameraDevice() = default; std::unique_ptr<CameraDevice> CameraDevice::create(unsigned int id, std::shared_ptr<Camera> cam) { return std::unique_ptr<CameraDevice>( new CameraDevice(id, std::move(cam))); } /* * Initialize the camera static information retrieved from the * Camera::properties or from the cameraConfigData. * * cameraConfigData is optional for external camera devices and can be * nullptr. * * This function is called before the camera device is opened. */ int CameraDevice::initialize(const CameraConfigData *cameraConfigData) { /* * Initialize orientation and facing side of the camera. * * If the libcamera::Camera provides those information as retrieved * from firmware use them, otherwise fallback to values parsed from * the configuration file. If the configuration file is not available * the camera is external so its location and rotation can be safely * defaulted. */ const ControlList &properties = camera_->properties(); const auto &location = properties.get(properties::Location); if (location) { switch (*location) { case properties::CameraLocationFront: facing_ = CAMERA_FACING_FRONT; break; case properties::CameraLocationBack: facing_ = CAMERA_FACING_BACK; break; case properties::CameraLocationExternal: /* * If the camera is reported as external, but the * CameraHalManager has overriden it, use what is * reported in the configuration file. This typically * happens for UVC cameras reported as 'External' by * libcamera but installed in fixed position on the * device. */ if (cameraConfigData && cameraConfigData->facing != -1) facing_ = cameraConfigData->facing; else facing_ = CAMERA_FACING_EXTERNAL; break; } if (cameraConfigData && cameraConfigData->facing != -1 && facing_ != cameraConfigData->facing) { LOG(HAL, Warning) << "Camera location does not match" << " configuration file. Using " << facing_; } } else if (cameraConfigData) { if (cameraConfigData->facing == -1) { LOG(HAL, Error) << "Camera facing not in configuration file"; return -EINVAL; } facing_ = cameraConfigData->facing; } else { facing_ = CAMERA_FACING_EXTERNAL; } /* * The Android orientation metadata specifies its rotation correction * value in clockwise direction whereas libcamera specifies the * rotation property in anticlockwise direction. Read the libcamera's * rotation property (anticlockwise) and compute the corresponding * value for clockwise direction as required by the Android orientation * metadata. 
*/ const auto &rotation = properties.get(properties::Rotation); if (rotation) { orientation_ = (360 - *rotation) % 360; if (cameraConfigData && cameraConfigData->rotation != -1 && orientation_ != cameraConfigData->rotation) { LOG(HAL, Warning) << "Camera orientation does not match" << " configuration file. Using " << orientation_; } } else if (cameraConfigData) { if (cameraConfigData->rotation == -1) { LOG(HAL, Error) << "Camera rotation not in configuration file"; return -EINVAL; } orientation_ = cameraConfigData->rotation; } else { orientation_ = 0; } return capabilities_.initialize(camera_, orientation_, facing_); } /* * Open a camera device. The static information on the camera shall have been * initialized with a call to CameraDevice::initialize(). */ int CameraDevice::open(const hw_module_t *hardwareModule) { int ret = camera_->acquire(); if (ret) { LOG(HAL, Error) << "Failed to acquire the camera"; return ret; } /* Initialize the hw_device_t in the instance camera3_module_t. */ camera3Device_.common.tag = HARDWARE_DEVICE_TAG; camera3Device_.common.version = CAMERA_DEVICE_API_VERSION_3_3; camera3Device_.common.module = (hw_module_t *)hardwareModule; camera3Device_.common.close = hal_dev_close; /* * The camera device operations. These actually implement * the Android Camera HALv3 interface. */ camera3Device_.ops = &hal_dev_ops; camera3Device_.priv = this; return 0; } void CameraDevice::close() { stop(); camera_->release(); } void CameraDevice::flush() { { MutexLocker stateLock(stateMutex_); if (state_ != State::Running) return; state_ = State::Flushing; } camera_->stop(); MutexLocker stateLock(stateMutex_); state_ = State::Stopped; } void CameraDevice::stop() { MutexLocker stateLock(stateMutex_); camera_->stop(); { MutexLocker descriptorsLock(descriptorsMutex_); descriptors_ = {}; } streams_.clear(); state_ = State::Stopped; } unsigned int CameraDevice::maxJpegBufferSize() const { return capabilities_.maxJpegBufferSize(); } void CameraDevice::setCallbacks(const camera3_callback_ops_t *callbacks) { callbacks_ = callbacks; } const camera_metadata_t *CameraDevice::getStaticMetadata() { return capabilities_.staticMetadata()->getMetadata(); } /* * Produce a metadata pack to be used as a template for a capture request. */ const camera_metadata_t *CameraDevice::constructDefaultRequestSettings(int type) { auto it = requestTemplates_.find(type); if (it != requestTemplates_.end()) return it->second->getMetadata(); /* Use the capture intent matching the requested template type. */ std::unique_ptr<CameraMetadata> requestTemplate; uint8_t captureIntent; switch (type) { case CAMERA3_TEMPLATE_PREVIEW: captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW; requestTemplate = capabilities_.requestTemplatePreview(); break; case CAMERA3_TEMPLATE_STILL_CAPTURE: /* * Use the preview template for still capture; they only differ * in the torch mode, which we currently do not support.
*/ captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE; requestTemplate = capabilities_.requestTemplateStill(); break; case CAMERA3_TEMPLATE_VIDEO_RECORD: captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD; requestTemplate = capabilities_.requestTemplateVideo(); break; case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT: captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT; requestTemplate = capabilities_.requestTemplateVideo(); break; case CAMERA3_TEMPLATE_MANUAL: captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL; requestTemplate = capabilities_.requestTemplateManual(); break; /* \todo Implement template generation for the remaining use cases. */ case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG: default: LOG(HAL, Error) << "Unsupported template request type: " << type; return nullptr; } if (!requestTemplate || !requestTemplate->isValid()) { LOG(HAL, Error) << "Failed to construct request template"; return nullptr; } requestTemplate->updateEntry(ANDROID_CONTROL_CAPTURE_INTENT, captureIntent); requestTemplates_[type] = std::move(requestTemplate); return requestTemplates_[type]->getMetadata(); } /* * Inspect the stream_list to produce a list of StreamConfiguration to * be used to configure the Camera. */ int CameraDevice::configureStreams(camera3_stream_configuration_t *stream_list) { /* Before any configuration attempt, stop the camera. */ stop(); if (stream_list->num_streams == 0) { LOG(HAL, Error) << "No streams in configuration"; return -EINVAL; } #if defined(OS_CHROMEOS) if (!validateCropRotate(*stream_list)) return -EINVAL; #endif /* * Generate an empty configuration, and construct a StreamConfiguration * for each camera3_stream to add to it. */ std::unique_ptr<CameraConfiguration> config = camera_->generateConfiguration(); if (!config) { LOG(HAL, Error) << "Failed to generate camera configuration"; return -EINVAL; } /* * Clear and remove any existing configuration from previous calls, and * ensure the required entries are available without further * reallocation. */ streams_.clear(); streams_.reserve(stream_list->num_streams); std::vector<Camera3StreamConfig> streamConfigs; streamConfigs.reserve(stream_list->num_streams); /* First handle all non-MJPEG streams. */ camera3_stream_t *jpegStream = nullptr; for (unsigned int i = 0; i < stream_list->num_streams; ++i) { camera3_stream_t *stream = stream_list->streams[i]; Size size(stream->width, stream->height); PixelFormat format = capabilities_.toPixelFormat(stream->format); LOG(HAL, Info) << "Stream #" << i << ", direction: " << directionToString(stream->stream_type) << ", width: " << stream->width << ", height: " << stream->height << ", format: " << utils::hex(stream->format) << ", rotation: " << rotationToString(stream->rotation) #if defined(OS_CHROMEOS) << ", crop_rotate_scale_degrees: " << rotationToString(stream->crop_rotate_scale_degrees) #endif << " (" << format << ")"; if (!format.isValid()) return -EINVAL; /* \todo Support rotation. */ if (stream->rotation != CAMERA3_STREAM_ROTATION_0) { LOG(HAL, Error) << "Rotation is not supported"; return -EINVAL; } #if defined(OS_CHROMEOS) if (stream->crop_rotate_scale_degrees != CAMERA3_STREAM_ROTATION_0) { LOG(HAL, Error) << "Rotation is not supported"; return -EINVAL; } #endif /* Defer handling of MJPEG streams until all others are known.
*/ if (stream->format == HAL_PIXEL_FORMAT_BLOB) { if (jpegStream) { LOG(HAL, Error) << "Multiple JPEG streams are not supported"; return -EINVAL; } jpegStream = stream; continue; } /* * While gralloc usage flags are supposed to report usage * patterns to select a suitable buffer allocation strategy, in * practice they're also used to make other decisions, such as * selecting the actual format for the IMPLEMENTATION_DEFINED * HAL pixel format. To avoid issues, we thus have to set the * GRALLOC_USAGE_HW_CAMERA_WRITE flag unconditionally, even for * streams that will be produced in software. */ stream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE; /* * If a CameraStream with the same size and format as the * current stream has already been requested, associate the two. */ auto iter = std::find_if( streamConfigs.begin(), streamConfigs.end(), [&size, &format](const Camera3StreamConfig &streamConfig) { return streamConfig.config.size == size && streamConfig.config.pixelFormat == format; }); if (iter != streamConfigs.end()) { /* Add usage to copy the buffer in streams[0] to stream. */ iter->streams[0].stream->usage |= GRALLOC_USAGE_SW_READ_OFTEN; stream->usage |= GRALLOC_USAGE_SW_WRITE_OFTEN; iter->streams.push_back({ stream, CameraStream::Type::Mapped }); continue; } Camera3StreamConfig streamConfig; streamConfig.streams = { { stream, CameraStream::Type::Direct } }; streamConfig.config.size = size; streamConfig.config.pixelFormat = format; streamConfigs.push_back(std::move(streamConfig)); } /* Now handle the MJPEG streams, adding a new stream if required. */ if (jpegStream) { CameraStream::Type type; int index = -1; /* Search for a compatible stream in the non-JPEG ones. */ for (size_t i = 0; i < streamConfigs.size(); ++i) { Camera3StreamConfig &streamConfig = streamConfigs[i]; const auto &cfg = streamConfig.config; /* * \todo The PixelFormat must also be compatible with * the encoder. */ if (cfg.size.width != jpegStream->width || cfg.size.height != jpegStream->height) continue; LOG(HAL, Info) << "Android JPEG stream mapped to libcamera stream " << i; type = CameraStream::Type::Mapped; index = i; /* * The source stream will be read by software to * produce the JPEG stream. */ camera3_stream_t *stream = streamConfig.streams[0].stream; stream->usage |= GRALLOC_USAGE_SW_READ_OFTEN; break; } /* * Without a compatible match for JPEG encoding we must * introduce a new stream to satisfy the request requirements. */ if (index < 0) { /* * \todo The pixelFormat should be a 'best-fit' choice * and may require a validation cycle. This is not yet * handled, and should be considered as part of any * stream configuration reworks. */ Camera3StreamConfig streamConfig; streamConfig.config.size.width = jpegStream->width; streamConfig.config.size.height = jpegStream->height; streamConfig.config.pixelFormat = formats::NV12; /* * Log before the configuration is moved into the * vector, as the moved-from object shall not be * accessed afterwards. */ LOG(HAL, Info) << "Adding " << streamConfig.config.toString() << " for MJPEG support"; streamConfigs.push_back(std::move(streamConfig)); type = CameraStream::Type::Internal; index = streamConfigs.size() - 1; } /* The JPEG stream will be produced by software.
*/ jpegStream->usage |= GRALLOC_USAGE_SW_WRITE_OFTEN; streamConfigs[index].streams.push_back({ jpegStream, type }); } sortCamera3StreamConfigs(streamConfigs, jpegStream); for (const auto &streamConfig : streamConfigs) { config->addConfiguration(streamConfig.config); CameraStream *sourceStream = nullptr; for (auto &stream : streamConfig.streams) { streams_.emplace_back(this, config.get(), stream.type, stream.stream, sourceStream, config->size() - 1); stream.stream->priv = static_cast<void *>(&streams_.back()); /* * The streamConfig.streams vector contains as its first * element a Direct (or Internal) stream, and then an * optional set of Mapped streams derived from the * Direct stream. Cache the Direct stream pointer, to * be used when constructing the subsequent mapped * streams. */ if (stream.type == CameraStream::Type::Direct) sourceStream = &streams_.back(); } } switch (config->validate()) { case CameraConfiguration::Valid: break; case CameraConfiguration::Adjusted: LOG(HAL, Info) << "Camera configuration adjusted"; for (const StreamConfiguration &cfg : *config) LOG(HAL, Info) << " - " << cfg.toString(); return -EINVAL; case CameraConfiguration::Invalid: LOG(HAL, Info) << "Camera configuration invalid"; return -EINVAL; } /* * Once the CameraConfiguration has been adjusted/validated * it can be applied to the camera. */ int ret = camera_->configure(config.get()); if (ret) { LOG(HAL, Error) << "Failed to configure camera '" << camera_->id() << "'"; return ret; } /* * Configure the HAL CameraStream instances using the associated * StreamConfiguration and set the number of required buffers in * the Android camera3_stream_t. */ for (CameraStream &cameraStream : streams_) { ret = cameraStream.configure(); if (ret) { LOG(HAL, Error) << "Failed to configure camera stream"; return ret; } } config_ = std::move(config); return 0; } std::unique_ptr<HALFrameBuffer> CameraDevice::createFrameBuffer(const buffer_handle_t camera3buffer, PixelFormat pixelFormat, const Size &size) { CameraBuffer buf(camera3buffer, pixelFormat, size, PROT_READ); if (!buf.isValid()) { LOG(HAL, Fatal) << "Failed to create CameraBuffer"; return nullptr; } std::vector<FrameBuffer::Plane> planes(buf.numPlanes()); for (size_t i = 0; i < buf.numPlanes(); ++i) { SharedFD fd{ camera3buffer->data[i] }; if (!fd.isValid()) { LOG(HAL, Fatal) << "No valid fd"; return nullptr; } planes[i].fd = fd; planes[i].offset = buf.offset(i); planes[i].length = buf.size(i); } return std::make_unique<HALFrameBuffer>(planes, camera3buffer); } int CameraDevice::processControls(Camera3RequestDescriptor *descriptor) { const CameraMetadata &settings = descriptor->settings_; if (!settings.isValid()) return 0; /* Translate the Android request settings to libcamera controls. 
*/ ControlList &controls = descriptor->request_->controls(); camera_metadata_ro_entry_t entry; if (settings.getEntry(ANDROID_SCALER_CROP_REGION, &entry)) { const int32_t *data = entry.data.i32; Rectangle cropRegion{ data[0], data[1], static_cast<unsigned int>(data[2]), static_cast<unsigned int>(data[3]) }; controls.set(controls::ScalerCrop, cropRegion); } if (settings.getEntry(ANDROID_SENSOR_TEST_PATTERN_MODE, &entry)) { const int32_t data = *entry.data.i32; int32_t testPatternMode = controls::draft::TestPatternModeOff; switch (data) { case ANDROID_SENSOR_TEST_PATTERN_MODE_OFF: testPatternMode = controls::draft::TestPatternModeOff; break; case ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR: testPatternMode = controls::draft::TestPatternModeSolidColor; break; case ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS: testPatternMode = controls::draft::TestPatternModeColorBars; break; case ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY: testPatternMode = controls::draft::TestPatternModeColorBarsFadeToGray; break; case ANDROID_SENSOR_TEST_PATTERN_MODE_PN9: testPatternMode = controls::draft::TestPatternModePn9; break; case ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1: testPatternMode = controls::draft::TestPatternModeCustom1; break; default: LOG(HAL, Error) << "Unknown test pattern mode: " << data; return -EINVAL; } controls.set(controls::draft::TestPatternMode, testPatternMode); } return 0; } void CameraDevice::abortRequest(Camera3RequestDescriptor *descriptor) const { notifyError(descriptor->frameNumber_, nullptr, CAMERA3_MSG_ERROR_REQUEST); for (auto &buffer : descriptor->buffers_) buffer.status = Camera3RequestDescriptor::Status::Error; descriptor->status_ = Camera3RequestDescriptor::Status::Error; } bool CameraDevice::isValidRequest(camera3_capture_request_t *camera3Request) const { if (!camera3Request) { LOG(HAL, Error) << "No capture request provided"; return false; } if (!camera3Request->num_output_buffers || !camera3Request->output_buffers) { LOG(HAL, Error) << "No output buffers provided"; return false; } /* configureStreams() has not been called or has failed. 
*/ if (streams_.empty() || !config_) { LOG(HAL, Error) << "No stream is configured"; return false; } for (uint32_t i = 0; i < camera3Request->num_output_buffers; i++) { const camera3_stream_buffer_t &outputBuffer = camera3Request->output_buffers[i]; if (!outputBuffer.buffer || !(*outputBuffer.buffer)) { LOG(HAL, Error) << "Invalid native handle"; return false; } const native_handle_t *handle = *outputBuffer.buffer; constexpr int kNativeHandleMaxFds = 1024; if (handle->numFds < 0 || handle->numFds > kNativeHandleMaxFds) { LOG(HAL, Error) << "Invalid number of fds (" << handle->numFds << ") in buffer " << i; return false; } constexpr int kNativeHandleMaxInts = 1024; if (handle->numInts < 0 || handle->numInts > kNativeHandleMaxInts) { LOG(HAL, Error) << "Invalid number of ints (" << handle->numInts << ") in buffer " << i; return false; } const camera3_stream *camera3Stream = outputBuffer.stream; if (!camera3Stream) return false; const CameraStream *cameraStream = static_cast<CameraStream *>(camera3Stream->priv); auto found = std::find_if(streams_.begin(), streams_.end(), [cameraStream](const CameraStream &stream) { return &stream == cameraStream; }); if (found == streams_.end()) { LOG(HAL, Error) << "No corresponding configured stream found"; return false; } } return true; } int CameraDevice::processCaptureRequest(camera3_capture_request_t *camera3Request) { if (!isValidRequest(camera3Request)) return -EINVAL; /* * Save the request descriptors for use at completion time. * The descriptor and the associated memory reserved here are freed * at request complete time. */ auto descriptor = std::make_unique<Camera3RequestDescriptor>(camera_.get(), camera3Request); /* * \todo The Android request model is incremental, settings passed in * previous requests are to be effective until overridden explicitly in * a new request. Do we need to cache settings incrementally here, or is * it handled by the Android camera service ? */ if (camera3Request->settings) lastSettings_ = camera3Request->settings; descriptor->settings_ = lastSettings_; LOG(HAL, Debug) << "Queueing request " << descriptor->request_->cookie() << " with " << descriptor->buffers_.size() << " streams"; /* * Process all the Direct and Internal streams first, they map directly * to a libcamera stream. Streams of type Mapped will be handled later. * * Collect the CameraStream associated to each requested capture stream. * Since requestedStreams is an std:set<>, no duplications can happen. */ std::set<CameraStream *> requestedStreams; for (const auto &[i, buffer] : utils::enumerate(descriptor->buffers_)) { CameraStream *cameraStream = buffer.stream; camera3_stream_t *camera3Stream = cameraStream->camera3Stream(); std::stringstream ss; ss << i << " - (" << camera3Stream->width << "x" << camera3Stream->height << ")" << "[" << utils::hex(camera3Stream->format) << "] -> " << "(" << cameraStream->configuration().size << ")[" << cameraStream->configuration().pixelFormat << "]"; /* * Inspect the camera stream type, create buffers opportunely * and add them to the Request if required. */ FrameBuffer *frameBuffer = nullptr; UniqueFD acquireFence; MutexLocker lock(descriptor->streamsProcessMutex_); switch (cameraStream->type()) { case CameraStream::Type::Mapped: /* Mapped streams will be handled in the next loop. */ continue; case CameraStream::Type::Direct: /* * Create a libcamera buffer using the dmabuf * descriptors of the camera3Buffer for each stream and * associate it with the Camera3RequestDescriptor for * lifetime management only. 
*/ buffer.frameBuffer = createFrameBuffer(*buffer.camera3Buffer, cameraStream->configuration().pixelFormat, cameraStream->configuration().size); frameBuffer = buffer.frameBuffer.get(); acquireFence = std::move(buffer.fence); LOG(HAL, Debug) << ss.str() << " (direct)"; break; case CameraStream::Type::Internal: /* * Get the frame buffer from the CameraStream internal * buffer pool. * * The buffer has to be returned to the CameraStream * once it has been processed. */ frameBuffer = cameraStream->getBuffer(); buffer.internalBuffer = frameBuffer; LOG(HAL, Debug) << ss.str() << " (internal)"; descriptor->pendingStreamsToProcess_.insert( { cameraStream, &buffer }); break; } if (!frameBuffer) { LOG(HAL, Error) << "Failed to create frame buffer"; return -ENOMEM; } auto fence = std::make_unique<Fence>(std::move(acquireFence)); descriptor->request_->addBuffer(cameraStream->stream(), frameBuffer, std::move(fence)); requestedStreams.insert(cameraStream); } /* * Now handle the Mapped streams. If no buffer has been added for them * because their corresponding direct source stream is not part of this * particular request, add one here. */ for (const auto &[i, buffer] : utils::enumerate(descriptor->buffers_)) { CameraStream *cameraStream = buffer.stream; camera3_stream_t *camera3Stream = cameraStream->camera3Stream(); if (cameraStream->type() != CameraStream::Type::Mapped) continue; LOG(HAL, Debug) << i << " - (" << camera3Stream->width << "x" << camera3Stream->height << ")" << "[" << utils::hex(camera3Stream->format) << "] -> " << "(" << cameraStream->configuration().size << ")[" << cameraStream->configuration().pixelFormat << "]" << " (mapped)"; MutexLocker lock(descriptor->streamsProcessMutex_); descriptor->pendingStreamsToProcess_.insert({ cameraStream, &buffer }); /* * Make sure the CameraStream this stream is mapped on has been * added to the request. */ CameraStream *sourceStream = cameraStream->sourceStream(); ASSERT(sourceStream); if (requestedStreams.find(sourceStream) != requestedStreams.end()) continue; /* * If that's not the case, we need to add a buffer to the request * for this stream. */ FrameBuffer *frameBuffer = cameraStream->getBuffer(); buffer.internalBuffer = frameBuffer; descriptor->request_->addBuffer(sourceStream->stream(), frameBuffer, nullptr); requestedStreams.insert(sourceStream); } /* * Translate controls from Android to libcamera and queue the request * to the camera. */ int ret = processControls(descriptor.get()); if (ret) return ret; /* * If flush is in progress set the request status to error and place it * on the queue to be later completed. If the camera has been stopped we * have to re-start it to be able to process the request. 
*/ MutexLocker stateLock(stateMutex_); if (state_ == State::Flushing) { Camera3RequestDescriptor *rawDescriptor = descriptor.get(); { MutexLocker descriptorsLock(descriptorsMutex_); descriptors_.push(std::move(descriptor)); } abortRequest(rawDescriptor); completeDescriptor(rawDescriptor); return 0; } if (state_ == State::Stopped) { lastSettings_ = {}; ret = camera_->start(); if (ret) { LOG(HAL, Error) << "Failed to start camera"; return ret; } state_ = State::Running; } Request *request = descriptor->request_.get(); { MutexLocker descriptorsLock(descriptorsMutex_); descriptors_.push(std::move(descriptor)); } camera_->queueRequest(request); return 0; } void CameraDevice::requestComplete(Request *request) { Camera3RequestDescriptor *descriptor = reinterpret_cast<Camera3RequestDescriptor *>(request->cookie()); /* * Prepare the capture result for the Android camera stack. * * The buffer status is set to Success and later changed to Error if * post-processing/compression fails. */ for (auto &buffer : descriptor->buffers_) { CameraStream *stream = buffer.stream; /* * Streams of type Direct have been queued to the * libcamera::Camera and their acquire fences have * already been waited on by the library. * * Acquire fences of streams of type Internal and Mapped * will be handled during post-processing. */ if (stream->type() == CameraStream::Type::Direct) { /* If handling of the fence has failed, restore buffer.fence. */ std::unique_ptr<Fence> fence = buffer.frameBuffer->releaseFence(); if (fence) buffer.fence = fence->release(); } buffer.status = Camera3RequestDescriptor::Status::Success; } /* * If the Request has failed, abort the request by notifying the error * and complete the request with all buffers in error state. */ if (request->status() != Request::RequestComplete) { LOG(HAL, Error) << "Request " << request->cookie() << " not successfully completed: " << request->status(); abortRequest(descriptor); completeDescriptor(descriptor); return; } /* * Notify shutter as soon as we have verified we have a valid request. * * \todo The shutter event notification should be sent to the framework * as soon as possible, earlier than request completion time. */ uint64_t sensorTimestamp = static_cast<uint64_t>(request->metadata() .get(controls::SensorTimestamp) .value_or(0)); notifyShutter(descriptor->frameNumber_, sensorTimestamp); LOG(HAL, Debug) << "Request " << request->cookie() << " completed with " << descriptor->request_->buffers().size() << " streams"; /* * Generate the metadata associated with the captured buffers. * * Notify if the metadata generation has failed, but continue processing * buffers and return an empty metadata pack. */ descriptor->resultMetadata_ = getResultMetadata(*descriptor); if (!descriptor->resultMetadata_) { notifyError(descriptor->frameNumber_, nullptr, CAMERA3_MSG_ERROR_RESULT); /* * The camera framework expects an empty metadata pack on error. * * \todo Check that the post-processor code handles this situation * correctly. */ descriptor->resultMetadata_ = std::make_unique<CameraMetadata>(0, 0); } /* Handle post-processing. */ MutexLocker locker(descriptor->streamsProcessMutex_); /* * Queue all the post-processing stream requests at once. The completion * slot streamProcessingComplete() can only execute when we are out of * this critical section. This makes it possible to handle synchronous * errors right here.
*/ auto iter = descriptor->pendingStreamsToProcess_.begin(); while (iter != descriptor->pendingStreamsToProcess_.end()) { CameraStream *stream = iter->first; Camera3RequestDescriptor::StreamBuffer *buffer = iter->second; FrameBuffer *src = request->findBuffer(stream->stream()); if (!src) { LOG(HAL, Error) << "Failed to find a source stream buffer"; setBufferStatus(*buffer, Camera3RequestDescriptor::Status::Error); iter = descriptor->pendingStreamsToProcess_.erase(iter); continue; } buffer->srcBuffer = src; ++iter; int ret = stream->process(buffer); if (ret) { setBufferStatus(*buffer, Camera3RequestDescriptor::Status::Error); descriptor->pendingStreamsToProcess_.erase(stream); /* * If the framebuffer is internal to CameraStream return * it back now that we're done processing it. */ if (buffer->internalBuffer) stream->putBuffer(buffer->internalBuffer); } } if (descriptor->pendingStreamsToProcess_.empty()) { locker.unlock(); completeDescriptor(descriptor); } } /** * \brief Complete the Camera3RequestDescriptor * \param[in] descriptor The Camera3RequestDescriptor that has completed * * The function marks the Camera3RequestDescriptor as 'complete'. It shall be * called when all the streams in the Camera3RequestDescriptor have completed * capture (or have been generated via post-processing) and the request is ready * to be sent back to the framework. * * \context This function is \threadsafe. */ void CameraDevice::completeDescriptor(Camera3RequestDescriptor *descriptor) { MutexLocker lock(descriptorsMutex_); descriptor->complete_ = true; sendCaptureResults(); } /** * \brief Sequentially send capture results to the framework * * Iterate over the descriptors queue to send completed descriptors back to the * framework, in the same order as they have been queued. For each complete * descriptor, populate a locally-scoped camera3_capture_result_t from the * descriptor, send the capture result back by calling the * process_capture_result() callback, and remove the descriptor from the queue. * Stop iterating if the descriptor at the front of the queue is not complete. * * This function should never be called directly in the codebase. Use * completeDescriptor() instead. */ void CameraDevice::sendCaptureResults() { while (!descriptors_.empty() && !descriptors_.front()->isPending()) { auto descriptor = std::move(descriptors_.front()); descriptors_.pop(); camera3_capture_result_t captureResult = {}; captureResult.frame_number = descriptor->frameNumber_; if (descriptor->resultMetadata_) captureResult.result = descriptor->resultMetadata_->getMetadata(); std::vector<camera3_stream_buffer_t> resultBuffers; resultBuffers.reserve(descriptor->buffers_.size()); for (auto &buffer : descriptor->buffers_) { camera3_buffer_status status = CAMERA3_BUFFER_STATUS_ERROR; if (buffer.status == Camera3RequestDescriptor::Status::Success) status = CAMERA3_BUFFER_STATUS_OK; /* * Pass the buffer fence back to the camera framework as * a release fence. This instructs the framework to wait * on the acquire fence in case we haven't done so * ourselves for any reason. 
*/ resultBuffers.push_back({ buffer.stream->camera3Stream(), buffer.camera3Buffer, status, -1, buffer.fence.release() }); } captureResult.num_output_buffers = resultBuffers.size(); captureResult.output_buffers = resultBuffers.data(); if (descriptor->status_ == Camera3RequestDescriptor::Status::Success) captureResult.partial_result = 1; callbacks_->process_capture_result(callbacks_, &captureResult); } } void CameraDevice::setBufferStatus(Camera3RequestDescriptor::StreamBuffer &streamBuffer, Camera3RequestDescriptor::Status status) { streamBuffer.status = status; if (status != Camera3RequestDescriptor::Status::Success) { notifyError(streamBuffer.request->frameNumber_, streamBuffer.stream->camera3Stream(), CAMERA3_MSG_ERROR_BUFFER); /* Also set error status on entire request descriptor. */ streamBuffer.request->status_ = Camera3RequestDescriptor::Status::Error; } } /** * \brief Handle post-processing completion of a stream in a capture request * \param[in] streamBuffer The StreamBuffer for which processing is complete * \param[in] status Stream post-processing status * * This function is called from the post-processor's thread whenever a camera * stream has finished post processing. The corresponding entry is dropped from * the descriptor's pendingStreamsToProcess_ map. * * If the pendingStreamsToProcess_ map is then empty, all streams requiring to * be generated from post-processing have been completed. Mark the descriptor as * complete using completeDescriptor() in that case. */ void CameraDevice::streamProcessingComplete(Camera3RequestDescriptor::StreamBuffer *streamBuffer, Camera3RequestDescriptor::Status status) { setBufferStatus(*streamBuffer, status); /* * If the framebuffer is internal to CameraStream return it back now * that we're done processing it. */ if (streamBuffer->internalBuffer) streamBuffer->stream->putBuffer(streamBuffer->internalBuffer); Camera3RequestDescriptor *request = streamBuffer->request; { MutexLocker locker(request->streamsProcessMutex_); request->pendingStreamsToProcess_.erase(streamBuffer->stream); if (!request->pendingStreamsToProcess_.empty()) return; } completeDescriptor(streamBuffer->request); } std::string CameraDevice::logPrefix() const { return "'" + camera_->id() + "'"; } void CameraDevice::notifyShutter(uint32_t frameNumber, uint64_t timestamp) { camera3_notify_msg_t notify = {}; notify.type = CAMERA3_MSG_SHUTTER; notify.message.shutter.frame_number = frameNumber; notify.message.shutter.timestamp = timestamp; callbacks_->notify(callbacks_, &notify); } void CameraDevice::notifyError(uint32_t frameNumber, camera3_stream_t *stream, camera3_error_msg_code code) const { camera3_notify_msg_t notify = {}; notify.type = CAMERA3_MSG_ERROR; notify.message.error.error_stream = stream; notify.message.error.frame_number = frameNumber; notify.message.error.error_code = code; callbacks_->notify(callbacks_, &notify); } /* * Produce a set of fixed result metadata. */ std::unique_ptr<CameraMetadata> CameraDevice::getResultMetadata(const Camera3RequestDescriptor &descriptor) const { const ControlList &metadata = descriptor.request_->metadata(); const CameraMetadata &settings = descriptor.settings_; camera_metadata_ro_entry_t entry; bool found; /* * \todo Keep this in sync with the actual number of entries. * Currently: 40 entries, 156 bytes * * Reserve more space for the JPEG metadata set by the post-processor. 
* Currently: * ANDROID_JPEG_GPS_COORDINATES (double x 3) = 24 bytes * ANDROID_JPEG_GPS_PROCESSING_METHOD (byte x 32) = 32 bytes * ANDROID_JPEG_GPS_TIMESTAMP (int64) = 8 bytes * ANDROID_JPEG_SIZE (int32_t) = 4 bytes * ANDROID_JPEG_QUALITY (byte) = 1 byte * ANDROID_JPEG_ORIENTATION (int32_t) = 4 bytes * ANDROID_JPEG_THUMBNAIL_QUALITY (byte) = 1 byte * ANDROID_JPEG_THUMBNAIL_SIZE (int32 x 2) = 8 bytes * Total bytes for JPEG metadata: 82 */ std::unique_ptr<CameraMetadata> resultMetadata = std::make_unique<CameraMetadata>(88, 166); if (!resultMetadata->isValid()) { LOG(HAL, Error) << "Failed to allocate result metadata"; return nullptr; } /* * \todo The value of the results metadata copied from the settings * will have to be passed to the libcamera::Camera and extracted * from libcamera::Request::metadata. */ uint8_t value = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF; resultMetadata->addEntry(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, value); value = ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF; resultMetadata->addEntry(ANDROID_CONTROL_AE_ANTIBANDING_MODE, value); int32_t value32 = 0; resultMetadata->addEntry(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, value32); value = ANDROID_CONTROL_AE_LOCK_OFF; resultMetadata->addEntry(ANDROID_CONTROL_AE_LOCK, value); value = ANDROID_CONTROL_AE_MODE_ON; resultMetadata->addEntry(ANDROID_CONTROL_AE_MODE, value); if (settings.getEntry(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, &entry)) /* * \todo Retrieve the AE FPS range from the libcamera metadata. * As libcamera does not support that control, as a temporary * workaround return what the framework asked. */ resultMetadata->addEntry(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, entry.data.i32, 2); found = settings.getEntry(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &entry); value = found ? *entry.data.u8 : (uint8_t)ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE; resultMetadata->addEntry(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, value); value = ANDROID_CONTROL_AE_STATE_CONVERGED; resultMetadata->addEntry(ANDROID_CONTROL_AE_STATE, value); value = ANDROID_CONTROL_AF_MODE_OFF; resultMetadata->addEntry(ANDROID_CONTROL_AF_MODE, value); value = ANDROID_CONTROL_AF_STATE_INACTIVE; resultMetadata->addEntry(ANDROID_CONTROL_AF_STATE, value); value = ANDROID_CONTROL_AF_TRIGGER_IDLE; resultMetadata->addEntry(ANDROID_CONTROL_AF_TRIGGER, value); value = ANDROID_CONTROL_AWB_MODE_AUTO; resultMetadata->addEntry(ANDROID_CONTROL_AWB_MODE, value); value = ANDROID_CONTROL_AWB_LOCK_OFF; resultMetadata->addEntry(ANDROID_CONTROL_AWB_LOCK, value); value = ANDROID_CONTROL_AWB_STATE_CONVERGED; resultMetadata->addEntry(ANDROID_CONTROL_AWB_STATE, value); value = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW; resultMetadata->addEntry(ANDROID_CONTROL_CAPTURE_INTENT, value); value = ANDROID_CONTROL_EFFECT_MODE_OFF; resultMetadata->addEntry(ANDROID_CONTROL_EFFECT_MODE, value); value = ANDROID_CONTROL_MODE_AUTO; resultMetadata->addEntry(ANDROID_CONTROL_MODE, value); value = ANDROID_CONTROL_SCENE_MODE_DISABLED; resultMetadata->addEntry(ANDROID_CONTROL_SCENE_MODE, value); value = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF; resultMetadata->addEntry(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, value); value = ANDROID_FLASH_MODE_OFF; resultMetadata->addEntry(ANDROID_FLASH_MODE, value); value = ANDROID_FLASH_STATE_UNAVAILABLE; resultMetadata->addEntry(ANDROID_FLASH_STATE, value); if (settings.getEntry(ANDROID_LENS_APERTURE, &entry)) resultMetadata->addEntry(ANDROID_LENS_APERTURE, entry.data.f, 1); float focal_length = 1.0; resultMetadata->addEntry(ANDROID_LENS_FOCAL_LENGTH, focal_length); 
value = ANDROID_LENS_STATE_STATIONARY; resultMetadata->addEntry(ANDROID_LENS_STATE, value); value = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF; resultMetadata->addEntry(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, value); value32 = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF; resultMetadata->addEntry(ANDROID_SENSOR_TEST_PATTERN_MODE, value32); value = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF; resultMetadata->addEntry(ANDROID_STATISTICS_FACE_DETECT_MODE, value); value = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF; resultMetadata->addEntry(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, value); value = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF; resultMetadata->addEntry(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, value); value = ANDROID_STATISTICS_SCENE_FLICKER_NONE; resultMetadata->addEntry(ANDROID_STATISTICS_SCENE_FLICKER, value); value = ANDROID_NOISE_REDUCTION_MODE_OFF; resultMetadata->addEntry(ANDROID_NOISE_REDUCTION_MODE, value); /* 33.3 msec */ const int64_t rolling_shutter_skew = 33300000; resultMetadata->addEntry(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW, rolling_shutter_skew); /* Add metadata tags reported by libcamera. */ const int64_t timestamp = metadata.get(controls::SensorTimestamp).value_or(0); resultMetadata->addEntry(ANDROID_SENSOR_TIMESTAMP, timestamp); const auto &pipelineDepth = metadata.get(controls::draft::PipelineDepth); if (pipelineDepth) resultMetadata->addEntry(ANDROID_REQUEST_PIPELINE_DEPTH, *pipelineDepth); const auto &exposureTime = metadata.get(controls::ExposureTime); if (exposureTime) resultMetadata->addEntry(ANDROID_SENSOR_EXPOSURE_TIME, *exposureTime * 1000ULL); const auto &frameDuration = metadata.get(controls::FrameDuration); if (frameDuration) resultMetadata->addEntry(ANDROID_SENSOR_FRAME_DURATION, *frameDuration * 1000); const auto &scalerCrop = metadata.get(controls::ScalerCrop); if (scalerCrop) { const Rectangle &crop = *scalerCrop; int32_t cropRect[] = { crop.x, crop.y, static_cast<int32_t>(crop.width), static_cast<int32_t>(crop.height), }; resultMetadata->addEntry(ANDROID_SCALER_CROP_REGION, cropRect); } const auto &testPatternMode = metadata.get(controls::draft::TestPatternMode); if (testPatternMode) resultMetadata->addEntry(ANDROID_SENSOR_TEST_PATTERN_MODE, *testPatternMode); /* * Return the result metadata pack even if it is not valid: get() will * return nullptr. */ if (!resultMetadata->isValid()) { LOG(HAL, Error) << "Failed to construct result metadata"; } if (resultMetadata->resized()) { auto [entryCount, dataCount] = resultMetadata->usage(); LOG(HAL, Info) << "Result metadata resized: " << entryCount << " entries and " << dataCount << " bytes used"; } return resultMetadata; }
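To make the flow implemented above concrete, here is a minimal sketch of the HALv3 call sequence a client such as the Android camera service would drive against this class; `device`, `callbacks`, `framework_stream` and `framework_buffer` are hypothetical placeholders for objects the framework normally provides, and the gralloc buffer allocation they require is elided.

/*
 * Hypothetical driver of the CameraDevice API: configure one stream,
 * then queue a single preview request. Results arrive asynchronously
 * through callbacks->process_capture_result().
 */
device->setCallbacks(callbacks);

camera3_stream_t *streams[] = { &framework_stream };
camera3_stream_configuration_t streamList = {};
streamList.num_streams = 1;
streamList.streams = streams;
if (device->configureStreams(&streamList) < 0)
	return;

camera3_capture_request_t request = {};
request.frame_number = 0;
request.settings = device->constructDefaultRequestSettings(CAMERA3_TEMPLATE_PREVIEW);
request.num_output_buffers = 1;
request.output_buffers = &framework_buffer;
device->processCaptureRequest(&request);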
0
repos/libcamera/src
repos/libcamera/src/android/camera_stream.h
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2020, Google Inc. * * Camera HAL stream */ #pragma once #include <memory> #include <queue> #include <vector> #include <hardware/camera3.h> #include <libcamera/base/mutex.h> #include <libcamera/base/thread.h> #include <libcamera/camera.h> #include <libcamera/framebuffer.h> #include <libcamera/geometry.h> #include <libcamera/pixel_format.h> #include "camera_request.h" #include "post_processor.h" class CameraDevice; class PlatformFrameBufferAllocator; class CameraStream { public: /* * Enumeration of CameraStream types. * * A camera stream associates an Android stream to a libcamera stream. * This enumeration describes how the two streams are associated and how * and where data produced from libcamera are delivered to the * Android framework. * * Direct: * * The Android stream is directly mapped onto a libcamera stream: frames * are delivered by the library directly in the memory location * specified by the Android stream (buffer_handle_t->data) and provided * to the framework as they are. The Android stream characteristics are * directly translated to the libcamera stream configuration. * * +-----+ +-----+ * | A | | L | * +-----+ +-----+ * | | * V V * +-----+ +------+ * | B |<---------------| FB | * +-----+ +------+ * * * Internal: * * Data for the Android stream is produced by processing a libcamera * stream created by the HAL for that purpose. The libcamera stream * needs to be supplied with intermediate buffers where the library * delivers frames to be processed and then provided to the framework. * The libcamera stream configuration is not a direct translation of the * Android stream characteristics, but it describes the format and size * required for the processing procedure to produce frames in the * Android required format. * * +-----+ +-----+ * | A | | L | * +-----+ +-----+ * | | * V V * +-----+ +------+ * | B | | FB | * +-----+ +------+ * ^ | * |-------Processing------| * * * Mapped: * * Data for the Android stream is produced by processing a libcamera * stream associated with another CameraStream. Mapped camera streams do * not need any memory to be reserved for them as they process data * produced by libcamera for a different stream whose format and size * are compatible with the processing procedure requirements to produce * frames in the Android required format. 
* * +-----+ +-----+ +-----+ * | A | | A' | | L | * +-----+ +-----+ +-----+ * | | | * V V V * +-----+ +-----+ +------+ * | B | | B' |<---------| FB | * +-----+ +-----+ +------+ * ^ | * |--Processing--| * * * -------------------------------------------------------------------- * A = Android stream * L = libcamera stream * B = memory buffer * FB = libcamera FrameBuffer * "Processing" = Frame processing procedure (Encoding, scaling etc) */ enum class Type { Direct, Internal, Mapped, }; CameraStream(CameraDevice *const cameraDevice, libcamera::CameraConfiguration *config, Type type, camera3_stream_t *camera3Stream, CameraStream *const sourceStream, unsigned int index); CameraStream(CameraStream &&other); ~CameraStream(); Type type() const { return type_; } camera3_stream_t *camera3Stream() const { return camera3Stream_; } const libcamera::StreamConfiguration &configuration() const; libcamera::Stream *stream() const; CameraStream *sourceStream() const { return sourceStream_; } int configure(); int process(Camera3RequestDescriptor::StreamBuffer *streamBuffer); libcamera::FrameBuffer *getBuffer(); void putBuffer(libcamera::FrameBuffer *buffer); void flush(); private: class PostProcessorWorker : public libcamera::Thread { public: enum class State { Stopped, Running, Flushing, }; PostProcessorWorker(PostProcessor *postProcessor); ~PostProcessorWorker(); void start(); void queueRequest(Camera3RequestDescriptor::StreamBuffer *request); void flush(); protected: void run() override; private: PostProcessor *postProcessor_; libcamera::Mutex mutex_; libcamera::ConditionVariable cv_; std::queue<Camera3RequestDescriptor::StreamBuffer *> requests_ LIBCAMERA_TSA_GUARDED_BY(mutex_); State state_ LIBCAMERA_TSA_GUARDED_BY(mutex_) = State::Stopped; }; int waitFence(int fence); CameraDevice *const cameraDevice_; const libcamera::CameraConfiguration *config_; const Type type_; camera3_stream_t *camera3Stream_; CameraStream *const sourceStream_; const unsigned int index_; std::unique_ptr<PlatformFrameBufferAllocator> allocator_; std::vector<std::unique_ptr<libcamera::FrameBuffer>> allocatedBuffers_; std::vector<libcamera::FrameBuffer *> buffers_ LIBCAMERA_TSA_GUARDED_BY(mutex_); /* * The class has to be MoveConstructible as instances are stored in * an std::vector in CameraDevice. */ std::unique_ptr<libcamera::Mutex> mutex_; std::unique_ptr<PostProcessor> postProcessor_; std::unique_ptr<PostProcessorWorker> worker_; };
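The constructor parameters map onto the three types described above as follows; this is a minimal sketch with `device`, `config`, `yuvStream` and `jpegStream` as hypothetical placeholders (CameraDevice::configureStreams() performs the real construction).

/*
 * Sketch of how the stream types are typically instantiated. The vector
 * is reserved up front so the sourceStream pointer taken below is not
 * invalidated by reallocation, mirroring what CameraDevice does.
 */
std::vector<CameraStream> streams;
streams.reserve(2);

/* Direct: the Android stream is backed 1:1 by a libcamera stream. */
streams.emplace_back(device, config, CameraStream::Type::Direct,
		     yuvStream, nullptr, 0);

/* Mapped: a JPEG stream post-processed from the Direct stream above. */
streams.emplace_back(device, config, CameraStream::Type::Mapped,
		     jpegStream, &streams[0], 0);

for (CameraStream &stream : streams)
	stream.configure();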
0
repos/libcamera/src
repos/libcamera/src/android/camera_hal_manager.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2019, Google Inc. * * libcamera Android Camera Manager */ #include "camera_hal_manager.h" #include <libcamera/base/log.h> #include <libcamera/camera.h> #include <libcamera/property_ids.h> #include "camera_device.h" using namespace libcamera; LOG_DECLARE_CATEGORY(HAL) /* * \class CameraHalManager * * The HAL camera manager is initialized at camera_module_t 'hal_init()' time * and spawns its own thread where libcamera-related events are dispatched to. * It wraps the libcamera CameraManager operations and provides helpers for the * camera_module_t operations, to retrieve the number of cameras in the system, * their static information and to open camera devices. */ CameraHalManager::CameraHalManager() : cameraManager_(nullptr), callbacks_(nullptr), numInternalCameras_(0), nextExternalCameraId_(firstExternalCameraId_) { } /* CameraManager calls stop() in the destructor. */ CameraHalManager::~CameraHalManager() = default; /* static */ CameraHalManager *CameraHalManager::instance() { static CameraHalManager *cameraHalManager = new CameraHalManager; return cameraHalManager; } int CameraHalManager::init() { cameraManager_ = std::make_unique<CameraManager>(); /* * If the configuration file is not available the HAL only supports * external cameras. If it exists but is not valid, then error out. */ if (halConfig_.exists() && !halConfig_.isValid()) { LOG(HAL, Error) << "HAL configuration file is not valid"; return -EINVAL; } /* Support camera hotplug. */ cameraManager_->cameraAdded.connect(this, &CameraHalManager::cameraAdded); cameraManager_->cameraRemoved.connect(this, &CameraHalManager::cameraRemoved); int ret = cameraManager_->start(); if (ret) { LOG(HAL, Error) << "Failed to start camera manager: " << strerror(-ret); cameraManager_.reset(); return ret; } return 0; } std::tuple<CameraDevice *, int> CameraHalManager::open(unsigned int id, const hw_module_t *hardwareModule) { MutexLocker locker(mutex_); if (!callbacks_) { LOG(HAL, Error) << "Can't open camera before callbacks are set"; return { nullptr, -ENODEV }; } CameraDevice *camera = cameraDeviceFromHalId(id); if (!camera) { LOG(HAL, Error) << "Invalid camera id '" << id << "'"; return { nullptr, -ENODEV }; } int ret = camera->open(hardwareModule); if (ret) return { nullptr, ret }; LOG(HAL, Info) << "Open camera '" << id << "'"; return { camera, 0 }; } void CameraHalManager::cameraAdded(std::shared_ptr<Camera> cam) { unsigned int id; bool isCameraExternal = false; bool isCameraNew = false; MutexLocker locker(mutex_); /* * Each camera is assigned a unique integer ID when it is seen for the * first time. If the camera has been seen before, the previous ID is * re-used. * * IDs start from '0' for internal cameras and '1000' for external * cameras. */ auto iter = cameraIdsMap_.find(cam->id()); if (iter != cameraIdsMap_.end()) { id = iter->second; if (id >= firstExternalCameraId_) isCameraExternal = true; } else { isCameraNew = true; /* * Now check if this is an external camera and assign * its id accordingly. */ if (cameraLocation(cam.get()) == properties::CameraLocationExternal) { isCameraExternal = true; id = nextExternalCameraId_; } else { id = numInternalCameras_; } } /* * The configuration file must be valid, and contain a corresponding * entry for internal cameras. External cameras can be initialized * without a configuration file. */ if (!isCameraExternal && !halConfig_.exists()) { LOG(HAL, Error) << "HAL configuration file is mandatory for internal cameras."
<< " Camera " << cam->id() << " failed to load"; return; } const CameraConfigData *cameraConfigData = halConfig_.cameraConfigData(cam->id()); /* * Some cameras whose location is reported by libcamera as external may * actually be internal to the device. This is common with UVC cameras * that are integrated in a laptop. In that case the real location * should be specified in the configuration file. * * If the camera location is external and a configuration entry exists * for it, override its location. */ if (isCameraNew && isCameraExternal) { if (cameraConfigData && cameraConfigData->facing != -1) { isCameraExternal = false; id = numInternalCameras_; } } if (!isCameraExternal && !cameraConfigData) { LOG(HAL, Error) << "HAL configuration entry for internal camera " << cam->id() << " is missing"; return; } /* Create a CameraDevice instance to wrap the libcamera Camera. */ std::unique_ptr<CameraDevice> camera = CameraDevice::create(id, cam); int ret = camera->initialize(cameraConfigData); if (ret) { LOG(HAL, Error) << "Failed to initialize camera: " << cam->id(); return; } if (isCameraNew) { cameraIdsMap_.emplace(cam->id(), id); if (isCameraExternal) nextExternalCameraId_++; else numInternalCameras_++; } cameras_.emplace_back(std::move(camera)); if (callbacks_) callbacks_->camera_device_status_change(callbacks_, id, CAMERA_DEVICE_STATUS_PRESENT); LOG(HAL, Debug) << "Camera ID: " << id << " added successfully."; } void CameraHalManager::cameraRemoved(std::shared_ptr<Camera> cam) { MutexLocker locker(mutex_); auto iter = std::find_if(cameras_.begin(), cameras_.end(), [&cam](const std::unique_ptr<CameraDevice> &camera) { return cam == camera->camera(); }); if (iter == cameras_.end()) return; /* * CAMERA_DEVICE_STATUS_NOT_PRESENT should be set for external cameras * only. */ unsigned int id = (*iter)->id(); if (id >= firstExternalCameraId_) callbacks_->camera_device_status_change(callbacks_, id, CAMERA_DEVICE_STATUS_NOT_PRESENT); /* * \todo Check if the camera is already open and running. * Inform the framework about its absence before deleting its * reference here. */ cameras_.erase(iter); LOG(HAL, Debug) << "Camera ID: " << id << " removed successfully."; } int32_t CameraHalManager::cameraLocation(const Camera *cam) { return cam->properties().get(properties::Location).value_or(-1); } CameraDevice *CameraHalManager::cameraDeviceFromHalId(unsigned int id) { auto iter = std::find_if(cameras_.begin(), cameras_.end(), [id](const std::unique_ptr<CameraDevice> &camera) { return camera->id() == id; }); if (iter == cameras_.end()) return nullptr; return iter->get(); } unsigned int CameraHalManager::numCameras() const { return numInternalCameras_; } int CameraHalManager::getCameraInfo(unsigned int id, struct camera_info *info) { if (!info) return -EINVAL; MutexLocker locker(mutex_); CameraDevice *camera = cameraDeviceFromHalId(id); if (!camera) { LOG(HAL, Error) << "Invalid camera id '" << id << "'"; return -EINVAL; } info->facing = camera->facing(); info->orientation = camera->orientation(); info->device_version = CAMERA_DEVICE_API_VERSION_3_3; info->resource_cost = 0; info->static_camera_characteristics = camera->getStaticMetadata(); info->conflicting_devices = nullptr; info->conflicting_devices_length = 0; return 0; } void CameraHalManager::setCallbacks(const camera_module_callbacks_t *callbacks) { callbacks_ = callbacks; MutexLocker locker(mutex_); /* * Some external cameras may have been identified before the callbacks_ * were set. 
Iterate over all existing external cameras and mark them as * CAMERA_DEVICE_STATUS_PRESENT explicitly. * * Internal cameras are already assumed to be present at module load * time by the Android framework. */ for (const std::unique_ptr<CameraDevice> &camera : cameras_) { unsigned int id = camera->id(); if (id >= firstExternalCameraId_) callbacks_->camera_device_status_change(callbacks_, id, CAMERA_DEVICE_STATUS_PRESENT); } }
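A small sketch of the ID scheme documented in cameraAdded(); the exact value of firstExternalCameraId_ lives in camera_hal_manager.h and is assumed here only to separate the two ranges.

/*
 * Internal cameras are numbered densely from 0, external cameras from
 * firstExternalCameraId_ upwards. In the real code the counters are
 * only advanced once the camera is confirmed to be new.
 */
unsigned int assignHalId(bool isExternal, unsigned int &numInternal,
			 unsigned int &nextExternal)
{
	return isExternal ? nextExternal++ : numInternal++;
}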
0
repos/libcamera/src
repos/libcamera/src/android/camera_request.h
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2019-2021, Google Inc. * * libcamera Android Camera Request Descriptor */ #pragma once #include <map> #include <memory> #include <vector> #include <libcamera/base/class.h> #include <libcamera/base/mutex.h> #include <libcamera/base/unique_fd.h> #include <libcamera/camera.h> #include <libcamera/framebuffer.h> #include <hardware/camera3.h> #include "camera_metadata.h" #include "hal_framebuffer.h" class CameraBuffer; class CameraStream; class Camera3RequestDescriptor { public: enum class Status { Success, Error, }; struct StreamBuffer { StreamBuffer(CameraStream *stream, const camera3_stream_buffer_t &buffer, Camera3RequestDescriptor *request); ~StreamBuffer(); StreamBuffer(StreamBuffer &&); StreamBuffer &operator=(StreamBuffer &&); CameraStream *stream; buffer_handle_t *camera3Buffer; std::unique_ptr<HALFrameBuffer> frameBuffer; libcamera::UniqueFD fence; Status status = Status::Success; libcamera::FrameBuffer *internalBuffer = nullptr; const libcamera::FrameBuffer *srcBuffer = nullptr; std::unique_ptr<CameraBuffer> dstBuffer; Camera3RequestDescriptor *request; private: LIBCAMERA_DISABLE_COPY(StreamBuffer) }; /* Keeps track of streams requiring post-processing. */ std::map<CameraStream *, StreamBuffer *> pendingStreamsToProcess_ LIBCAMERA_TSA_GUARDED_BY(streamsProcessMutex_); libcamera::Mutex streamsProcessMutex_; Camera3RequestDescriptor(libcamera::Camera *camera, const camera3_capture_request_t *camera3Request); ~Camera3RequestDescriptor(); bool isPending() const { return !complete_; } uint32_t frameNumber_ = 0; std::vector<StreamBuffer> buffers_; CameraMetadata settings_; std::unique_ptr<libcamera::Request> request_; std::unique_ptr<CameraMetadata> resultMetadata_; bool complete_ = false; Status status_ = Status::Success; private: LIBCAMERA_DISABLE_COPY(Camera3RequestDescriptor) };
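The complete_/isPending() pair above exists to support in-order result delivery; the following is a minimal sketch of the draining loop that consumes it (requires <queue> in addition to this header's includes; the real logic lives in CameraDevice::sendCaptureResults()).

/* Send results for every leading descriptor that is no longer pending. */
void drainCompleted(std::queue<std::unique_ptr<Camera3RequestDescriptor>> &queue)
{
	while (!queue.empty() && !queue.front()->isPending()) {
		std::unique_ptr<Camera3RequestDescriptor> descriptor =
			std::move(queue.front());
		queue.pop();

		/* descriptor->resultMetadata_ and buffers_ are consumed here. */
	}
}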
0
repos/libcamera/src
repos/libcamera/src/android/post_processor.h
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2020, Google Inc. * * CameraStream Post Processing Interface */ #pragma once #include <libcamera/base/signal.h> #include <libcamera/framebuffer.h> #include <libcamera/stream.h> #include "camera_buffer.h" #include "camera_request.h" class PostProcessor { public: enum class Status { Error, Success }; virtual ~PostProcessor() = default; virtual int configure(const libcamera::StreamConfiguration &inCfg, const libcamera::StreamConfiguration &outCfg) = 0; virtual void process(Camera3RequestDescriptor::StreamBuffer *streamBuffer) = 0; libcamera::Signal<Camera3RequestDescriptor::StreamBuffer *, Status> processComplete; };
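A minimal sketch of an implementation of this interface, under a hypothetical class name; a real post-processor transforms streamBuffer->srcBuffer into streamBuffer->dstBuffer before emitting processComplete.

class PassThroughPostProcessor : public PostProcessor
{
public:
	int configure(const libcamera::StreamConfiguration &inCfg,
		      const libcamera::StreamConfiguration &outCfg) override
	{
		/* Only identical input and output configurations make sense here. */
		if (inCfg.pixelFormat != outCfg.pixelFormat ||
		    inCfg.size != outCfg.size)
			return -EINVAL;

		return 0;
	}

	void process(Camera3RequestDescriptor::StreamBuffer *streamBuffer) override
	{
		/* Nothing to transform; report success immediately. */
		processComplete.emit(streamBuffer, Status::Success);
	}
};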
0
repos/libcamera/src/android
repos/libcamera/src/android/yuv/post_processor_yuv.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2021, Google Inc. * * Post Processor using libyuv */ #include "post_processor_yuv.h" #include <libyuv/scale.h> #include <libcamera/base/log.h> #include <libcamera/formats.h> #include <libcamera/geometry.h> #include <libcamera/pixel_format.h> #include "libcamera/internal/formats.h" #include "libcamera/internal/mapped_framebuffer.h" using namespace libcamera; LOG_DEFINE_CATEGORY(YUV) int PostProcessorYuv::configure(const StreamConfiguration &inCfg, const StreamConfiguration &outCfg) { if (inCfg.pixelFormat != outCfg.pixelFormat) { LOG(YUV, Error) << "Pixel format conversion is not supported" << " (from " << inCfg.pixelFormat << " to " << outCfg.pixelFormat << ")"; return -EINVAL; } if (inCfg.size < outCfg.size) { LOG(YUV, Error) << "Up-scaling is not supported" << " (from " << inCfg.size << " to " << outCfg.size << ")"; return -EINVAL; } if (inCfg.pixelFormat != formats::NV12) { LOG(YUV, Error) << "Unsupported format " << inCfg.pixelFormat << " (only NV12 is supported)"; return -EINVAL; } calculateLengths(inCfg, outCfg); return 0; } void PostProcessorYuv::process(Camera3RequestDescriptor::StreamBuffer *streamBuffer) { const FrameBuffer &source = *streamBuffer->srcBuffer; CameraBuffer *destination = streamBuffer->dstBuffer.get(); if (!isValidBuffers(source, *destination)) { processComplete.emit(streamBuffer, PostProcessor::Status::Error); return; } const MappedFrameBuffer sourceMapped(&source, MappedFrameBuffer::MapFlag::Read); if (!sourceMapped.isValid()) { LOG(YUV, Error) << "Failed to mmap camera frame buffer"; processComplete.emit(streamBuffer, PostProcessor::Status::Error); return; } int ret = libyuv::NV12Scale(sourceMapped.planes()[0].data(), sourceStride_[0], sourceMapped.planes()[1].data(), sourceStride_[1], sourceSize_.width, sourceSize_.height, destination->plane(0).data(), destinationStride_[0], destination->plane(1).data(), destinationStride_[1], destinationSize_.width, destinationSize_.height, libyuv::FilterMode::kFilterBilinear); if (ret) { LOG(YUV, Error) << "Failed NV12 scaling: " << ret; processComplete.emit(streamBuffer, PostProcessor::Status::Error); return; } processComplete.emit(streamBuffer, PostProcessor::Status::Success); } bool PostProcessorYuv::isValidBuffers(const FrameBuffer &source, const CameraBuffer &destination) const { if (source.planes().size() != 2) { LOG(YUV, Error) << "Invalid number of source planes: " << source.planes().size(); return false; } if (destination.numPlanes() != 2) { LOG(YUV, Error) << "Invalid number of destination planes: " << destination.numPlanes(); return false; } if (source.planes()[0].length < sourceLength_[0] || source.planes()[1].length < sourceLength_[1]) { LOG(YUV, Error) << "The source plane lengths are too small, actual size: {" << source.planes()[0].length << ", " << source.planes()[1].length << "}, expected size: {" << sourceLength_[0] << ", " << sourceLength_[1] << "}"; return false; } if (destination.plane(0).size() < destinationLength_[0] || destination.plane(1).size() < destinationLength_[1]) { LOG(YUV, Error) << "The destination plane lengths are too small, actual size: {" << destination.plane(0).size() << ", " << destination.plane(1).size() << "}, expected size: {" << destinationLength_[0] << ", " << destinationLength_[1] << "}"; return false; } return true; } void PostProcessorYuv::calculateLengths(const StreamConfiguration &inCfg, const StreamConfiguration &outCfg) { sourceSize_ = inCfg.size; destinationSize_ = outCfg.size; const PixelFormatInfo &nv12Info =
PixelFormatInfo::info(formats::NV12); for (unsigned int i = 0; i < 2; i++) { sourceStride_[i] = inCfg.stride; destinationStride_[i] = nv12Info.stride(destinationSize_.width, i, 1); sourceLength_[i] = nv12Info.planeSize(sourceSize_.height, i, sourceStride_[i]); destinationLength_[i] = nv12Info.planeSize(destinationSize_.height, i, destinationStride_[i]); } }
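A worked example of the plane arithmetic in calculateLengths(), assuming a 1920x1080 NV12 configuration with a stride equal to the width (illustrative values, no padding).

/*
 * NV12 has two planes: full-resolution Y, and interleaved CbCr
 * subsampled by two in both directions (half the rows, same bytes per
 * row).
 *
 *   plane 0 (Y):  stride 1920, length 1920 * 1080     = 2073600 bytes
 *   plane 1 (UV): stride 1920, length 1920 * (1080/2) = 1036800 bytes
 *
 * The same stride applies to both planes because each CbCr pair covers
 * two pixel columns at one byte per column.
 */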
0
repos/libcamera/src/android
repos/libcamera/src/android/yuv/post_processor_yuv.h
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2021, Google Inc. * * Post Processor using libyuv */ #pragma once #include "../post_processor.h" #include <libcamera/geometry.h> class PostProcessorYuv : public PostProcessor { public: PostProcessorYuv() = default; int configure(const libcamera::StreamConfiguration &incfg, const libcamera::StreamConfiguration &outcfg) override; void process(Camera3RequestDescriptor::StreamBuffer *streamBuffer) override; private: bool isValidBuffers(const libcamera::FrameBuffer &source, const CameraBuffer &destination) const; void calculateLengths(const libcamera::StreamConfiguration &inCfg, const libcamera::StreamConfiguration &outCfg); libcamera::Size sourceSize_; libcamera::Size destinationSize_; unsigned int sourceLength_[2] = {}; unsigned int destinationLength_[2] = {}; unsigned int sourceStride_[2] = {}; unsigned int destinationStride_[2] = {}; };
0
repos/libcamera/src/android/data
repos/libcamera/src/android/data/soraka/camera_hal.yaml
# SPDX-License-Identifier: CC0-1.0 cameras: "\\_SB_.PCI0.I2C4.CAM1": location: front rotation: 0 "\\_SB_.PCI0.I2C2.CAM0": location: back rotation: 0
0
repos/libcamera/src/android/data
repos/libcamera/src/android/data/nautilus/camera_hal.yaml
# SPDX-License-Identifier: CC0-1.0 cameras: "\\_SB_.PCI0.I2C2.CAM0": location: back rotation: 0 "\\_SB_.PCI0.XHCI.RHUB.HS09-9:1.0-04f2:b647": location: front rotation: 0
0
repos/libcamera/src/android
repos/libcamera/src/android/jpeg/thumbnailer.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2020, Google Inc. * * Simple image thumbnailer */ #include "thumbnailer.h" #include <libcamera/base/log.h> #include <libcamera/formats.h> #include "libcamera/internal/mapped_framebuffer.h" using namespace libcamera; LOG_DEFINE_CATEGORY(Thumbnailer) Thumbnailer::Thumbnailer() : valid_(false) { } void Thumbnailer::configure(const Size &sourceSize, PixelFormat pixelFormat) { sourceSize_ = sourceSize; pixelFormat_ = pixelFormat; if (pixelFormat_ != formats::NV12) { LOG(Thumbnailer, Error) << "Failed to configure: pixel format " << pixelFormat_ << " is unsupported"; return; } valid_ = true; } void Thumbnailer::createThumbnail(const FrameBuffer &source, const Size &targetSize, std::vector<unsigned char> *destination) { MappedFrameBuffer frame(&source, MappedFrameBuffer::MapFlag::Read); if (!frame.isValid()) { LOG(Thumbnailer, Error) << "Failed to map FrameBuffer: " << strerror(frame.error()); return; } if (!valid_) { LOG(Thumbnailer, Error) << "Thumbnailer is unconfigured or the configuration is invalid"; return; } const unsigned int sw = sourceSize_.width; const unsigned int sh = sourceSize_.height; const unsigned int tw = targetSize.width; const unsigned int th = targetSize.height; ASSERT(frame.planes().size() == 2); ASSERT(tw % 2 == 0 && th % 2 == 0); /* Image scaling block implementing nearest-neighbour algorithm. */ unsigned char *src = frame.planes()[0].data(); unsigned char *srcC = frame.planes()[1].data(); unsigned char *srcCb, *srcCr; unsigned char *dstY, *srcY; size_t dstSize = (th * tw) + ((th / 2) * tw); destination->resize(dstSize); unsigned char *dst = destination->data(); unsigned char *dstC = dst + th * tw; for (unsigned int y = 0; y < th; y += 2) { unsigned int sourceY = (sh * y + th / 2) / th; dstY = dst + y * tw; srcY = src + sw * sourceY; srcCb = srcC + (sourceY / 2) * sw + 0; srcCr = srcC + (sourceY / 2) * sw + 1; for (unsigned int x = 0; x < tw; x += 2) { unsigned int sourceX = (sw * x + tw / 2) / tw; dstY[x] = srcY[sourceX]; dstY[tw + x] = srcY[sw + sourceX]; dstY[x + 1] = srcY[sourceX + 1]; dstY[tw + x + 1] = srcY[sw + sourceX + 1]; dstC[(y / 2) * tw + x + 0] = srcCb[(sourceX / 2) * 2]; dstC[(y / 2) * tw + x + 1] = srcCr[(sourceX / 2) * 2]; } } }
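The index arithmetic in the nearest-neighbour loop above is easy to misread, so here is the row mapping on its own: sourceY = (sh * y + th / 2) / th is an integer scale with round-to-nearest. A small standalone sketch with arbitrary example sizes:

#include <iostream>

int main()
{
	const unsigned int sh = 1080; /* source height (assumed) */
	const unsigned int th = 240;  /* thumbnail height (assumed) */

	for (unsigned int y = 0; y < th; y += 60) {
		/* Integer round-to-nearest: floor((sh * y) / th + 0.5). */
		unsigned int sourceY = (sh * y + th / 2) / th;
		std::cout << "thumbnail row " << y << " <- source row "
			  << sourceY << "\n";
	}

	return 0;
}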
0
repos/libcamera/src/android
repos/libcamera/src/android/jpeg/thumbnailer.h
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2020, Google Inc. * * Simple image thumbnailer */ #pragma once #include <libcamera/framebuffer.h> #include <libcamera/geometry.h> #include "libcamera/internal/formats.h" class Thumbnailer { public: Thumbnailer(); void configure(const libcamera::Size &sourceSize, libcamera::PixelFormat pixelFormat); void createThumbnail(const libcamera::FrameBuffer &source, const libcamera::Size &targetSize, std::vector<unsigned char> *dest); const libcamera::PixelFormat &pixelFormat() const { return pixelFormat_; } private: libcamera::PixelFormat pixelFormat_; libcamera::Size sourceSize_; bool valid_; };
0
repos/libcamera/src/android
repos/libcamera/src/android/jpeg/encoder_libjpeg.h
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2020, Google Inc. * * JPEG encoding using libjpeg */ #pragma once #include "encoder.h" #include <vector> #include "libcamera/internal/formats.h" #include <jpeglib.h> class EncoderLibJpeg : public Encoder { public: EncoderLibJpeg(); ~EncoderLibJpeg(); int configure(const libcamera::StreamConfiguration &cfg) override; int encode(Camera3RequestDescriptor::StreamBuffer *buffer, libcamera::Span<const uint8_t> exifData, unsigned int quality) override; int encode(const std::vector<libcamera::Span<uint8_t>> &planes, libcamera::Span<uint8_t> destination, libcamera::Span<const uint8_t> exifData, unsigned int quality); private: void compressRGB(const std::vector<libcamera::Span<uint8_t>> &planes); void compressNV(const std::vector<libcamera::Span<uint8_t>> &planes); struct jpeg_compress_struct compress_; struct jpeg_error_mgr jerr_; const libcamera::PixelFormatInfo *pixelFormatInfo_; bool nv_; bool nvSwap_; };
0
repos/libcamera/src/android
repos/libcamera/src/android/jpeg/exif.h
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2020, Google Inc. * * EXIF tag creator using libexif */ #pragma once #include <chrono> #include <string> #include <time.h> #include <vector> #include <libexif/exif-data.h> #include <libcamera/base/span.h> #include <libcamera/geometry.h> class Exif { public: Exif(); ~Exif(); enum Compression { None = 1, JPEG = 6, }; enum Flash { /* bit 0 */ Fired = 0x01, /* bits 1 and 2 */ StrobeDetected = 0x04, StrobeNotDetected = 0x06, /* bits 3 and 4 */ ModeCompulsoryFiring = 0x08, ModeCompulsorySuppression = 0x10, ModeAuto = 0x18, /* bit 5 */ FlashNotPresent = 0x20, /* bit 6 */ RedEye = 0x40, }; enum WhiteBalance { Auto = 0, Manual = 1, }; enum StringEncoding { NoEncoding = 0, ASCII = 1, Unicode = 2, }; void setMake(const std::string &make); void setModel(const std::string &model); void setOrientation(int orientation); void setSize(const libcamera::Size &size); void setThumbnail(std::vector<unsigned char> &&thumbnail, Compression compression); void setTimestamp(time_t timestamp, std::chrono::milliseconds msec); void setGPSDateTimestamp(time_t timestamp); void setGPSLocation(const double *coords); void setGPSMethod(const std::string &method); void setFocalLength(float length); void setExposureTime(uint64_t nsec); void setAperture(float size); void setISO(uint16_t iso); void setFlash(Flash flash); void setWhiteBalance(WhiteBalance wb); libcamera::Span<const uint8_t> data() const { return { exifData_, size_ }; } [[nodiscard]] int generate(); private: ExifEntry *createEntry(ExifIfd ifd, ExifTag tag); ExifEntry *createEntry(ExifIfd ifd, ExifTag tag, ExifFormat format, unsigned long components, unsigned int size); void setByte(ExifIfd ifd, ExifTag tag, uint8_t item); void setShort(ExifIfd ifd, ExifTag tag, uint16_t item); void setLong(ExifIfd ifd, ExifTag tag, uint32_t item); void setString(ExifIfd ifd, ExifTag tag, ExifFormat format, const std::string &item, StringEncoding encoding = NoEncoding); void setRational(ExifIfd ifd, ExifTag tag, ExifRational item); void setRational(ExifIfd ifd, ExifTag tag, libcamera::Span<const ExifRational> items); std::tuple<int, int, int> degreesToDMS(double decimalDegrees); void setGPSDMS(ExifIfd ifd, ExifTag tag, int deg, int min, int sec); std::u16string utf8ToUtf16(const std::string &str); bool valid_; ExifData *data_; ExifMem *mem_; ExifByteOrder order_; unsigned char *exifData_; unsigned int size_; std::vector<unsigned char> thumbnailData_; };
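A minimal usage sketch of the class declared above, assuming libexif and this header are available at build time; the field values are placeholders. generate() is [[nodiscard]] and must succeed before data() is meaningful.

#include <iostream>

#include "exif.h"

int main()
{
	Exif exif;

	exif.setMake("libcamera");
	exif.setModel("example model");
	exif.setOrientation(90);
	exif.setSize({ 1920, 1080 });
	exif.setISO(100);

	if (exif.generate() != 0)
		return 1;

	/* The span stays valid for the lifetime of the Exif instance. */
	std::cout << "EXIF blob size: " << exif.data().size() << " bytes\n";

	return 0;
}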
0
repos/libcamera/src/android
repos/libcamera/src/android/jpeg/post_processor_jpeg.h
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2020, Google Inc. * * JPEG Post Processor */ #pragma once #include "../post_processor.h" #include "encoder_libjpeg.h" #include "thumbnailer.h" #include <libcamera/geometry.h> class CameraDevice; class PostProcessorJpeg : public PostProcessor { public: PostProcessorJpeg(CameraDevice *const device); int configure(const libcamera::StreamConfiguration &incfg, const libcamera::StreamConfiguration &outcfg) override; void process(Camera3RequestDescriptor::StreamBuffer *streamBuffer) override; private: void generateThumbnail(const libcamera::FrameBuffer &source, const libcamera::Size &targetSize, unsigned int quality, std::vector<unsigned char> *thumbnail); CameraDevice *const cameraDevice_; std::unique_ptr<Encoder> encoder_; libcamera::Size streamSize_; EncoderLibJpeg thumbnailEncoder_; Thumbnailer thumbnailer_; };
0
repos/libcamera/src/android
repos/libcamera/src/android/jpeg/exif.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2020, Google Inc. * * EXIF tag creation using libexif */ #include "exif.h" #include <cmath> #include <iomanip> #include <map> #include <sstream> #include <tuple> #include <uchar.h> #include <libcamera/base/log.h> #include <libcamera/base/utils.h> using namespace libcamera; LOG_DEFINE_CATEGORY(EXIF) /* * List of EXIF tags that we set directly because they are not supported * by libexif version 0.6.21. */ enum class _ExifTag { OFFSET_TIME = 0x9010, OFFSET_TIME_ORIGINAL = 0x9011, OFFSET_TIME_DIGITIZED = 0x9012, }; /* * The Exif class should be instantiated and specific properties set * through the exposed public API. * * Once all desired properties have been set, the user shall call * generate() to process the entries and generate the Exif data. * * Calls to generate() must check the return code to determine if any error * occurred during the construction of the Exif data, and if successful the * data can be obtained using the data() function. */ Exif::Exif() : valid_(false), data_(nullptr), order_(EXIF_BYTE_ORDER_INTEL), exifData_(0), size_(0) { /* Create an ExifMem allocator to construct entries. */ mem_ = exif_mem_new_default(); if (!mem_) { LOG(EXIF, Error) << "Failed to allocate ExifMem Allocator"; return; } data_ = exif_data_new_mem(mem_); if (!data_) { LOG(EXIF, Error) << "Failed to allocate an ExifData structure"; return; } valid_ = true; exif_data_set_option(data_, EXIF_DATA_OPTION_FOLLOW_SPECIFICATION); exif_data_set_data_type(data_, EXIF_DATA_TYPE_COMPRESSED); /* * Big-Endian: EXIF_BYTE_ORDER_MOTOROLA * Little Endian: EXIF_BYTE_ORDER_INTEL */ exif_data_set_byte_order(data_, order_); setString(EXIF_IFD_EXIF, EXIF_TAG_EXIF_VERSION, EXIF_FORMAT_UNDEFINED, "0231"); /* Create the mandatory EXIF fields with default data. */ exif_data_fix(data_); } Exif::~Exif() { if (exifData_) free(exifData_); if (data_) { /* * Reset thumbnail data to avoid getting double-freed by * libexif. It is owned by the caller (i.e. PostProcessorJpeg). */ data_->data = nullptr; data_->size = 0; exif_data_unref(data_); } if (mem_) exif_mem_unref(mem_); } ExifEntry *Exif::createEntry(ExifIfd ifd, ExifTag tag) { ExifContent *content = data_->ifd[ifd]; ExifEntry *entry = exif_content_get_entry(content, tag); if (entry) { exif_entry_ref(entry); return entry; } entry = exif_entry_new_mem(mem_); if (!entry) { LOG(EXIF, Error) << "Failed to allocate new entry"; valid_ = false; return nullptr; } exif_content_add_entry(content, entry); exif_entry_initialize(entry, tag); return entry; } ExifEntry *Exif::createEntry(ExifIfd ifd, ExifTag tag, ExifFormat format, unsigned long components, unsigned int size) { ExifContent *content = data_->ifd[ifd]; /* Replace any existing entry with the same tag.
*/ ExifEntry *existing = exif_content_get_entry(content, tag); exif_content_remove_entry(content, existing); ExifEntry *entry = exif_entry_new_mem(mem_); if (!entry) { LOG(EXIF, Error) << "Failed to allocate new entry"; valid_ = false; return nullptr; } void *buffer = exif_mem_alloc(mem_, size); if (!buffer) { LOG(EXIF, Error) << "Failed to allocate buffer for variable entry"; exif_mem_unref(mem_); valid_ = false; return nullptr; } entry->data = static_cast<unsigned char *>(buffer); entry->components = components; entry->format = format; entry->size = size; entry->tag = tag; exif_content_add_entry(content, entry); return entry; } void Exif::setByte(ExifIfd ifd, ExifTag tag, uint8_t item) { ExifEntry *entry = createEntry(ifd, tag, EXIF_FORMAT_BYTE, 1, 1); if (!entry) return; entry->data[0] = item; exif_entry_unref(entry); } void Exif::setShort(ExifIfd ifd, ExifTag tag, uint16_t item) { ExifEntry *entry = createEntry(ifd, tag); if (!entry) return; exif_set_short(entry->data, order_, item); exif_entry_unref(entry); } void Exif::setLong(ExifIfd ifd, ExifTag tag, uint32_t item) { ExifEntry *entry = createEntry(ifd, tag); if (!entry) return; exif_set_long(entry->data, order_, item); exif_entry_unref(entry); } void Exif::setRational(ExifIfd ifd, ExifTag tag, ExifRational item) { setRational(ifd, tag, { &item, 1 }); } void Exif::setRational(ExifIfd ifd, ExifTag tag, Span<const ExifRational> items) { ExifEntry *entry = createEntry(ifd, tag, EXIF_FORMAT_RATIONAL, items.size(), items.size() * sizeof(ExifRational)); if (!entry) return; for (size_t i = 0; i < items.size(); i++) exif_set_rational(entry->data + i * sizeof(ExifRational), order_, items[i]); exif_entry_unref(entry); } static const std::map<Exif::StringEncoding, std::array<uint8_t, 8>> stringEncodingCodes = { { Exif::ASCII, { 0x41, 0x53, 0x43, 0x49, 0x49, 0x00, 0x00, 0x00 } }, { Exif::Unicode, { 0x55, 0x4e, 0x49, 0x43, 0x4f, 0x44, 0x45, 0x00 } }, }; void Exif::setString(ExifIfd ifd, ExifTag tag, ExifFormat format, const std::string &item, StringEncoding encoding) { std::string ascii; size_t length; const char *str; std::vector<uint8_t> buf; if (format == EXIF_FORMAT_ASCII) { ascii = utils::toAscii(item); str = ascii.c_str(); /* Pad 1 extra byte to null-terminate the ASCII string. */ length = ascii.length() + 1; } else { std::u16string u16str; auto encodingString = stringEncodingCodes.find(encoding); if (encodingString != stringEncodingCodes.end()) { buf = { encodingString->second.begin(), encodingString->second.end() }; } switch (encoding) { case Unicode: u16str = utf8ToUtf16(item); buf.resize(8 + u16str.size() * 2); for (size_t i = 0; i < u16str.size(); i++) { if (order_ == EXIF_BYTE_ORDER_INTEL) { buf[8 + 2 * i] = u16str[i] & 0xff; buf[8 + 2 * i + 1] = (u16str[i] >> 8) & 0xff; } else { buf[8 + 2 * i] = (u16str[i] >> 8) & 0xff; buf[8 + 2 * i + 1] = u16str[i] & 0xff; } } break; case ASCII: case NoEncoding: buf.insert(buf.end(), item.begin(), item.end()); break; } str = reinterpret_cast<const char *>(buf.data()); /* * Strings stored in different formats (EXIF_FORMAT_UNDEFINED) * are not null-terminated.
*/ length = buf.size(); } ExifEntry *entry = createEntry(ifd, tag, format, length, length); if (!entry) return; memcpy(entry->data, str, length); exif_entry_unref(entry); } void Exif::setMake(const std::string &make) { setString(EXIF_IFD_0, EXIF_TAG_MAKE, EXIF_FORMAT_ASCII, make); } void Exif::setModel(const std::string &model) { setString(EXIF_IFD_0, EXIF_TAG_MODEL, EXIF_FORMAT_ASCII, model); } void Exif::setSize(const Size &size) { setLong(EXIF_IFD_EXIF, EXIF_TAG_PIXEL_Y_DIMENSION, size.height); setLong(EXIF_IFD_EXIF, EXIF_TAG_PIXEL_X_DIMENSION, size.width); } void Exif::setTimestamp(time_t timestamp, std::chrono::milliseconds msec) { struct tm tm; localtime_r(&timestamp, &tm); char str[20]; strftime(str, sizeof(str), "%Y:%m:%d %H:%M:%S", &tm); std::string ts(str); setString(EXIF_IFD_0, EXIF_TAG_DATE_TIME, EXIF_FORMAT_ASCII, ts); setString(EXIF_IFD_EXIF, EXIF_TAG_DATE_TIME_ORIGINAL, EXIF_FORMAT_ASCII, ts); setString(EXIF_IFD_EXIF, EXIF_TAG_DATE_TIME_DIGITIZED, EXIF_FORMAT_ASCII, ts); /* Query and set timezone information if available. */ int r = strftime(str, sizeof(str), "%z", &tm); if (r <= 0) return; std::string tz(str); tz.insert(3, 1, ':'); setString(EXIF_IFD_EXIF, static_cast<ExifTag>(_ExifTag::OFFSET_TIME), EXIF_FORMAT_ASCII, tz); setString(EXIF_IFD_EXIF, static_cast<ExifTag>(_ExifTag::OFFSET_TIME_ORIGINAL), EXIF_FORMAT_ASCII, tz); setString(EXIF_IFD_EXIF, static_cast<ExifTag>(_ExifTag::OFFSET_TIME_DIGITIZED), EXIF_FORMAT_ASCII, tz); std::stringstream sstr; sstr << std::setfill('0') << std::setw(3) << msec.count(); std::string subsec = sstr.str(); setString(EXIF_IFD_EXIF, EXIF_TAG_SUB_SEC_TIME, EXIF_FORMAT_ASCII, subsec); setString(EXIF_IFD_EXIF, EXIF_TAG_SUB_SEC_TIME_ORIGINAL, EXIF_FORMAT_ASCII, subsec); setString(EXIF_IFD_EXIF, EXIF_TAG_SUB_SEC_TIME_DIGITIZED, EXIF_FORMAT_ASCII, subsec); } void Exif::setGPSDateTimestamp(time_t timestamp) { struct tm tm; gmtime_r(&timestamp, &tm); char str[11]; strftime(str, sizeof(str), "%Y:%m:%d", &tm); std::string tsStr(str); setString(EXIF_IFD_GPS, static_cast<ExifTag>(EXIF_TAG_GPS_DATE_STAMP), EXIF_FORMAT_ASCII, tsStr); /* Set GPS_TIME_STAMP */ ExifRational ts[] = { { static_cast<ExifLong>(tm.tm_hour), 1 }, { static_cast<ExifLong>(tm.tm_min), 1 }, { static_cast<ExifLong>(tm.tm_sec), 1 }, }; setRational(EXIF_IFD_GPS, static_cast<ExifTag>(EXIF_TAG_GPS_TIME_STAMP), ts); } std::tuple<int, int, int> Exif::degreesToDMS(double decimalDegrees) { int degrees = std::trunc(decimalDegrees); double minutes = std::abs((decimalDegrees - degrees) * 60); double seconds = (minutes - std::trunc(minutes)) * 60; return { degrees, std::trunc(minutes), std::round(seconds) }; } void Exif::setGPSDMS(ExifIfd ifd, ExifTag tag, int deg, int min, int sec) { ExifRational coords[] = { { static_cast<ExifLong>(deg), 1 }, { static_cast<ExifLong>(min), 1 }, { static_cast<ExifLong>(sec), 1 }, }; setRational(ifd, tag, coords); } /* * \brief Set GPS location (lat, long, alt) * \param[in] coords Pointer to coordinates latitude, longitude, and altitude, * first two in degrees, the third in meters */ void Exif::setGPSLocation(const double *coords) { int deg, min, sec; std::tie<int, int, int>(deg, min, sec) = degreesToDMS(coords[0]); setString(EXIF_IFD_GPS, static_cast<ExifTag>(EXIF_TAG_GPS_LATITUDE_REF), EXIF_FORMAT_ASCII, deg >= 0 ? 
"N" : "S"); setGPSDMS(EXIF_IFD_GPS, static_cast<ExifTag>(EXIF_TAG_GPS_LATITUDE), std::abs(deg), min, sec); std::tie<int, int, int>(deg, min, sec) = degreesToDMS(coords[1]); setString(EXIF_IFD_GPS, static_cast<ExifTag>(EXIF_TAG_GPS_LONGITUDE_REF), EXIF_FORMAT_ASCII, deg >= 0 ? "E" : "W"); setGPSDMS(EXIF_IFD_GPS, static_cast<ExifTag>(EXIF_TAG_GPS_LONGITUDE), std::abs(deg), min, sec); setByte(EXIF_IFD_GPS, static_cast<ExifTag>(EXIF_TAG_GPS_ALTITUDE_REF), coords[2] >= 0 ? 0 : 1); setRational(EXIF_IFD_GPS, static_cast<ExifTag>(EXIF_TAG_GPS_ALTITUDE), ExifRational{ static_cast<ExifLong>(std::abs(coords[2])), 1 }); } void Exif::setGPSMethod(const std::string &method) { setString(EXIF_IFD_GPS, static_cast<ExifTag>(EXIF_TAG_GPS_PROCESSING_METHOD), EXIF_FORMAT_UNDEFINED, method, NoEncoding); } void Exif::setOrientation(int orientation) { int value; switch (orientation) { case 0: default: value = 1; break; case 90: value = 6; break; case 180: value = 3; break; case 270: value = 8; break; } setShort(EXIF_IFD_0, EXIF_TAG_ORIENTATION, value); } void Exif::setThumbnail(std::vector<unsigned char> &&thumbnail, Compression compression) { thumbnailData_ = std::move(thumbnail); data_->data = thumbnailData_.data(); data_->size = thumbnailData_.size(); setShort(EXIF_IFD_0, EXIF_TAG_COMPRESSION, compression); } void Exif::setFocalLength(float length) { ExifRational rational = { static_cast<ExifLong>(length * 1000), 1000 }; setRational(EXIF_IFD_EXIF, EXIF_TAG_FOCAL_LENGTH, rational); } void Exif::setExposureTime(uint64_t nsec) { ExifRational rational = { static_cast<ExifLong>(nsec), 1000000000 }; setRational(EXIF_IFD_EXIF, EXIF_TAG_EXPOSURE_TIME, rational); } void Exif::setAperture(float size) { ExifRational rational = { static_cast<ExifLong>(size * 10000), 10000 }; setRational(EXIF_IFD_EXIF, EXIF_TAG_FNUMBER, rational); } void Exif::setISO(uint16_t iso) { setShort(EXIF_IFD_EXIF, EXIF_TAG_ISO_SPEED_RATINGS, iso); } void Exif::setFlash(Flash flash) { setShort(EXIF_IFD_EXIF, EXIF_TAG_FLASH, static_cast<ExifShort>(flash)); } void Exif::setWhiteBalance(WhiteBalance wb) { setShort(EXIF_IFD_EXIF, EXIF_TAG_WHITE_BALANCE, static_cast<ExifShort>(wb)); } /** * \brief Convert UTF-8 string to UTF-16 string * \param[in] str String to convert * * \return \a str in UTF-16 */ std::u16string Exif::utf8ToUtf16(const std::string &str) { mbstate_t state{}; char16_t c16; const char *ptr = str.data(); const char *end = ptr + str.size(); std::u16string ret; while (size_t rc = mbrtoc16(&c16, ptr, end - ptr + 1, &state)) { if (rc == static_cast<size_t>(-2) || rc == static_cast<size_t>(-1)) break; ret.push_back(c16); if (rc > 0) ptr += rc; } return ret; } [[nodiscard]] int Exif::generate() { if (exifData_) { free(exifData_); exifData_ = nullptr; } if (!valid_) { LOG(EXIF, Error) << "Generated EXIF data is invalid"; return -1; } exif_data_save_data(data_, &exifData_, &size_); LOG(EXIF, Debug) << "Created EXIF instance (" << size_ << " bytes)"; return 0; }
0
repos/libcamera/src/android
repos/libcamera/src/android/jpeg/encoder_jea.h
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2022, Google Inc. * * JPEG encoding using CrOS JEA */ #pragma once #include <libcamera/geometry.h> #include <cros-camera/jpeg_compressor.h> #include "encoder.h" class EncoderJea : public Encoder { public: EncoderJea(); ~EncoderJea(); int configure(const libcamera::StreamConfiguration &cfg) override; int encode(Camera3RequestDescriptor::StreamBuffer *buffer, libcamera::Span<const uint8_t> exifData, unsigned int quality) override; private: libcamera::Size size_; std::unique_ptr<cros::JpegCompressor> jpegCompressor_; };
0
repos/libcamera/src/android
repos/libcamera/src/android/jpeg/encoder_jea.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2022, Google Inc. * * JPEG encoding using CrOS JEA */ #include "encoder_jea.h" #include "libcamera/internal/mapped_framebuffer.h" #include <cros-camera/camera_mojo_channel_manager_token.h> #include "../cros_mojo_token.h" #include "../hal_framebuffer.h" EncoderJea::EncoderJea() = default; EncoderJea::~EncoderJea() = default; int EncoderJea::configure(const libcamera::StreamConfiguration &cfg) { size_ = cfg.size; if (jpegCompressor_) return 0; if (gCrosMojoToken == nullptr) return -ENOTSUP; jpegCompressor_ = cros::JpegCompressor::GetInstance(gCrosMojoToken); return 0; } int EncoderJea::encode(Camera3RequestDescriptor::StreamBuffer *buffer, libcamera::Span<const uint8_t> exifData, unsigned int quality) { if (!jpegCompressor_) return -ENOTSUP; uint32_t outDataSize = 0; const HALFrameBuffer *fb = dynamic_cast<const HALFrameBuffer *>(buffer->srcBuffer); if (!jpegCompressor_->CompressImageFromHandle(fb->handle(), *buffer->camera3Buffer, size_.width, size_.height, quality, exifData.data(), exifData.size(), &outDataSize)) return -EBUSY; return outDataSize; }
0
repos/libcamera/src/android
repos/libcamera/src/android/jpeg/post_processor_jpeg.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2020, Google Inc. * * JPEG Post Processor */ #include "post_processor_jpeg.h" #include <chrono> #include "../camera_device.h" #include "../camera_metadata.h" #include "../camera_request.h" #if defined(OS_CHROMEOS) #include "encoder_jea.h" #else /* !defined(OS_CHROMEOS) */ #include "encoder_libjpeg.h" #endif #include "exif.h" #include <libcamera/base/log.h> #include <libcamera/formats.h> using namespace libcamera; using namespace std::chrono_literals; LOG_DEFINE_CATEGORY(JPEG) PostProcessorJpeg::PostProcessorJpeg(CameraDevice *const device) : cameraDevice_(device) { } int PostProcessorJpeg::configure(const StreamConfiguration &inCfg, const StreamConfiguration &outCfg) { if (inCfg.size != outCfg.size) { LOG(JPEG, Error) << "Mismatch of input and output stream sizes"; return -EINVAL; } if (outCfg.pixelFormat != formats::MJPEG) { LOG(JPEG, Error) << "Output stream pixel format is not JPEG"; return -EINVAL; } streamSize_ = outCfg.size; thumbnailer_.configure(inCfg.size, inCfg.pixelFormat); #if defined(OS_CHROMEOS) encoder_ = std::make_unique<EncoderJea>(); #else /* !defined(OS_CHROMEOS) */ encoder_ = std::make_unique<EncoderLibJpeg>(); #endif return encoder_->configure(inCfg); } void PostProcessorJpeg::generateThumbnail(const FrameBuffer &source, const Size &targetSize, unsigned int quality, std::vector<unsigned char> *thumbnail) { /* Stores the raw scaled-down thumbnail bytes. */ std::vector<unsigned char> rawThumbnail; thumbnailer_.createThumbnail(source, targetSize, &rawThumbnail); StreamConfiguration thCfg; thCfg.size = targetSize; thCfg.pixelFormat = thumbnailer_.pixelFormat(); int ret = thumbnailEncoder_.configure(thCfg); if (!rawThumbnail.empty() && !ret) { /* * \todo Avoid value-initialization of all elements of the * vector. */ thumbnail->resize(rawThumbnail.size()); /* * Split planes manually as the encoder expects a vector of * planes. * * \todo Pass a vector of planes directly to * Thumbnailer::createThumbnailer above and remove the manual * planes split from here. */ std::vector<Span<uint8_t>> thumbnailPlanes; const PixelFormatInfo &formatNV12 = PixelFormatInfo::info(formats::NV12); size_t yPlaneSize = formatNV12.planeSize(targetSize, 0); size_t uvPlaneSize = formatNV12.planeSize(targetSize, 1); thumbnailPlanes.push_back({ rawThumbnail.data(), yPlaneSize }); thumbnailPlanes.push_back({ rawThumbnail.data() + yPlaneSize, uvPlaneSize }); int jpeg_size = thumbnailEncoder_.encode(thumbnailPlanes, *thumbnail, {}, quality); thumbnail->resize(jpeg_size); LOG(JPEG, Debug) << "Thumbnail compress returned " << jpeg_size << " bytes"; } } void PostProcessorJpeg::process(Camera3RequestDescriptor::StreamBuffer *streamBuffer) { ASSERT(encoder_); const FrameBuffer &source = *streamBuffer->srcBuffer; CameraBuffer *destination = streamBuffer->dstBuffer.get(); ASSERT(destination->numPlanes() == 1); const CameraMetadata &requestMetadata = streamBuffer->request->settings_; CameraMetadata *resultMetadata = streamBuffer->request->resultMetadata_.get(); camera_metadata_ro_entry_t entry; int ret; /* Set EXIF metadata for various tags. */ Exif exif; exif.setMake(cameraDevice_->maker()); exif.setModel(cameraDevice_->model()); ret = requestMetadata.getEntry(ANDROID_JPEG_ORIENTATION, &entry); const uint32_t jpegOrientation = ret ? *entry.data.i32 : 0; resultMetadata->addEntry(ANDROID_JPEG_ORIENTATION, jpegOrientation); exif.setOrientation(jpegOrientation); exif.setSize(streamSize_); /* * We set the frame's EXIF timestamp as the time of encode. 
* Since the precision we need for EXIF timestamp is only one * second, it is good enough. */ exif.setTimestamp(std::time(nullptr), 0ms); ret = resultMetadata->getEntry(ANDROID_SENSOR_EXPOSURE_TIME, &entry); exif.setExposureTime(ret ? *entry.data.i64 : 0); ret = requestMetadata.getEntry(ANDROID_LENS_APERTURE, &entry); if (ret) exif.setAperture(*entry.data.f); ret = resultMetadata->getEntry(ANDROID_SENSOR_SENSITIVITY, &entry); exif.setISO(ret ? *entry.data.i32 : 100); exif.setFlash(Exif::Flash::FlashNotPresent); exif.setWhiteBalance(Exif::WhiteBalance::Auto); exif.setFocalLength(1.0); ret = requestMetadata.getEntry(ANDROID_JPEG_GPS_TIMESTAMP, &entry); if (ret) { exif.setGPSDateTimestamp(*entry.data.i64); resultMetadata->addEntry(ANDROID_JPEG_GPS_TIMESTAMP, *entry.data.i64); } ret = requestMetadata.getEntry(ANDROID_JPEG_THUMBNAIL_SIZE, &entry); if (ret) { const int32_t *data = entry.data.i32; Size thumbnailSize = { static_cast<uint32_t>(data[0]), static_cast<uint32_t>(data[1]) }; ret = requestMetadata.getEntry(ANDROID_JPEG_THUMBNAIL_QUALITY, &entry); uint8_t quality = ret ? *entry.data.u8 : 95; resultMetadata->addEntry(ANDROID_JPEG_THUMBNAIL_QUALITY, quality); if (thumbnailSize != Size(0, 0)) { std::vector<unsigned char> thumbnail; generateThumbnail(source, thumbnailSize, quality, &thumbnail); if (!thumbnail.empty()) exif.setThumbnail(std::move(thumbnail), Exif::Compression::JPEG); } resultMetadata->addEntry(ANDROID_JPEG_THUMBNAIL_SIZE, data, 2); } ret = requestMetadata.getEntry(ANDROID_JPEG_GPS_COORDINATES, &entry); if (ret) { exif.setGPSLocation(entry.data.d); resultMetadata->addEntry(ANDROID_JPEG_GPS_COORDINATES, entry.data.d, 3); } ret = requestMetadata.getEntry(ANDROID_JPEG_GPS_PROCESSING_METHOD, &entry); if (ret) { std::string method(entry.data.u8, entry.data.u8 + entry.count); exif.setGPSMethod(method); resultMetadata->addEntry(ANDROID_JPEG_GPS_PROCESSING_METHOD, entry.data.u8, entry.count); } if (exif.generate() != 0) LOG(JPEG, Error) << "Failed to generate valid EXIF data"; ret = requestMetadata.getEntry(ANDROID_JPEG_QUALITY, &entry); const uint8_t quality = ret ? *entry.data.u8 : 95; resultMetadata->addEntry(ANDROID_JPEG_QUALITY, quality); int jpeg_size = encoder_->encode(streamBuffer, exif.data(), quality); if (jpeg_size < 0) { LOG(JPEG, Error) << "Failed to encode stream image"; processComplete.emit(streamBuffer, PostProcessor::Status::Error); return; } /* Fill in the JPEG blob header. */ uint8_t *resultPtr = destination->plane(0).data() + destination->jpegBufferSize(cameraDevice_->maxJpegBufferSize()) - sizeof(struct camera3_jpeg_blob); auto *blob = reinterpret_cast<struct camera3_jpeg_blob *>(resultPtr); blob->jpeg_blob_id = CAMERA3_JPEG_BLOB_ID; blob->jpeg_size = jpeg_size; /* Update the JPEG result Metadata. */ resultMetadata->addEntry(ANDROID_JPEG_SIZE, jpeg_size); processComplete.emit(streamBuffer, PostProcessor::Status::Success); }
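The blob trailer written at the end of process() can be shown in isolation. The struct below is a stand-in mirroring camera3_jpeg_blob from <hardware/camera3.h> (CAMERA3_JPEG_BLOB_ID is 0x00FF); the buffer and payload sizes are invented for the example.

#include <cstdint>
#include <cstring>
#include <vector>

/* Stand-in for struct camera3_jpeg_blob from <hardware/camera3.h>. */
struct JpegBlob {
	uint16_t jpeg_blob_id; /* CAMERA3_JPEG_BLOB_ID == 0x00FF */
	uint32_t jpeg_size;
};

int main()
{
	std::vector<uint8_t> buffer(1 << 20); /* illustrative JPEG buffer */
	const uint32_t jpegSize = 123456;     /* bytes the encoder produced */

	/*
	 * The trailer occupies the last sizeof(JpegBlob) bytes of the
	 * buffer, so the framework can locate the payload size without
	 * knowing how large the gralloc buffer actually is.
	 */
	JpegBlob blob{ 0x00FF, jpegSize };
	std::memcpy(buffer.data() + buffer.size() - sizeof(blob), &blob,
		    sizeof(blob));

	return 0;
}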
0
repos/libcamera/src/android
repos/libcamera/src/android/jpeg/encoder.h
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2020, Google Inc. * * Image encoding interface */ #pragma once #include <libcamera/base/span.h> #include <libcamera/framebuffer.h> #include <libcamera/stream.h> #include "../camera_request.h" class Encoder { public: virtual ~Encoder() = default; virtual int configure(const libcamera::StreamConfiguration &cfg) = 0; virtual int encode(Camera3RequestDescriptor::StreamBuffer *buffer, libcamera::Span<const uint8_t> exifData, unsigned int quality) = 0; };
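For orientation, a hypothetical do-nothing implementation of this interface; it only demonstrates the contract (configure() is called with the stream configuration, encode() returns the number of bytes written or a negative error code) and is not part of the HAL.

#include <libcamera/geometry.h>

#include "encoder.h"

class EncoderNull : public Encoder
{
public:
	int configure(const libcamera::StreamConfiguration &cfg) override
	{
		size_ = cfg.size;
		return 0;
	}

	int encode([[maybe_unused]] Camera3RequestDescriptor::StreamBuffer *buffer,
		   [[maybe_unused]] libcamera::Span<const uint8_t> exifData,
		   [[maybe_unused]] unsigned int quality) override
	{
		return 0; /* no bytes produced */
	}

private:
	libcamera::Size size_;
};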
0
repos/libcamera/src/android
repos/libcamera/src/android/jpeg/encoder_libjpeg.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2020, Google Inc. * * JPEG encoding using libjpeg native API */ #include "encoder_libjpeg.h" #include <fcntl.h> #include <iomanip> #include <iostream> #include <sstream> #include <string.h> #include <unistd.h> #include <vector> #include <libcamera/base/log.h> #include <libcamera/camera.h> #include <libcamera/formats.h> #include <libcamera/pixel_format.h> #include "libcamera/internal/formats.h" #include "libcamera/internal/mapped_framebuffer.h" #include "../camera_buffer.h" using namespace libcamera; LOG_DECLARE_CATEGORY(JPEG) namespace { struct JPEGPixelFormatInfo { J_COLOR_SPACE colorSpace; const PixelFormatInfo &pixelFormatInfo; bool nvSwap; }; const std::map<PixelFormat, JPEGPixelFormatInfo> pixelInfo{ { formats::R8, { JCS_GRAYSCALE, PixelFormatInfo::info(formats::R8), false } }, { formats::RGB888, { JCS_EXT_BGR, PixelFormatInfo::info(formats::RGB888), false } }, { formats::BGR888, { JCS_EXT_RGB, PixelFormatInfo::info(formats::BGR888), false } }, { formats::NV12, { JCS_YCbCr, PixelFormatInfo::info(formats::NV12), false } }, { formats::NV21, { JCS_YCbCr, PixelFormatInfo::info(formats::NV21), true } }, { formats::NV16, { JCS_YCbCr, PixelFormatInfo::info(formats::NV16), false } }, { formats::NV61, { JCS_YCbCr, PixelFormatInfo::info(formats::NV61), true } }, { formats::NV24, { JCS_YCbCr, PixelFormatInfo::info(formats::NV24), false } }, { formats::NV42, { JCS_YCbCr, PixelFormatInfo::info(formats::NV42), true } }, }; const struct JPEGPixelFormatInfo &findPixelInfo(const PixelFormat &format) { static const struct JPEGPixelFormatInfo invalidPixelFormat { JCS_UNKNOWN, PixelFormatInfo(), false }; const auto iter = pixelInfo.find(format); if (iter == pixelInfo.end()) { LOG(JPEG, Error) << "Unsupported pixel format for JPEG encoder: " << format; return invalidPixelFormat; } return iter->second; } } /* namespace */ EncoderLibJpeg::EncoderLibJpeg() { /* \todo Expand error handling coverage with a custom handler. */ compress_.err = jpeg_std_error(&jerr_); jpeg_create_compress(&compress_); } EncoderLibJpeg::~EncoderLibJpeg() { jpeg_destroy_compress(&compress_); } int EncoderLibJpeg::configure(const StreamConfiguration &cfg) { const struct JPEGPixelFormatInfo info = findPixelInfo(cfg.pixelFormat); if (info.colorSpace == JCS_UNKNOWN) return -ENOTSUP; compress_.image_width = cfg.size.width; compress_.image_height = cfg.size.height; compress_.in_color_space = info.colorSpace; compress_.input_components = info.colorSpace == JCS_GRAYSCALE ? 1 : 3; jpeg_set_defaults(&compress_); pixelFormatInfo_ = &info.pixelFormatInfo; nv_ = pixelFormatInfo_->numPlanes() == 2; nvSwap_ = info.nvSwap; return 0; } void EncoderLibJpeg::compressRGB(const std::vector<Span<uint8_t>> &planes) { unsigned char *src = const_cast<unsigned char *>(planes[0].data()); /* \todo Stride information should come from buffer configuration. */ unsigned int stride = pixelFormatInfo_->stride(compress_.image_width, 0); JSAMPROW row_pointer[1]; while (compress_.next_scanline < compress_.image_height) { row_pointer[0] = &src[compress_.next_scanline * stride]; jpeg_write_scanlines(&compress_, row_pointer, 1); } } /* * Compress the incoming buffer from a supported NV format. * This naively unpacks the semi-planar NV12 to a YUV888 format for libjpeg. */ void EncoderLibJpeg::compressNV(const std::vector<Span<uint8_t>> &planes) { uint8_t tmprowbuf[compress_.image_width * 3]; /* * \todo Use the raw api, and only unpack the cb/cr samples to new line * buffers. 
If possible, see if we can set appropriate pixel strides * too to save even that copy. * * Possible hints at: * https://sourceforge.net/p/libjpeg/mailman/message/30815123/ */ unsigned int y_stride = pixelFormatInfo_->stride(compress_.image_width, 0); unsigned int c_stride = pixelFormatInfo_->stride(compress_.image_width, 1); unsigned int horzSubSample = 2 * compress_.image_width / c_stride; unsigned int vertSubSample = pixelFormatInfo_->planes[1].verticalSubSampling; unsigned int c_inc = horzSubSample == 1 ? 2 : 0; unsigned int cb_pos = nvSwap_ ? 1 : 0; unsigned int cr_pos = nvSwap_ ? 0 : 1; const unsigned char *src = planes[0].data(); const unsigned char *src_c = planes[1].data(); JSAMPROW row_pointer[1]; row_pointer[0] = &tmprowbuf[0]; for (unsigned int y = 0; y < compress_.image_height; y++) { unsigned char *dst = &tmprowbuf[0]; const unsigned char *src_y = src + y * y_stride; const unsigned char *src_cb = src_c + (y / vertSubSample) * c_stride + cb_pos; const unsigned char *src_cr = src_c + (y / vertSubSample) * c_stride + cr_pos; for (unsigned int x = 0; x < compress_.image_width; x += 2) { dst[0] = *src_y; dst[1] = *src_cb; dst[2] = *src_cr; src_y++; src_cb += c_inc; src_cr += c_inc; dst += 3; dst[0] = *src_y; dst[1] = *src_cb; dst[2] = *src_cr; src_y++; src_cb += 2; src_cr += 2; dst += 3; } jpeg_write_scanlines(&compress_, row_pointer, 1); } } int EncoderLibJpeg::encode(Camera3RequestDescriptor::StreamBuffer *buffer, libcamera::Span<const uint8_t> exifData, unsigned int quality) { MappedFrameBuffer frame(buffer->srcBuffer, MappedFrameBuffer::MapFlag::Read); if (!frame.isValid()) { LOG(JPEG, Error) << "Failed to map FrameBuffer : " << strerror(frame.error()); return frame.error(); } return encode(frame.planes(), buffer->dstBuffer->plane(0), exifData, quality); } int EncoderLibJpeg::encode(const std::vector<Span<uint8_t>> &src, Span<uint8_t> dest, Span<const uint8_t> exifData, unsigned int quality) { unsigned char *destination = dest.data(); unsigned long size = dest.size(); jpeg_set_quality(&compress_, quality, TRUE); /* * The jpeg_mem_dest will reallocate if the required size is not * sufficient. That means the output won't be written to the correct * buffers. * * \todo Implement our own custom memory destination to prevent * reallocation and prefer failure with correct reporting. */ jpeg_mem_dest(&compress_, &destination, &size); jpeg_start_compress(&compress_, TRUE); if (exifData.size()) /* Store Exif data in the JPEG_APP1 data block. */ jpeg_write_marker(&compress_, JPEG_APP0 + 1, static_cast<const JOCTET *>(exifData.data()), exifData.size()); LOG(JPEG, Debug) << "JPEG Encode Starting:" << compress_.image_width << "x" << compress_.image_height; ASSERT(src.size() == pixelFormatInfo_->numPlanes()); if (nv_) compressNV(src); else compressRGB(src); jpeg_finish_compress(&compress_); return size; }
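A reduced, standalone version of the flow above: encoding a synthetic grayscale image through jpeg_mem_dest(), assuming libjpeg or libjpeg-turbo is available. It skips the NV repacking that EncoderLibJpeg performs for semi-planar input.

#include <cstdio>
#include <cstdlib>
#include <vector>

#include <jpeglib.h>

int main()
{
	const unsigned int width = 64, height = 64;
	std::vector<unsigned char> gray(width * height, 128); /* mid-gray */

	struct jpeg_compress_struct compress;
	struct jpeg_error_mgr jerr;
	compress.err = jpeg_std_error(&jerr);
	jpeg_create_compress(&compress);

	/* libjpeg allocates (and may reallocate) the output buffer. */
	unsigned char *out = nullptr;
	unsigned long outSize = 0;
	jpeg_mem_dest(&compress, &out, &outSize);

	compress.image_width = width;
	compress.image_height = height;
	compress.input_components = 1;
	compress.in_color_space = JCS_GRAYSCALE;
	jpeg_set_defaults(&compress);
	jpeg_set_quality(&compress, 95, TRUE);

	jpeg_start_compress(&compress, TRUE);
	while (compress.next_scanline < compress.image_height) {
		JSAMPROW row = &gray[compress.next_scanline * width];
		jpeg_write_scanlines(&compress, &row, 1);
	}
	jpeg_finish_compress(&compress);

	std::printf("encoded %lu bytes\n", outSize);

	std::free(out); /* the jpeg_mem_dest() buffer is malloc()ed by libjpeg */
	jpeg_destroy_compress(&compress);

	return 0;
}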
0
repos/libcamera/src/android
repos/libcamera/src/android/mm/generic_frame_buffer_allocator.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2021, Google Inc. * * Allocate FrameBuffer using gralloc API */ #include <dlfcn.h> #include <memory> #include <vector> #include <libcamera/base/log.h> #include <libcamera/base/shared_fd.h> #include "libcamera/internal/formats.h" #include "libcamera/internal/framebuffer.h" #include <hardware/camera3.h> #include <hardware/gralloc.h> #include <hardware/hardware.h> #include "../camera_device.h" #include "../frame_buffer_allocator.h" #include "../hal_framebuffer.h" using namespace libcamera; LOG_DECLARE_CATEGORY(HAL) namespace { class GenericFrameBufferData : public FrameBuffer::Private { LIBCAMERA_DECLARE_PUBLIC(FrameBuffer) public: GenericFrameBufferData(struct alloc_device_t *allocDevice, buffer_handle_t handle, const std::vector<FrameBuffer::Plane> &planes) : FrameBuffer::Private(planes), allocDevice_(allocDevice), handle_(handle) { ASSERT(allocDevice_); ASSERT(handle_); } ~GenericFrameBufferData() override { /* * allocDevice_ is used to destroy handle_. allocDevice_ is * owned by PlatformFrameBufferAllocator::Private, so the * GenericFrameBufferData must be destroyed before the * allocator device is. * * \todo Consider managing alloc_device_t with std::shared_ptr * if this is difficult to maintain. * * \todo Thread safety against alloc_device_t is not documented. * Is it safe to call alloc/free in parallel? */ allocDevice_->free(allocDevice_, handle_); } private: struct alloc_device_t *allocDevice_; const buffer_handle_t handle_; }; } /* namespace */ class PlatformFrameBufferAllocator::Private : public Extensible::Private { LIBCAMERA_DECLARE_PUBLIC(PlatformFrameBufferAllocator) public: Private(CameraDevice *const cameraDevice) : cameraDevice_(cameraDevice), hardwareModule_(nullptr), allocDevice_(nullptr) { hw_get_module(GRALLOC_HARDWARE_MODULE_ID, &hardwareModule_); ASSERT(hardwareModule_); } ~Private() override; std::unique_ptr<HALFrameBuffer> allocate(int halPixelFormat, const libcamera::Size &size, uint32_t usage); private: const CameraDevice *const cameraDevice_; const struct hw_module_t *hardwareModule_; struct alloc_device_t *allocDevice_; }; PlatformFrameBufferAllocator::Private::~Private() { if (allocDevice_) gralloc_close(allocDevice_); dlclose(hardwareModule_->dso); } std::unique_ptr<HALFrameBuffer> PlatformFrameBufferAllocator::Private::allocate(int halPixelFormat, const libcamera::Size &size, uint32_t usage) { if (!allocDevice_) { int ret = gralloc_open(hardwareModule_, &allocDevice_); if (ret) { LOG(HAL, Fatal) << "gralloc_open() failed: " << ret; return nullptr; } } int stride = 0; buffer_handle_t handle = nullptr; int ret = allocDevice_->alloc(allocDevice_, size.width, size.height, halPixelFormat, usage, &handle, &stride); if (ret) { LOG(HAL, Error) << "failed buffer allocation: " << ret; return nullptr; } if (!handle) { LOG(HAL, Fatal) << "invalid buffer_handle_t"; return nullptr; } /* This code assumes the planes are mapped consecutively.
*/ const libcamera::PixelFormat pixelFormat = cameraDevice_->capabilities()->toPixelFormat(halPixelFormat); const auto &info = PixelFormatInfo::info(pixelFormat); std::vector<FrameBuffer::Plane> planes(info.numPlanes()); SharedFD fd{ handle->data[0] }; size_t offset = 0; for (auto [i, plane] : utils::enumerate(planes)) { const size_t planeSize = info.planeSize(size.height, i, stride); plane.fd = fd; plane.offset = offset; plane.length = planeSize; offset += planeSize; } return std::make_unique<HALFrameBuffer>( std::make_unique<GenericFrameBufferData>( allocDevice_, handle, planes), handle); } PUBLIC_FRAME_BUFFER_ALLOCATOR_IMPLEMENTATION
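The consecutive-plane layout assumed in allocate() reduces to a running-offset loop; here it is on its own, with NV12 1920x1080 plane sizes as an illustrative example.

#include <cstddef>
#include <iostream>

int main()
{
	/* NV12 1920x1080 with an assumed tightly packed stride of 1920. */
	const size_t planeSizes[] = { 1920 * 1080, 1920 * 1080 / 2 };

	size_t offset = 0;
	for (size_t i = 0; i < 2; i++) {
		std::cout << "plane " << i << ": offset " << offset
			  << ", length " << planeSizes[i] << "\n";
		offset += planeSizes[i];
	}

	return 0;
}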
0
repos/libcamera/src/android
repos/libcamera/src/android/mm/cros_frame_buffer_allocator.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2021, Google Inc. * * Allocate FrameBuffer for Chromium OS using CameraBufferManager */ #include <memory> #include <vector> #include <libcamera/base/log.h> #include <libcamera/base/shared_fd.h> #include "libcamera/internal/framebuffer.h" #include "../camera_device.h" #include "../frame_buffer_allocator.h" #include "../hal_framebuffer.h" #include "cros-camera/camera_buffer_manager.h" using namespace libcamera; LOG_DECLARE_CATEGORY(HAL) namespace { class CrosFrameBufferData : public FrameBuffer::Private { LIBCAMERA_DECLARE_PUBLIC(FrameBuffer) public: CrosFrameBufferData(cros::ScopedBufferHandle scopedHandle, const std::vector<FrameBuffer::Plane> &planes) : FrameBuffer::Private(planes), scopedHandle_(std::move(scopedHandle)) { } private: cros::ScopedBufferHandle scopedHandle_; }; } /* namespace */ class PlatformFrameBufferAllocator::Private : public Extensible::Private { LIBCAMERA_DECLARE_PUBLIC(PlatformFrameBufferAllocator) public: Private([[maybe_unused]] CameraDevice *const cameraDevice) { } std::unique_ptr<HALFrameBuffer> allocate(int halPixelFormat, const libcamera::Size &size, uint32_t usage); }; std::unique_ptr<HALFrameBuffer> PlatformFrameBufferAllocator::Private::allocate(int halPixelFormat, const libcamera::Size &size, uint32_t usage) { cros::ScopedBufferHandle scopedHandle = cros::CameraBufferManager::AllocateScopedBuffer( size.width, size.height, halPixelFormat, usage); if (!scopedHandle) { LOG(HAL, Error) << "Failed to allocate buffer handle"; return nullptr; } buffer_handle_t handle = *scopedHandle; SharedFD fd{ handle->data[0] }; if (!fd.isValid()) { LOG(HAL, Fatal) << "Invalid fd"; return nullptr; } /* This code assumes all the planes are located in the same buffer. */ const size_t numPlanes = cros::CameraBufferManager::GetNumPlanes(handle); std::vector<FrameBuffer::Plane> planes(numPlanes); for (auto [i, plane] : utils::enumerate(planes)) { plane.fd = fd; plane.offset = cros::CameraBufferManager::GetPlaneOffset(handle, i); plane.length = cros::CameraBufferManager::GetPlaneSize(handle, i); } return std::make_unique<HALFrameBuffer>( std::make_unique<CrosFrameBufferData>(std::move(scopedHandle), planes), handle); } PUBLIC_FRAME_BUFFER_ALLOCATOR_IMPLEMENTATION
0
repos/libcamera/src/android
repos/libcamera/src/android/mm/cros_camera_buffer.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2021, Google Inc. * * Chromium OS buffer backend using CameraBufferManager */ #include "../camera_buffer.h" #include <libcamera/base/log.h> #include "cros-camera/camera_buffer_manager.h" using namespace libcamera; LOG_DECLARE_CATEGORY(HAL) class CameraBuffer::Private : public Extensible::Private { LIBCAMERA_DECLARE_PUBLIC(CameraBuffer) public: Private(CameraBuffer *cameraBuffer, buffer_handle_t camera3Buffer, PixelFormat pixelFormat, const Size &size, int flags); ~Private(); bool isValid() const { return registered_; } unsigned int numPlanes() const; Span<uint8_t> plane(unsigned int plane); unsigned int stride(unsigned int plane) const; unsigned int offset(unsigned int plane) const; unsigned int size(unsigned int plane) const; size_t jpegBufferSize(size_t maxJpegBufferSize) const; private: void map(); cros::CameraBufferManager *bufferManager_; buffer_handle_t handle_; unsigned int numPlanes_; bool mapped_; bool registered_; union { void *addr; android_ycbcr ycbcr; } mem; }; CameraBuffer::Private::Private([[maybe_unused]] CameraBuffer *cameraBuffer, buffer_handle_t camera3Buffer, [[maybe_unused]] PixelFormat pixelFormat, [[maybe_unused]] const Size &size, [[maybe_unused]] int flags) : handle_(camera3Buffer), numPlanes_(0), mapped_(false), registered_(false) { bufferManager_ = cros::CameraBufferManager::GetInstance(); if (!bufferManager_) { LOG(HAL, Fatal) << "Failed to get cros CameraBufferManager instance"; return; } int ret = bufferManager_->Register(camera3Buffer); if (ret) { LOG(HAL, Error) << "Failed registering a buffer: " << ret; return; } registered_ = true; numPlanes_ = bufferManager_->GetNumPlanes(camera3Buffer); } CameraBuffer::Private::~Private() { int ret; if (mapped_) { ret = bufferManager_->Unlock(handle_); if (ret != 0) LOG(HAL, Error) << "Failed to unlock buffer: " << strerror(-ret); } if (registered_) { ret = bufferManager_->Deregister(handle_); if (ret != 0) LOG(HAL, Error) << "Failed to deregister buffer: " << strerror(-ret); } } unsigned int CameraBuffer::Private::numPlanes() const { return bufferManager_->GetNumPlanes(handle_); } Span<uint8_t> CameraBuffer::Private::plane(unsigned int plane) { if (!mapped_) map(); if (!mapped_) return {}; void *addr; switch (numPlanes()) { case 1: addr = mem.addr; break; default: switch (plane) { case 0: addr = mem.ycbcr.y; break; case 1: addr = mem.ycbcr.cb; break; case 2: addr = mem.ycbcr.cr; break; } } return { static_cast<uint8_t *>(addr), bufferManager_->GetPlaneSize(handle_, plane) }; } unsigned int CameraBuffer::Private::stride(unsigned int plane) const { return cros::CameraBufferManager::GetPlaneStride(handle_, plane); } unsigned int CameraBuffer::Private::offset(unsigned int plane) const { return cros::CameraBufferManager::GetPlaneOffset(handle_, plane); } unsigned int CameraBuffer::Private::size(unsigned int plane) const { return cros::CameraBufferManager::GetPlaneSize(handle_, plane); } size_t CameraBuffer::Private::jpegBufferSize([[maybe_unused]] size_t maxJpegBufferSize) const { return bufferManager_->GetPlaneSize(handle_, 0); } void CameraBuffer::Private::map() { int ret; switch (numPlanes_) { case 1: { ret = bufferManager_->Lock(handle_, 0, 0, 0, 0, 0, &mem.addr); if (ret) { LOG(HAL, Error) << "Single plane buffer mapping failed"; return; } break; } case 2: case 3: { ret = bufferManager_->LockYCbCr(handle_, 0, 0, 0, 0, 0, &mem.ycbcr); if (ret) { LOG(HAL, Error) << "YCbCr buffer mapping failed"; return; } break; } default: LOG(HAL, Error) << "Invalid 
number of planes: " << numPlanes_; return; } mapped_ = true; return; } PUBLIC_CAMERA_BUFFER_IMPLEMENTATION
0
repos/libcamera/src/android
repos/libcamera/src/android/mm/generic_camera_buffer.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2021, Google Inc. * * Generic Android frame buffer backend */ #include "../camera_buffer.h" #include <sys/mman.h> #include <unistd.h> #include <libcamera/base/log.h> #include "libcamera/internal/formats.h" #include "libcamera/internal/mapped_framebuffer.h" using namespace libcamera; LOG_DECLARE_CATEGORY(HAL) class CameraBuffer::Private : public Extensible::Private, public MappedBuffer { LIBCAMERA_DECLARE_PUBLIC(CameraBuffer) public: Private(CameraBuffer *cameraBuffer, buffer_handle_t camera3Buffer, PixelFormat pixelFormat, const Size &size, int flags); ~Private(); unsigned int numPlanes() const; Span<uint8_t> plane(unsigned int plane); unsigned int stride(unsigned int plane) const; unsigned int offset(unsigned int plane) const; unsigned int size(unsigned int plane) const; size_t jpegBufferSize(size_t maxJpegBufferSize) const; private: struct PlaneInfo { unsigned int stride; unsigned int offset; unsigned int size; }; void map(); int fd_; int flags_; off_t bufferLength_; bool mapped_; std::vector<PlaneInfo> planeInfo_; }; CameraBuffer::Private::Private([[maybe_unused]] CameraBuffer *cameraBuffer, buffer_handle_t camera3Buffer, PixelFormat pixelFormat, const Size &size, int flags) : fd_(-1), flags_(flags), bufferLength_(-1), mapped_(false) { error_ = 0; const auto &info = PixelFormatInfo::info(pixelFormat); if (!info.isValid()) { error_ = -EINVAL; LOG(HAL, Error) << "Invalid pixel format: " << pixelFormat; return; } /* * As Android doesn't offer an API to query buffer layouts, assume for * now that the buffer is backed by a single dmabuf, with planes being * stored contiguously. */ for (int i = 0; i < camera3Buffer->numFds; i++) { if (camera3Buffer->data[i] == -1 || camera3Buffer->data[i] == fd_) continue; if (fd_ != -1) { error_ = -EINVAL; LOG(HAL, Error) << "Discontiguous planes are not supported"; return; } fd_ = camera3Buffer->data[i]; } if (fd_ == -1) { error_ = -EINVAL; LOG(HAL, Error) << "No valid file descriptor"; return; } bufferLength_ = lseek(fd_, 0, SEEK_END); if (bufferLength_ < 0) { error_ = -errno; LOG(HAL, Error) << "Failed to get buffer length"; return; } const unsigned int numPlanes = info.numPlanes(); planeInfo_.resize(numPlanes); unsigned int offset = 0; for (unsigned int i = 0; i < numPlanes; ++i) { const unsigned int planeSize = info.planeSize(size, i); planeInfo_[i].stride = info.stride(size.width, i, 1u); planeInfo_[i].offset = offset; planeInfo_[i].size = planeSize; if (bufferLength_ < offset + planeSize) { LOG(HAL, Error) << "Plane " << i << " is out of buffer:" << " plane offset=" << offset << ", plane size=" << planeSize << ", buffer length=" << bufferLength_; return; } offset += planeSize; } } CameraBuffer::Private::~Private() { } unsigned int CameraBuffer::Private::numPlanes() const { return planeInfo_.size(); } Span<uint8_t> CameraBuffer::Private::plane(unsigned int plane) { if (!mapped_) map(); if (!mapped_) return {}; return planes_[plane]; } unsigned int CameraBuffer::Private::stride(unsigned int plane) const { if (plane >= planeInfo_.size()) return 0; return planeInfo_[plane].stride; } unsigned int CameraBuffer::Private::offset(unsigned int plane) const { if (plane >= planeInfo_.size()) return 0; return planeInfo_[plane].offset; } unsigned int CameraBuffer::Private::size(unsigned int plane) const { if (plane >= planeInfo_.size()) return 0; return planeInfo_[plane].size; } size_t CameraBuffer::Private::jpegBufferSize(size_t maxJpegBufferSize) const { ASSERT(bufferLength_ >= 0); return 
std::min<unsigned int>(bufferLength_, maxJpegBufferSize); } void CameraBuffer::Private::map() { ASSERT(fd_ != -1); ASSERT(bufferLength_ >= 0); void *address = mmap(nullptr, bufferLength_, flags_, MAP_SHARED, fd_, 0); if (address == MAP_FAILED) { error_ = -errno; LOG(HAL, Error) << "Failed to mmap plane"; return; } maps_.emplace_back(static_cast<uint8_t *>(address), bufferLength_); planes_.reserve(planeInfo_.size()); for (const auto &info : planeInfo_) { planes_.emplace_back( static_cast<uint8_t *>(address) + info.offset, info.size); } mapped_ = true; } PUBLIC_CAMERA_BUFFER_IMPLEMENTATION
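The constructor above discovers the buffer size with lseek() because Android offers no layout query. The sketch below repeats that length check against a plane layout, using an ordinary readable file as a stand-in for the dmabuf fd; the path and plane sizes are arbitrary.

#include <cstdio>
#include <fcntl.h>
#include <unistd.h>

int main()
{
	/* Stand-in for the dmabuf fd taken from buffer_handle_t::data[];
	 * any readable, seekable file works for the demonstration. */
	int fd = open("/etc/hostname", O_RDONLY);
	if (fd < 0)
		return 1;

	off_t bufferLength = lseek(fd, 0, SEEK_END);
	if (bufferLength < 0) {
		perror("lseek");
		close(fd);
		return 1;
	}

	const off_t planeOffset = 0, planeSize = 4096; /* illustrative */
	if (bufferLength < planeOffset + planeSize)
		std::printf("plane out of buffer (length=%lld)\n",
			    static_cast<long long>(bufferLength));

	close(fd);
	return 0;
}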
0
repos/libcamera/src/android
repos/libcamera/src/android/mm/libhardware_stub.c
/* SPDX-License-Identifier: Apache-2.0 */ /* * Copyright (C) 2023, Ideas on Board * * Android libhardware stub for test compilation */ #include <errno.h> #include <hardware/hardware.h> int hw_get_module(const char *id __attribute__((__unused__)), const struct hw_module_t **module) { *module = NULL; return -ENOTSUP; }
0
repos/libcamera/src/android
repos/libcamera/src/android/cros/camera3_hal.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2021, Google Inc. * * cros-specific components of Android Camera HALv3 module */ #include <cros-camera/cros_camera_hal.h> #include "../camera_hal_manager.h" #include "../cros_mojo_token.h" static void set_up(cros::CameraMojoChannelManagerToken *token) { gCrosMojoToken = token; } static void tear_down() { delete CameraHalManager::instance(); } cros::cros_camera_hal_t CROS_CAMERA_EXPORT CROS_CAMERA_HAL_INFO_SYM = { .set_up = set_up, .tear_down = tear_down };
0
repos/libcamera/src/android
repos/libcamera/src/android/metadata/camera_metadata.c
/* SPDX-License-Identifier: Apache-2.0 */ /* * Copyright (C) 2012 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #define LOG_TAG "camera_metadata" /* * Replace ALOGE() with a fprintf to stderr so that we don't need to * re-implement Android's logging system. The log/log.h header file is no * longer necessary once we removed dependency on ALOGE(). */ #define ALOGE(...) fprintf(stderr, LOG_TAG __VA_ARGS__) #include <system/camera_metadata.h> #include <camera_metadata_hidden.h> #include <assert.h> #include <errno.h> #include <inttypes.h> #include <stddef.h> // for offsetof #include <stdio.h> #include <stdlib.h> #define OK 0 #define ERROR 1 #define NOT_FOUND (-ENOENT) #define SN_EVENT_LOG_ID 0x534e4554 #define ALIGN_TO(val, alignment) \ (((uintptr_t)(val) + ((alignment) - 1)) & ~((alignment) - 1)) /** * A single metadata entry, storing an array of values of a given type. If the * array is no larger than 4 bytes in size, it is stored in the data.value[] * array; otherwise, it can found in the parent's data array at index * data.offset. */ #define ENTRY_ALIGNMENT ((size_t) 4) typedef struct camera_metadata_buffer_entry { uint32_t tag; uint32_t count; union { uint32_t offset; uint8_t value[4]; } data; uint8_t type; uint8_t reserved[3]; } camera_metadata_buffer_entry_t; typedef uint32_t metadata_uptrdiff_t; typedef uint32_t metadata_size_t; /** * A packet of metadata. This is a list of entries, each of which may point to * its values stored at an offset in data. * * It is assumed by the utility functions that the memory layout of the packet * is as follows: * * |-----------------------------------------------| * | camera_metadata_t | * | | * |-----------------------------------------------| * | reserved for future expansion | * |-----------------------------------------------| * | camera_metadata_buffer_entry_t #0 | * |-----------------------------------------------| * | .... | * |-----------------------------------------------| * | camera_metadata_buffer_entry_t #entry_count-1 | * |-----------------------------------------------| * | free space for | * | (entry_capacity-entry_count) entries | * |-----------------------------------------------| * | start of camera_metadata.data | * | | * |-----------------------------------------------| * | free space for | * | (data_capacity-data_count) bytes | * |-----------------------------------------------| * * With the total length of the whole packet being camera_metadata.size bytes. * * In short, the entries and data are contiguous in memory after the metadata * header. 
*/ #define METADATA_ALIGNMENT ((size_t) 4) struct camera_metadata { metadata_size_t size; uint32_t version; uint32_t flags; metadata_size_t entry_count; metadata_size_t entry_capacity; metadata_uptrdiff_t entries_start; // Offset from camera_metadata metadata_size_t data_count; metadata_size_t data_capacity; metadata_uptrdiff_t data_start; // Offset from camera_metadata uint32_t padding; // padding to 8 bytes boundary metadata_vendor_id_t vendor_id; }; /** * A datum of metadata. This corresponds to camera_metadata_entry_t::data * with the difference that each element is not a pointer. We need to have a * non-pointer type description in order to figure out the largest alignment * requirement for data (DATA_ALIGNMENT). */ #define DATA_ALIGNMENT ((size_t) 8) typedef union camera_metadata_data { uint8_t u8; int32_t i32; float f; int64_t i64; double d; camera_metadata_rational_t r; } camera_metadata_data_t; _Static_assert(sizeof(metadata_size_t) == 4, "Size of metadata_size_t must be 4"); _Static_assert(sizeof(metadata_uptrdiff_t) == 4, "Size of metadata_uptrdiff_t must be 4"); _Static_assert(sizeof(metadata_vendor_id_t) == 8, "Size of metadata_vendor_id_t must be 8"); _Static_assert(sizeof(camera_metadata_data_t) == 8, "Size of camera_metadata_data_t must be 8"); _Static_assert(offsetof(camera_metadata_buffer_entry_t, tag) == 0, "Offset of tag must be 0"); _Static_assert(offsetof(camera_metadata_buffer_entry_t, count) == 4, "Offset of count must be 4"); _Static_assert(offsetof(camera_metadata_buffer_entry_t, data) == 8, "Offset of data must be 8"); _Static_assert(offsetof(camera_metadata_buffer_entry_t, type) == 12, "Offset of type must be 12"); _Static_assert(sizeof(camera_metadata_buffer_entry_t) == 16, "Size of camera_metadata_buffer_entry_t must be 16"); _Static_assert(offsetof(camera_metadata_t, size) == 0, "Offset of size must be 0"); _Static_assert(offsetof(camera_metadata_t, version) == 4, "Offset of version must be 4"); _Static_assert(offsetof(camera_metadata_t, flags) == 8, "Offset of flags must be 8"); _Static_assert(offsetof(camera_metadata_t, entry_count) == 12, "Offset of entry_count must be 12"); _Static_assert(offsetof(camera_metadata_t, entry_capacity) == 16, "Offset of entry_capacity must be 16"); _Static_assert(offsetof(camera_metadata_t, entries_start) == 20, "Offset of entries_start must be 20"); _Static_assert(offsetof(camera_metadata_t, data_count) == 24, "Offset of data_count must be 24"); _Static_assert(offsetof(camera_metadata_t, data_capacity) == 28, "Offset of data_capacity must be 28"); _Static_assert(offsetof(camera_metadata_t, data_start) == 32, "Offset of data_start must be 32"); _Static_assert(offsetof(camera_metadata_t, vendor_id) == 40, "Offset of vendor_id must be 40"); _Static_assert(sizeof(camera_metadata_t) == 48, "Size of camera_metadata_t must be 48"); /** * The preferred alignment of a packet of camera metadata. In general, * this is the lowest common multiple of the constituents of a metadata * package, i.e, of DATA_ALIGNMENT and ENTRY_ALIGNMENT. */ #define MAX_ALIGNMENT(A, B) (((A) > (B)) ? 
(A) : (B)) #define METADATA_PACKET_ALIGNMENT \ MAX_ALIGNMENT(MAX_ALIGNMENT(DATA_ALIGNMENT, METADATA_ALIGNMENT), ENTRY_ALIGNMENT) /** Versioning information */ #define CURRENT_METADATA_VERSION 1 /** Flag definitions */ #define FLAG_SORTED 0x00000001 /** Tag information */ typedef struct tag_info { const char *tag_name; uint8_t tag_type; } tag_info_t; #include "camera_metadata_tag_info.c" const size_t camera_metadata_type_size[NUM_TYPES] = { [TYPE_BYTE] = sizeof(uint8_t), [TYPE_INT32] = sizeof(int32_t), [TYPE_FLOAT] = sizeof(float), [TYPE_INT64] = sizeof(int64_t), [TYPE_DOUBLE] = sizeof(double), [TYPE_RATIONAL] = sizeof(camera_metadata_rational_t) }; const char *camera_metadata_type_names[NUM_TYPES] = { [TYPE_BYTE] = "byte", [TYPE_INT32] = "int32", [TYPE_FLOAT] = "float", [TYPE_INT64] = "int64", [TYPE_DOUBLE] = "double", [TYPE_RATIONAL] = "rational" }; static camera_metadata_buffer_entry_t *get_entries( const camera_metadata_t *metadata) { return (camera_metadata_buffer_entry_t*) ((uint8_t*)metadata + metadata->entries_start); } static uint8_t *get_data(const camera_metadata_t *metadata) { return (uint8_t*)metadata + metadata->data_start; } size_t get_camera_metadata_alignment() { return METADATA_PACKET_ALIGNMENT; } camera_metadata_t *allocate_copy_camera_metadata_checked( const camera_metadata_t *src, size_t src_size) { if (src == NULL) { return NULL; } if (src_size < sizeof(camera_metadata_t)) { ALOGE("%s: Source size too small!", __FUNCTION__); // android_errorWriteLog(0x534e4554, "67782345"); return NULL; } void *buffer = malloc(src_size); memcpy(buffer, src, src_size); camera_metadata_t *metadata = (camera_metadata_t*) buffer; if (validate_camera_metadata_structure(metadata, &src_size) != OK) { free(buffer); return NULL; } return metadata; } camera_metadata_t *allocate_camera_metadata(size_t entry_capacity, size_t data_capacity) { size_t memory_needed = calculate_camera_metadata_size(entry_capacity, data_capacity); void *buffer = malloc(memory_needed); camera_metadata_t *metadata = place_camera_metadata( buffer, memory_needed, entry_capacity, data_capacity); if (!metadata) { /* This should not happen when memory_needed is the same * calculated in this function and in place_camera_metadata. 
*/ free(buffer); } return metadata; } camera_metadata_t *place_camera_metadata(void *dst, size_t dst_size, size_t entry_capacity, size_t data_capacity) { if (dst == NULL) return NULL; size_t memory_needed = calculate_camera_metadata_size(entry_capacity, data_capacity); if (memory_needed > dst_size) return NULL; camera_metadata_t *metadata = (camera_metadata_t*)dst; metadata->version = CURRENT_METADATA_VERSION; metadata->flags = 0; metadata->entry_count = 0; metadata->entry_capacity = entry_capacity; metadata->entries_start = ALIGN_TO(sizeof(camera_metadata_t), ENTRY_ALIGNMENT); metadata->data_count = 0; metadata->data_capacity = data_capacity; metadata->size = memory_needed; size_t data_unaligned = (uint8_t*)(get_entries(metadata) + metadata->entry_capacity) - (uint8_t*)metadata; metadata->data_start = ALIGN_TO(data_unaligned, DATA_ALIGNMENT); metadata->vendor_id = CAMERA_METADATA_INVALID_VENDOR_ID; assert(validate_camera_metadata_structure(metadata, NULL) == OK); return metadata; } void free_camera_metadata(camera_metadata_t *metadata) { free(metadata); } size_t calculate_camera_metadata_size(size_t entry_count, size_t data_count) { size_t memory_needed = sizeof(camera_metadata_t); // Start entry list at aligned boundary memory_needed = ALIGN_TO(memory_needed, ENTRY_ALIGNMENT); memory_needed += sizeof(camera_metadata_buffer_entry_t[entry_count]); // Start buffer list at aligned boundary memory_needed = ALIGN_TO(memory_needed, DATA_ALIGNMENT); memory_needed += sizeof(uint8_t[data_count]); // Make sure camera metadata can be stacked in continuous memory memory_needed = ALIGN_TO(memory_needed, METADATA_PACKET_ALIGNMENT); return memory_needed; } size_t get_camera_metadata_size(const camera_metadata_t *metadata) { if (metadata == NULL) return ERROR; return metadata->size; } size_t get_camera_metadata_compact_size(const camera_metadata_t *metadata) { if (metadata == NULL) return ERROR; return calculate_camera_metadata_size(metadata->entry_count, metadata->data_count); } size_t get_camera_metadata_entry_count(const camera_metadata_t *metadata) { return metadata->entry_count; } size_t get_camera_metadata_entry_capacity(const camera_metadata_t *metadata) { return metadata->entry_capacity; } size_t get_camera_metadata_data_count(const camera_metadata_t *metadata) { return metadata->data_count; } size_t get_camera_metadata_data_capacity(const camera_metadata_t *metadata) { return metadata->data_capacity; } camera_metadata_t* copy_camera_metadata(void *dst, size_t dst_size, const camera_metadata_t *src) { size_t memory_needed = get_camera_metadata_compact_size(src); if (dst == NULL) return NULL; if (dst_size < memory_needed) return NULL; camera_metadata_t *metadata = place_camera_metadata(dst, dst_size, src->entry_count, src->data_count); metadata->flags = src->flags; metadata->entry_count = src->entry_count; metadata->data_count = src->data_count; metadata->vendor_id = src->vendor_id; memcpy(get_entries(metadata), get_entries(src), sizeof(camera_metadata_buffer_entry_t[metadata->entry_count])); memcpy(get_data(metadata), get_data(src), sizeof(uint8_t[metadata->data_count])); assert(validate_camera_metadata_structure(metadata, NULL) == OK); return metadata; } // This method should be used when the camera metadata cannot be trusted. For example, when it's // read from Parcel. 
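// The overflow guard below rejects any data_count for which
// data_count * camera_metadata_type_size[type] could exceed
// SIZE_MAX - DATA_ALIGNMENT + 1, i.e. any payload size whose subsequent
// ALIGN_TO(..., DATA_ALIGNMENT) rounding would wrap around SIZE_MAX.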
static int validate_and_calculate_camera_metadata_entry_data_size(size_t *data_size, uint8_t type, size_t data_count) { if (type >= NUM_TYPES) return ERROR; // Check for overflow if (data_count != 0 && camera_metadata_type_size[type] > (SIZE_MAX - DATA_ALIGNMENT + 1) / data_count) { // android_errorWriteLog(SN_EVENT_LOG_ID, "30741779"); return ERROR; } size_t data_bytes = data_count * camera_metadata_type_size[type]; if (data_size) { *data_size = data_bytes <= 4 ? 0 : ALIGN_TO(data_bytes, DATA_ALIGNMENT); } return OK; } size_t calculate_camera_metadata_entry_data_size(uint8_t type, size_t data_count) { if (type >= NUM_TYPES) return 0; size_t data_bytes = data_count * camera_metadata_type_size[type]; return data_bytes <= 4 ? 0 : ALIGN_TO(data_bytes, DATA_ALIGNMENT); } int validate_camera_metadata_structure(const camera_metadata_t *metadata, const size_t *expected_size) { if (metadata == NULL) { ALOGE("%s: metadata is null!", __FUNCTION__); return CAMERA_METADATA_VALIDATION_ERROR; } uintptr_t aligned_ptr = ALIGN_TO(metadata, METADATA_PACKET_ALIGNMENT); const uintptr_t alignmentOffset = aligned_ptr - (uintptr_t) metadata; // Check that the metadata pointer is well-aligned first. { static const struct { const char *name; size_t alignment; } alignments[] = { { .name = "camera_metadata", .alignment = METADATA_ALIGNMENT }, { .name = "camera_metadata_buffer_entry", .alignment = ENTRY_ALIGNMENT }, { .name = "camera_metadata_data", .alignment = DATA_ALIGNMENT }, }; for (size_t i = 0; i < sizeof(alignments)/sizeof(alignments[0]); ++i) { uintptr_t aligned_ptr = ALIGN_TO((uintptr_t) metadata + alignmentOffset, alignments[i].alignment); if ((uintptr_t)metadata + alignmentOffset != aligned_ptr) { ALOGE("%s: Metadata pointer is not aligned (actual %p, " "expected %p, offset %" PRIuPTR ") to type %s", __FUNCTION__, metadata, (void*)aligned_ptr, alignmentOffset, alignments[i].name); return CAMERA_METADATA_VALIDATION_ERROR; } } } /** * Check that the metadata contents are correct */ if (expected_size != NULL && metadata->size > *expected_size) { ALOGE("%s: Metadata size (%" PRIu32 ") should be <= expected size (%zu)", __FUNCTION__, metadata->size, *expected_size); return CAMERA_METADATA_VALIDATION_ERROR; } if (metadata->entry_count > metadata->entry_capacity) { ALOGE("%s: Entry count (%" PRIu32 ") should be <= entry capacity " "(%" PRIu32 ")", __FUNCTION__, metadata->entry_count, metadata->entry_capacity); return CAMERA_METADATA_VALIDATION_ERROR; } if (metadata->data_count > metadata->data_capacity) { ALOGE("%s: Data count (%" PRIu32 ") should be <= data capacity " "(%" PRIu32 ")", __FUNCTION__, metadata->data_count, metadata->data_capacity); // android_errorWriteLog(SN_EVENT_LOG_ID, "30591838"); return CAMERA_METADATA_VALIDATION_ERROR; } const metadata_uptrdiff_t entries_end = metadata->entries_start + metadata->entry_capacity; if (entries_end < metadata->entries_start || // overflow check entries_end > metadata->data_start) { ALOGE("%s: Entry start + capacity (%" PRIu32 ") should be <= data start " "(%" PRIu32 ")", __FUNCTION__, (metadata->entries_start + metadata->entry_capacity), metadata->data_start); return CAMERA_METADATA_VALIDATION_ERROR; } const metadata_uptrdiff_t data_end = metadata->data_start + metadata->data_capacity; if (data_end < metadata->data_start || // overflow check data_end > metadata->size) { ALOGE("%s: Data start + capacity (%" PRIu32 ") should be <= total size " "(%" PRIu32 ")", __FUNCTION__, (metadata->data_start + metadata->data_capacity), metadata->size); return 
CAMERA_METADATA_VALIDATION_ERROR; } // Validate each entry const metadata_size_t entry_count = metadata->entry_count; camera_metadata_buffer_entry_t *entries = get_entries(metadata); for (size_t i = 0; i < entry_count; ++i) { if ((uintptr_t)&entries[i] + alignmentOffset != ALIGN_TO((uintptr_t)&entries[i] + alignmentOffset, ENTRY_ALIGNMENT)) { ALOGE("%s: Entry index %zu had bad alignment (address %p)," " expected alignment %zu", __FUNCTION__, i, &entries[i], ENTRY_ALIGNMENT); return CAMERA_METADATA_VALIDATION_ERROR; } camera_metadata_buffer_entry_t entry = entries[i]; if (entry.type >= NUM_TYPES) { ALOGE("%s: Entry index %zu had a bad type %d", __FUNCTION__, i, entry.type); return CAMERA_METADATA_VALIDATION_ERROR; } // TODO: fix vendor_tag_ops across processes so we don't need to special // case vendor-specific tags uint32_t tag_section = entry.tag >> 16; int tag_type = get_local_camera_metadata_tag_type(entry.tag, metadata); if (tag_type != (int)entry.type && tag_section < VENDOR_SECTION) { ALOGE("%s: Entry index %zu had tag type %d, but the type was %d", __FUNCTION__, i, tag_type, entry.type); return CAMERA_METADATA_VALIDATION_ERROR; } size_t data_size; if (validate_and_calculate_camera_metadata_entry_data_size(&data_size, entry.type, entry.count) != OK) { ALOGE("%s: Entry data size is invalid. type: %u count: %u", __FUNCTION__, entry.type, entry.count); return CAMERA_METADATA_VALIDATION_ERROR; } if (data_size != 0) { camera_metadata_data_t *data = (camera_metadata_data_t*) (get_data(metadata) + entry.data.offset); if ((uintptr_t)data + alignmentOffset != ALIGN_TO((uintptr_t)data + alignmentOffset, DATA_ALIGNMENT)) { ALOGE("%s: Entry index %zu had bad data alignment (address %p)," " expected align %zu, (tag name %s, data size %zu)", __FUNCTION__, i, data, DATA_ALIGNMENT, get_local_camera_metadata_tag_name(entry.tag, metadata) ? : "unknown", data_size); return CAMERA_METADATA_VALIDATION_ERROR; } size_t data_entry_end = entry.data.offset + data_size; if (data_entry_end < entry.data.offset || // overflow check data_entry_end > metadata->data_capacity) { ALOGE("%s: Entry index %zu data ends (%zu) beyond the capacity " "%" PRIu32, __FUNCTION__, i, data_entry_end, metadata->data_capacity); return CAMERA_METADATA_VALIDATION_ERROR; } } else if (entry.count == 0) { if (entry.data.offset != 0) { ALOGE("%s: Entry index %zu had 0 items, but offset was non-0 " "(%" PRIu32 "), tag name: %s", __FUNCTION__, i, entry.data.offset, get_local_camera_metadata_tag_name(entry.tag, metadata) ? : "unknown"); return CAMERA_METADATA_VALIDATION_ERROR; } } // else data stored inline, so we look at value which can be anything. 
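        // Payloads of at most 4 bytes are stored inline in entry.data.value,
        // so there is no offset to range-check; only out-of-line payloads
        // (data_size != 0) are validated against data_capacity above.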
    }

    if (alignmentOffset == 0) {
        return OK;
    }

    return CAMERA_METADATA_VALIDATION_SHIFTED;
}

int append_camera_metadata(camera_metadata_t *dst,
        const camera_metadata_t *src) {
    if (dst == NULL || src == NULL) return ERROR;

    // Check for overflow
    if (src->entry_count + dst->entry_count < src->entry_count) return ERROR;
    if (src->data_count + dst->data_count < src->data_count) return ERROR;
    // Check for space
    if (dst->entry_capacity < src->entry_count + dst->entry_count) return ERROR;
    if (dst->data_capacity < src->data_count + dst->data_count) return ERROR;

    if ((dst->vendor_id != CAMERA_METADATA_INVALID_VENDOR_ID) &&
            (src->vendor_id != CAMERA_METADATA_INVALID_VENDOR_ID)) {
        if (dst->vendor_id != src->vendor_id) {
            ALOGE("%s: Append for metadata from different vendors is "
                    "not supported!", __func__);
            return ERROR;
        }
    }

    memcpy(get_entries(dst) + dst->entry_count, get_entries(src),
            sizeof(camera_metadata_buffer_entry_t[src->entry_count]));
    memcpy(get_data(dst) + dst->data_count, get_data(src),
            sizeof(uint8_t[src->data_count]));
    if (dst->data_count != 0) {
        camera_metadata_buffer_entry_t *entry = get_entries(dst) + dst->entry_count;
        for (size_t i = 0; i < src->entry_count; i++, entry++) {
            if ( calculate_camera_metadata_entry_data_size(entry->type,
                            entry->count) > 0 ) {
                entry->data.offset += dst->data_count;
            }
        }
    }
    if (dst->entry_count == 0) {
        // Appending onto empty buffer, keep sorted state
        dst->flags |= src->flags & FLAG_SORTED;
    } else if (src->entry_count != 0) {
        // Both src, dst are nonempty, cannot assume sort remains
        dst->flags &= ~FLAG_SORTED;
    } else {
        // Src is empty, keep dst sorted state
    }
    dst->entry_count += src->entry_count;
    dst->data_count += src->data_count;

    if (dst->vendor_id == CAMERA_METADATA_INVALID_VENDOR_ID) {
        dst->vendor_id = src->vendor_id;
    }

    assert(validate_camera_metadata_structure(dst, NULL) == OK);
    return OK;
}

camera_metadata_t *clone_camera_metadata(const camera_metadata_t *src) {
    int res;
    if (src == NULL) return NULL;
    camera_metadata_t *clone = allocate_camera_metadata(
        get_camera_metadata_entry_count(src),
        get_camera_metadata_data_count(src));
    if (clone != NULL) {
        res = append_camera_metadata(clone, src);
        if (res != OK) {
            free_camera_metadata(clone);
            clone = NULL;
        }
    }
    assert(validate_camera_metadata_structure(clone, NULL) == OK);
    return clone;
}

static int add_camera_metadata_entry_raw(camera_metadata_t *dst,
        uint32_t tag,
        uint8_t type,
        const void *data,
        size_t data_count) {

    if (dst == NULL) return ERROR;
    if (dst->entry_count == dst->entry_capacity) return ERROR;
    if (data_count && data == NULL) return ERROR;

    size_t data_bytes =
            calculate_camera_metadata_entry_data_size(type, data_count);
    if (data_bytes + dst->data_count > dst->data_capacity) return ERROR;

    size_t data_payload_bytes =
            data_count * camera_metadata_type_size[type];
    camera_metadata_buffer_entry_t *entry = get_entries(dst) + dst->entry_count;
    memset(entry, 0, sizeof(camera_metadata_buffer_entry_t));
    entry->tag = tag;
    entry->type = type;
    entry->count = data_count;

    if (data_bytes == 0) {
        memcpy(entry->data.value, data, data_payload_bytes);
    } else {
        entry->data.offset = dst->data_count;
        memcpy(get_data(dst) + entry->data.offset, data, data_payload_bytes);
        dst->data_count += data_bytes;
    }
    dst->entry_count++;
    dst->flags &= ~FLAG_SORTED;
    assert(validate_camera_metadata_structure(dst, NULL) == OK);
    return OK;
}

int add_camera_metadata_entry(camera_metadata_t *dst,
        uint32_t tag,
        const void *data,
        size_t data_count) {

    int type = get_local_camera_metadata_tag_type(tag, dst);
    if (type == -1) {
        ALOGE("%s: Unknown tag %04x.",
__FUNCTION__, tag); return ERROR; } return add_camera_metadata_entry_raw(dst, tag, type, data, data_count); } static int compare_entry_tags(const void *p1, const void *p2) { uint32_t tag1 = ((camera_metadata_buffer_entry_t*)p1)->tag; uint32_t tag2 = ((camera_metadata_buffer_entry_t*)p2)->tag; return tag1 < tag2 ? -1 : tag1 == tag2 ? 0 : 1; } int sort_camera_metadata(camera_metadata_t *dst) { if (dst == NULL) return ERROR; if (dst->flags & FLAG_SORTED) return OK; qsort(get_entries(dst), dst->entry_count, sizeof(camera_metadata_buffer_entry_t), compare_entry_tags); dst->flags |= FLAG_SORTED; assert(validate_camera_metadata_structure(dst, NULL) == OK); return OK; } int get_camera_metadata_entry(camera_metadata_t *src, size_t index, camera_metadata_entry_t *entry) { if (src == NULL || entry == NULL) return ERROR; if (index >= src->entry_count) return ERROR; camera_metadata_buffer_entry_t *buffer_entry = get_entries(src) + index; entry->index = index; entry->tag = buffer_entry->tag; entry->type = buffer_entry->type; entry->count = buffer_entry->count; if (buffer_entry->count * camera_metadata_type_size[buffer_entry->type] > 4) { entry->data.u8 = get_data(src) + buffer_entry->data.offset; } else { entry->data.u8 = buffer_entry->data.value; } return OK; } int get_camera_metadata_ro_entry(const camera_metadata_t *src, size_t index, camera_metadata_ro_entry_t *entry) { return get_camera_metadata_entry((camera_metadata_t*)src, index, (camera_metadata_entry_t*)entry); } int find_camera_metadata_entry(camera_metadata_t *src, uint32_t tag, camera_metadata_entry_t *entry) { if (src == NULL) return ERROR; uint32_t index; if (src->flags & FLAG_SORTED) { // Sorted entries, do a binary search camera_metadata_buffer_entry_t *search_entry = NULL; camera_metadata_buffer_entry_t key; key.tag = tag; search_entry = bsearch(&key, get_entries(src), src->entry_count, sizeof(camera_metadata_buffer_entry_t), compare_entry_tags); if (search_entry == NULL) return NOT_FOUND; index = search_entry - get_entries(src); } else { // Not sorted, linear search camera_metadata_buffer_entry_t *search_entry = get_entries(src); for (index = 0; index < src->entry_count; index++, search_entry++) { if (search_entry->tag == tag) { break; } } if (index == src->entry_count) return NOT_FOUND; } return get_camera_metadata_entry(src, index, entry); } int find_camera_metadata_ro_entry(const camera_metadata_t *src, uint32_t tag, camera_metadata_ro_entry_t *entry) { return find_camera_metadata_entry((camera_metadata_t*)src, tag, (camera_metadata_entry_t*)entry); } int delete_camera_metadata_entry(camera_metadata_t *dst, size_t index) { if (dst == NULL) return ERROR; if (index >= dst->entry_count) return ERROR; camera_metadata_buffer_entry_t *entry = get_entries(dst) + index; size_t data_bytes = calculate_camera_metadata_entry_data_size(entry->type, entry->count); if (data_bytes > 0) { // Shift data buffer to overwrite deleted data uint8_t *start = get_data(dst) + entry->data.offset; uint8_t *end = start + data_bytes; size_t length = dst->data_count - entry->data.offset - data_bytes; memmove(start, end, length); // Update all entry indices to account for shift camera_metadata_buffer_entry_t *e = get_entries(dst); size_t i; for (i = 0; i < dst->entry_count; i++) { if (calculate_camera_metadata_entry_data_size( e->type, e->count) > 0 && e->data.offset > entry->data.offset) { e->data.offset -= data_bytes; } ++e; } dst->data_count -= data_bytes; } // Shift entry array memmove(entry, entry + 1, sizeof(camera_metadata_buffer_entry_t) * 
(dst->entry_count - index - 1) ); dst->entry_count -= 1; assert(validate_camera_metadata_structure(dst, NULL) == OK); return OK; } int update_camera_metadata_entry(camera_metadata_t *dst, size_t index, const void *data, size_t data_count, camera_metadata_entry_t *updated_entry) { if (dst == NULL) return ERROR; if (index >= dst->entry_count) return ERROR; camera_metadata_buffer_entry_t *entry = get_entries(dst) + index; size_t data_bytes = calculate_camera_metadata_entry_data_size(entry->type, data_count); size_t data_payload_bytes = data_count * camera_metadata_type_size[entry->type]; size_t entry_bytes = calculate_camera_metadata_entry_data_size(entry->type, entry->count); if (data_bytes != entry_bytes) { // May need to shift/add to data array if (dst->data_capacity < dst->data_count + data_bytes - entry_bytes) { // No room return ERROR; } if (entry_bytes != 0) { // Remove old data uint8_t *start = get_data(dst) + entry->data.offset; uint8_t *end = start + entry_bytes; size_t length = dst->data_count - entry->data.offset - entry_bytes; memmove(start, end, length); dst->data_count -= entry_bytes; // Update all entry indices to account for shift camera_metadata_buffer_entry_t *e = get_entries(dst); size_t i; for (i = 0; i < dst->entry_count; i++) { if (calculate_camera_metadata_entry_data_size( e->type, e->count) > 0 && e->data.offset > entry->data.offset) { e->data.offset -= entry_bytes; } ++e; } } if (data_bytes != 0) { // Append new data entry->data.offset = dst->data_count; memcpy(get_data(dst) + entry->data.offset, data, data_payload_bytes); dst->data_count += data_bytes; } } else if (data_bytes != 0) { // data size unchanged, reuse same data location memcpy(get_data(dst) + entry->data.offset, data, data_payload_bytes); } if (data_bytes == 0) { // Data fits into entry memcpy(entry->data.value, data, data_payload_bytes); } entry->count = data_count; if (updated_entry != NULL) { get_camera_metadata_entry(dst, index, updated_entry); } assert(validate_camera_metadata_structure(dst, NULL) == OK); return OK; } static const vendor_tag_ops_t *vendor_tag_ops = NULL; static const struct vendor_tag_cache_ops *vendor_cache_ops = NULL; // Declared in system/media/private/camera/include/camera_metadata_hidden.h const char *get_local_camera_metadata_section_name_vendor_id(uint32_t tag, metadata_vendor_id_t id) { uint32_t tag_section = tag >> 16; if (tag_section >= VENDOR_SECTION && vendor_cache_ops != NULL && id != CAMERA_METADATA_INVALID_VENDOR_ID) { return vendor_cache_ops->get_section_name(tag, id); } else if (tag_section >= VENDOR_SECTION && vendor_tag_ops != NULL) { return vendor_tag_ops->get_section_name( vendor_tag_ops, tag); } if (tag_section >= ANDROID_SECTION_COUNT) { return NULL; } return camera_metadata_section_names[tag_section]; } // Declared in system/media/private/camera/include/camera_metadata_hidden.h const char *get_local_camera_metadata_tag_name_vendor_id(uint32_t tag, metadata_vendor_id_t id) { uint32_t tag_section = tag >> 16; if (tag_section >= VENDOR_SECTION && vendor_cache_ops != NULL && id != CAMERA_METADATA_INVALID_VENDOR_ID) { return vendor_cache_ops->get_tag_name(tag, id); } else if (tag_section >= VENDOR_SECTION && vendor_tag_ops != NULL) { return vendor_tag_ops->get_tag_name( vendor_tag_ops, tag); } if (tag_section >= ANDROID_SECTION_COUNT || tag >= camera_metadata_section_bounds[tag_section][1] ) { return NULL; } uint32_t tag_index = tag & 0xFFFF; return tag_info[tag_section][tag_index].tag_name; } // Declared in 
system/media/private/camera/include/camera_metadata_hidden.h int get_local_camera_metadata_tag_type_vendor_id(uint32_t tag, metadata_vendor_id_t id) { uint32_t tag_section = tag >> 16; if (tag_section >= VENDOR_SECTION && vendor_cache_ops != NULL && id != CAMERA_METADATA_INVALID_VENDOR_ID) { return vendor_cache_ops->get_tag_type(tag, id); } else if (tag_section >= VENDOR_SECTION && vendor_tag_ops != NULL) { return vendor_tag_ops->get_tag_type( vendor_tag_ops, tag); } if (tag_section >= ANDROID_SECTION_COUNT || tag >= camera_metadata_section_bounds[tag_section][1] ) { return -1; } uint32_t tag_index = tag & 0xFFFF; return tag_info[tag_section][tag_index].tag_type; } const char *get_camera_metadata_section_name(uint32_t tag) { return get_local_camera_metadata_section_name(tag, NULL); } const char *get_camera_metadata_tag_name(uint32_t tag) { return get_local_camera_metadata_tag_name(tag, NULL); } int get_camera_metadata_tag_type(uint32_t tag) { return get_local_camera_metadata_tag_type(tag, NULL); } const char *get_local_camera_metadata_section_name(uint32_t tag, const camera_metadata_t *meta) { metadata_vendor_id_t id = (NULL == meta) ? CAMERA_METADATA_INVALID_VENDOR_ID : meta->vendor_id; return get_local_camera_metadata_section_name_vendor_id(tag, id); } const char *get_local_camera_metadata_tag_name(uint32_t tag, const camera_metadata_t *meta) { metadata_vendor_id_t id = (NULL == meta) ? CAMERA_METADATA_INVALID_VENDOR_ID : meta->vendor_id; return get_local_camera_metadata_tag_name_vendor_id(tag, id); } int get_local_camera_metadata_tag_type(uint32_t tag, const camera_metadata_t *meta) { metadata_vendor_id_t id = (NULL == meta) ? CAMERA_METADATA_INVALID_VENDOR_ID : meta->vendor_id; return get_local_camera_metadata_tag_type_vendor_id(tag, id); } int set_camera_metadata_vendor_tag_ops(const vendor_tag_query_ops_t* ops) { // **DEPRECATED** (void) ops; ALOGE("%s: This function has been deprecated", __FUNCTION__); return ERROR; } // Declared in system/media/private/camera/include/camera_metadata_hidden.h int set_camera_metadata_vendor_ops(const vendor_tag_ops_t* ops) { vendor_tag_ops = ops; return OK; } // Declared in system/media/private/camera/include/camera_metadata_hidden.h int set_camera_metadata_vendor_cache_ops( const struct vendor_tag_cache_ops *query_cache_ops) { vendor_cache_ops = query_cache_ops; return OK; } // Declared in system/media/private/camera/include/camera_metadata_hidden.h void set_camera_metadata_vendor_id(camera_metadata_t *meta, metadata_vendor_id_t id) { if (NULL != meta) { meta->vendor_id = id; } } // Declared in system/media/private/camera/include/camera_metadata_hidden.h metadata_vendor_id_t get_camera_metadata_vendor_id( const camera_metadata_t *meta) { metadata_vendor_id_t ret = CAMERA_METADATA_INVALID_VENDOR_ID; if (NULL != meta) { ret = meta->vendor_id; } return ret; } static void print_data(int fd, const uint8_t *data_ptr, uint32_t tag, int type, int count, int indentation); void dump_camera_metadata(const camera_metadata_t *metadata, int fd, int verbosity) { dump_indented_camera_metadata(metadata, fd, verbosity, 0); } void dump_indented_camera_metadata(const camera_metadata_t *metadata, int fd, int verbosity, int indentation) { if (metadata == NULL) { dprintf(fd, "%*sDumping camera metadata array: Not allocated\n", indentation, ""); return; } unsigned int i; dprintf(fd, "%*sDumping camera metadata array: %" PRIu32 " / %" PRIu32 " entries, " "%" PRIu32 " / %" PRIu32 " bytes of extra data.\n", indentation, "", metadata->entry_count, metadata->entry_capacity, 
metadata->data_count, metadata->data_capacity); dprintf(fd, "%*sVersion: %d, Flags: %08x\n", indentation + 2, "", metadata->version, metadata->flags); camera_metadata_buffer_entry_t *entry = get_entries(metadata); for (i=0; i < metadata->entry_count; i++, entry++) { const char *tag_name, *tag_section; tag_section = get_local_camera_metadata_section_name(entry->tag, metadata); if (tag_section == NULL) { tag_section = "unknownSection"; } tag_name = get_local_camera_metadata_tag_name(entry->tag, metadata); if (tag_name == NULL) { tag_name = "unknownTag"; } const char *type_name; if (entry->type >= NUM_TYPES) { type_name = "unknown"; } else { type_name = camera_metadata_type_names[entry->type]; } dprintf(fd, "%*s%s.%s (%05x): %s[%" PRIu32 "]\n", indentation + 2, "", tag_section, tag_name, entry->tag, type_name, entry->count); if (verbosity < 1) continue; if (entry->type >= NUM_TYPES) continue; size_t type_size = camera_metadata_type_size[entry->type]; uint8_t *data_ptr; if ( type_size * entry->count > 4 ) { if (entry->data.offset >= metadata->data_count) { ALOGE("%s: Malformed entry data offset: %" PRIu32 " (max %" PRIu32 ")", __FUNCTION__, entry->data.offset, metadata->data_count); continue; } data_ptr = get_data(metadata) + entry->data.offset; } else { data_ptr = entry->data.value; } int count = entry->count; if (verbosity < 2 && count > 16) count = 16; print_data(fd, data_ptr, entry->tag, entry->type, count, indentation); } } static void print_data(int fd, const uint8_t *data_ptr, uint32_t tag, int type, int count, int indentation) { static int values_per_line[NUM_TYPES] = { [TYPE_BYTE] = 16, [TYPE_INT32] = 4, [TYPE_FLOAT] = 8, [TYPE_INT64] = 2, [TYPE_DOUBLE] = 4, [TYPE_RATIONAL] = 2, }; size_t type_size = camera_metadata_type_size[type]; char value_string_tmp[CAMERA_METADATA_ENUM_STRING_MAX_SIZE]; uint32_t value; int lines = count / values_per_line[type]; if (count % values_per_line[type] != 0) lines++; int index = 0; int j, k; for (j = 0; j < lines; j++) { dprintf(fd, "%*s[", indentation + 4, ""); for (k = 0; k < values_per_line[type] && count > 0; k++, count--, index += type_size) { switch (type) { case TYPE_BYTE: value = *(data_ptr + index); if (camera_metadata_enum_snprint(tag, value, value_string_tmp, sizeof(value_string_tmp)) == OK) { dprintf(fd, "%s ", value_string_tmp); } else { dprintf(fd, "%hhu ", *(data_ptr + index)); } break; case TYPE_INT32: value = *(int32_t*)(data_ptr + index); if (camera_metadata_enum_snprint(tag, value, value_string_tmp, sizeof(value_string_tmp)) == OK) { dprintf(fd, "%s ", value_string_tmp); } else { dprintf(fd, "%" PRId32 " ", *(int32_t*)(data_ptr + index)); } break; case TYPE_FLOAT: dprintf(fd, "%0.8f ", *(float*)(data_ptr + index)); break; case TYPE_INT64: dprintf(fd, "%" PRId64 " ", *(int64_t*)(data_ptr + index)); break; case TYPE_DOUBLE: dprintf(fd, "%0.8f ", *(double*)(data_ptr + index)); break; case TYPE_RATIONAL: { int32_t numerator = *(int32_t*)(data_ptr + index); int32_t denominator = *(int32_t*)(data_ptr + index + 4); dprintf(fd, "(%d / %d) ", numerator, denominator); break; } default: dprintf(fd, "??? "); } } dprintf(fd, "]\n"); } }
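
The file above exposes a small C API for building and querying packed metadata buffers. As a quick orientation, here is a minimal usage sketch (not part of the upstream source): it allocates a packet, adds a one-byte ANDROID_CONTROL_AE_MODE entry, sorts the packet so lookups can binary-search, then reads the value back. The wrapper function name and the capacities are illustrative choices, and the include path may differ in a vendored tree.

#include <assert.h>
#include <stdint.h>
#include <system/camera_metadata.h> /* assumed AOSP header path */

static int example_build_and_query(void) /* hypothetical helper */
{
    /* Capacity for 8 entries and 128 bytes of out-of-line payload. */
    camera_metadata_t *m = allocate_camera_metadata(8, 128);
    if (m == NULL)
        return ERROR;

    /* add_camera_metadata_entry() looks up the tag's type itself. */
    uint8_t ae_mode = ANDROID_CONTROL_AE_MODE_ON;
    if (add_camera_metadata_entry(m, ANDROID_CONTROL_AE_MODE,
                                  &ae_mode, 1) != OK) {
        free_camera_metadata(m);
        return ERROR;
    }

    /* Sorting by tag lets find_camera_metadata_entry() use bsearch(). */
    sort_camera_metadata(m);

    camera_metadata_entry_t entry;
    if (find_camera_metadata_entry(m, ANDROID_CONTROL_AE_MODE, &entry) == OK)
        assert(entry.count == 1 &&
               entry.data.u8[0] == ANDROID_CONTROL_AE_MODE_ON);

    free_camera_metadata(m);
    return OK;
}

Note how the one-element byte payload lands in the entry's 4-byte inline slot, so the 128-byte data section stays empty; only payloads larger than 4 bytes consume data_capacity.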
0
repos/libcamera/src/android
repos/libcamera/src/android/metadata/camera_metadata_tag_info.c
/* SPDX-License-Identifier: Apache-2.0 */ /* * Copyright (C) 2012 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * !! Do not reference this file directly !! * * It is logically a part of camera_metadata.c. It is broken out for ease of * maintaining the tag info. * * Array assignments are done using specified-index syntax to keep things in * sync with camera_metadata_tags.h */ /** * ! Do not edit this file directly ! * * Generated automatically from camera_metadata_tag_info.mako */ const char *camera_metadata_section_names[ANDROID_SECTION_COUNT] = { [ANDROID_COLOR_CORRECTION] = "android.colorCorrection", [ANDROID_CONTROL] = "android.control", [ANDROID_DEMOSAIC] = "android.demosaic", [ANDROID_EDGE] = "android.edge", [ANDROID_FLASH] = "android.flash", [ANDROID_FLASH_INFO] = "android.flash.info", [ANDROID_HOT_PIXEL] = "android.hotPixel", [ANDROID_JPEG] = "android.jpeg", [ANDROID_LENS] = "android.lens", [ANDROID_LENS_INFO] = "android.lens.info", [ANDROID_NOISE_REDUCTION] = "android.noiseReduction", [ANDROID_QUIRKS] = "android.quirks", [ANDROID_REQUEST] = "android.request", [ANDROID_SCALER] = "android.scaler", [ANDROID_SENSOR] = "android.sensor", [ANDROID_SENSOR_INFO] = "android.sensor.info", [ANDROID_SHADING] = "android.shading", [ANDROID_STATISTICS] = "android.statistics", [ANDROID_STATISTICS_INFO] = "android.statistics.info", [ANDROID_TONEMAP] = "android.tonemap", [ANDROID_LED] = "android.led", [ANDROID_INFO] = "android.info", [ANDROID_BLACK_LEVEL] = "android.blackLevel", [ANDROID_SYNC] = "android.sync", [ANDROID_REPROCESS] = "android.reprocess", [ANDROID_DEPTH] = "android.depth", [ANDROID_LOGICAL_MULTI_CAMERA] = "android.logicalMultiCamera", [ANDROID_DISTORTION_CORRECTION] = "android.distortionCorrection", }; unsigned int camera_metadata_section_bounds[ANDROID_SECTION_COUNT][2] = { [ANDROID_COLOR_CORRECTION] = { ANDROID_COLOR_CORRECTION_START, ANDROID_COLOR_CORRECTION_END }, [ANDROID_CONTROL] = { ANDROID_CONTROL_START, ANDROID_CONTROL_END }, [ANDROID_DEMOSAIC] = { ANDROID_DEMOSAIC_START, ANDROID_DEMOSAIC_END }, [ANDROID_EDGE] = { ANDROID_EDGE_START, ANDROID_EDGE_END }, [ANDROID_FLASH] = { ANDROID_FLASH_START, ANDROID_FLASH_END }, [ANDROID_FLASH_INFO] = { ANDROID_FLASH_INFO_START, ANDROID_FLASH_INFO_END }, [ANDROID_HOT_PIXEL] = { ANDROID_HOT_PIXEL_START, ANDROID_HOT_PIXEL_END }, [ANDROID_JPEG] = { ANDROID_JPEG_START, ANDROID_JPEG_END }, [ANDROID_LENS] = { ANDROID_LENS_START, ANDROID_LENS_END }, [ANDROID_LENS_INFO] = { ANDROID_LENS_INFO_START, ANDROID_LENS_INFO_END }, [ANDROID_NOISE_REDUCTION] = { ANDROID_NOISE_REDUCTION_START, ANDROID_NOISE_REDUCTION_END }, [ANDROID_QUIRKS] = { ANDROID_QUIRKS_START, ANDROID_QUIRKS_END }, [ANDROID_REQUEST] = { ANDROID_REQUEST_START, ANDROID_REQUEST_END }, [ANDROID_SCALER] = { ANDROID_SCALER_START, ANDROID_SCALER_END }, [ANDROID_SENSOR] = { ANDROID_SENSOR_START, ANDROID_SENSOR_END }, [ANDROID_SENSOR_INFO] = { ANDROID_SENSOR_INFO_START, ANDROID_SENSOR_INFO_END }, [ANDROID_SHADING] = { ANDROID_SHADING_START, ANDROID_SHADING_END }, 
[ANDROID_STATISTICS] = { ANDROID_STATISTICS_START, ANDROID_STATISTICS_END }, [ANDROID_STATISTICS_INFO] = { ANDROID_STATISTICS_INFO_START, ANDROID_STATISTICS_INFO_END }, [ANDROID_TONEMAP] = { ANDROID_TONEMAP_START, ANDROID_TONEMAP_END }, [ANDROID_LED] = { ANDROID_LED_START, ANDROID_LED_END }, [ANDROID_INFO] = { ANDROID_INFO_START, ANDROID_INFO_END }, [ANDROID_BLACK_LEVEL] = { ANDROID_BLACK_LEVEL_START, ANDROID_BLACK_LEVEL_END }, [ANDROID_SYNC] = { ANDROID_SYNC_START, ANDROID_SYNC_END }, [ANDROID_REPROCESS] = { ANDROID_REPROCESS_START, ANDROID_REPROCESS_END }, [ANDROID_DEPTH] = { ANDROID_DEPTH_START, ANDROID_DEPTH_END }, [ANDROID_LOGICAL_MULTI_CAMERA] = { ANDROID_LOGICAL_MULTI_CAMERA_START, ANDROID_LOGICAL_MULTI_CAMERA_END }, [ANDROID_DISTORTION_CORRECTION] = { ANDROID_DISTORTION_CORRECTION_START, ANDROID_DISTORTION_CORRECTION_END }, }; static tag_info_t android_color_correction[ANDROID_COLOR_CORRECTION_END - ANDROID_COLOR_CORRECTION_START] = { [ ANDROID_COLOR_CORRECTION_MODE - ANDROID_COLOR_CORRECTION_START ] = { "mode", TYPE_BYTE }, [ ANDROID_COLOR_CORRECTION_TRANSFORM - ANDROID_COLOR_CORRECTION_START ] = { "transform", TYPE_RATIONAL }, [ ANDROID_COLOR_CORRECTION_GAINS - ANDROID_COLOR_CORRECTION_START ] = { "gains", TYPE_FLOAT }, [ ANDROID_COLOR_CORRECTION_ABERRATION_MODE - ANDROID_COLOR_CORRECTION_START ] = { "aberrationMode", TYPE_BYTE }, [ ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES - ANDROID_COLOR_CORRECTION_START ] = { "availableAberrationModes", TYPE_BYTE }, }; static tag_info_t android_control[ANDROID_CONTROL_END - ANDROID_CONTROL_START] = { [ ANDROID_CONTROL_AE_ANTIBANDING_MODE - ANDROID_CONTROL_START ] = { "aeAntibandingMode", TYPE_BYTE }, [ ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION - ANDROID_CONTROL_START ] = { "aeExposureCompensation", TYPE_INT32 }, [ ANDROID_CONTROL_AE_LOCK - ANDROID_CONTROL_START ] = { "aeLock", TYPE_BYTE }, [ ANDROID_CONTROL_AE_MODE - ANDROID_CONTROL_START ] = { "aeMode", TYPE_BYTE }, [ ANDROID_CONTROL_AE_REGIONS - ANDROID_CONTROL_START ] = { "aeRegions", TYPE_INT32 }, [ ANDROID_CONTROL_AE_TARGET_FPS_RANGE - ANDROID_CONTROL_START ] = { "aeTargetFpsRange", TYPE_INT32 }, [ ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER - ANDROID_CONTROL_START ] = { "aePrecaptureTrigger", TYPE_BYTE }, [ ANDROID_CONTROL_AF_MODE - ANDROID_CONTROL_START ] = { "afMode", TYPE_BYTE }, [ ANDROID_CONTROL_AF_REGIONS - ANDROID_CONTROL_START ] = { "afRegions", TYPE_INT32 }, [ ANDROID_CONTROL_AF_TRIGGER - ANDROID_CONTROL_START ] = { "afTrigger", TYPE_BYTE }, [ ANDROID_CONTROL_AWB_LOCK - ANDROID_CONTROL_START ] = { "awbLock", TYPE_BYTE }, [ ANDROID_CONTROL_AWB_MODE - ANDROID_CONTROL_START ] = { "awbMode", TYPE_BYTE }, [ ANDROID_CONTROL_AWB_REGIONS - ANDROID_CONTROL_START ] = { "awbRegions", TYPE_INT32 }, [ ANDROID_CONTROL_CAPTURE_INTENT - ANDROID_CONTROL_START ] = { "captureIntent", TYPE_BYTE }, [ ANDROID_CONTROL_EFFECT_MODE - ANDROID_CONTROL_START ] = { "effectMode", TYPE_BYTE }, [ ANDROID_CONTROL_MODE - ANDROID_CONTROL_START ] = { "mode", TYPE_BYTE }, [ ANDROID_CONTROL_SCENE_MODE - ANDROID_CONTROL_START ] = { "sceneMode", TYPE_BYTE }, [ ANDROID_CONTROL_VIDEO_STABILIZATION_MODE - ANDROID_CONTROL_START ] = { "videoStabilizationMode", TYPE_BYTE }, [ ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES - ANDROID_CONTROL_START ] = { "aeAvailableAntibandingModes", TYPE_BYTE }, [ ANDROID_CONTROL_AE_AVAILABLE_MODES - ANDROID_CONTROL_START ] = { "aeAvailableModes", TYPE_BYTE }, [ ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES - ANDROID_CONTROL_START ] = { "aeAvailableTargetFpsRanges", TYPE_INT32 }, [ 
ANDROID_CONTROL_AE_COMPENSATION_RANGE - ANDROID_CONTROL_START ] = { "aeCompensationRange", TYPE_INT32 }, [ ANDROID_CONTROL_AE_COMPENSATION_STEP - ANDROID_CONTROL_START ] = { "aeCompensationStep", TYPE_RATIONAL }, [ ANDROID_CONTROL_AF_AVAILABLE_MODES - ANDROID_CONTROL_START ] = { "afAvailableModes", TYPE_BYTE }, [ ANDROID_CONTROL_AVAILABLE_EFFECTS - ANDROID_CONTROL_START ] = { "availableEffects", TYPE_BYTE }, [ ANDROID_CONTROL_AVAILABLE_SCENE_MODES - ANDROID_CONTROL_START ] = { "availableSceneModes", TYPE_BYTE }, [ ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES - ANDROID_CONTROL_START ] = { "availableVideoStabilizationModes", TYPE_BYTE }, [ ANDROID_CONTROL_AWB_AVAILABLE_MODES - ANDROID_CONTROL_START ] = { "awbAvailableModes", TYPE_BYTE }, [ ANDROID_CONTROL_MAX_REGIONS - ANDROID_CONTROL_START ] = { "maxRegions", TYPE_INT32 }, [ ANDROID_CONTROL_SCENE_MODE_OVERRIDES - ANDROID_CONTROL_START ] = { "sceneModeOverrides", TYPE_BYTE }, [ ANDROID_CONTROL_AE_PRECAPTURE_ID - ANDROID_CONTROL_START ] = { "aePrecaptureId", TYPE_INT32 }, [ ANDROID_CONTROL_AE_STATE - ANDROID_CONTROL_START ] = { "aeState", TYPE_BYTE }, [ ANDROID_CONTROL_AF_STATE - ANDROID_CONTROL_START ] = { "afState", TYPE_BYTE }, [ ANDROID_CONTROL_AF_TRIGGER_ID - ANDROID_CONTROL_START ] = { "afTriggerId", TYPE_INT32 }, [ ANDROID_CONTROL_AWB_STATE - ANDROID_CONTROL_START ] = { "awbState", TYPE_BYTE }, [ ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS - ANDROID_CONTROL_START ] = { "availableHighSpeedVideoConfigurations", TYPE_INT32 }, [ ANDROID_CONTROL_AE_LOCK_AVAILABLE - ANDROID_CONTROL_START ] = { "aeLockAvailable", TYPE_BYTE }, [ ANDROID_CONTROL_AWB_LOCK_AVAILABLE - ANDROID_CONTROL_START ] = { "awbLockAvailable", TYPE_BYTE }, [ ANDROID_CONTROL_AVAILABLE_MODES - ANDROID_CONTROL_START ] = { "availableModes", TYPE_BYTE }, [ ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE - ANDROID_CONTROL_START ] = { "postRawSensitivityBoostRange", TYPE_INT32 }, [ ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST - ANDROID_CONTROL_START ] = { "postRawSensitivityBoost", TYPE_INT32 }, [ ANDROID_CONTROL_ENABLE_ZSL - ANDROID_CONTROL_START ] = { "enableZsl", TYPE_BYTE }, [ ANDROID_CONTROL_AF_SCENE_CHANGE - ANDROID_CONTROL_START ] = { "afSceneChange", TYPE_BYTE }, }; static tag_info_t android_demosaic[ANDROID_DEMOSAIC_END - ANDROID_DEMOSAIC_START] = { [ ANDROID_DEMOSAIC_MODE - ANDROID_DEMOSAIC_START ] = { "mode", TYPE_BYTE }, }; static tag_info_t android_edge[ANDROID_EDGE_END - ANDROID_EDGE_START] = { [ ANDROID_EDGE_MODE - ANDROID_EDGE_START ] = { "mode", TYPE_BYTE }, [ ANDROID_EDGE_STRENGTH - ANDROID_EDGE_START ] = { "strength", TYPE_BYTE }, [ ANDROID_EDGE_AVAILABLE_EDGE_MODES - ANDROID_EDGE_START ] = { "availableEdgeModes", TYPE_BYTE }, }; static tag_info_t android_flash[ANDROID_FLASH_END - ANDROID_FLASH_START] = { [ ANDROID_FLASH_FIRING_POWER - ANDROID_FLASH_START ] = { "firingPower", TYPE_BYTE }, [ ANDROID_FLASH_FIRING_TIME - ANDROID_FLASH_START ] = { "firingTime", TYPE_INT64 }, [ ANDROID_FLASH_MODE - ANDROID_FLASH_START ] = { "mode", TYPE_BYTE }, [ ANDROID_FLASH_COLOR_TEMPERATURE - ANDROID_FLASH_START ] = { "colorTemperature", TYPE_BYTE }, [ ANDROID_FLASH_MAX_ENERGY - ANDROID_FLASH_START ] = { "maxEnergy", TYPE_BYTE }, [ ANDROID_FLASH_STATE - ANDROID_FLASH_START ] = { "state", TYPE_BYTE }, }; static tag_info_t android_flash_info[ANDROID_FLASH_INFO_END - ANDROID_FLASH_INFO_START] = { [ ANDROID_FLASH_INFO_AVAILABLE - ANDROID_FLASH_INFO_START ] = { "available", TYPE_BYTE }, [ ANDROID_FLASH_INFO_CHARGE_DURATION - ANDROID_FLASH_INFO_START ] = { 
"chargeDuration", TYPE_INT64 }, }; static tag_info_t android_hot_pixel[ANDROID_HOT_PIXEL_END - ANDROID_HOT_PIXEL_START] = { [ ANDROID_HOT_PIXEL_MODE - ANDROID_HOT_PIXEL_START ] = { "mode", TYPE_BYTE }, [ ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES - ANDROID_HOT_PIXEL_START ] = { "availableHotPixelModes", TYPE_BYTE }, }; static tag_info_t android_jpeg[ANDROID_JPEG_END - ANDROID_JPEG_START] = { [ ANDROID_JPEG_GPS_COORDINATES - ANDROID_JPEG_START ] = { "gpsCoordinates", TYPE_DOUBLE }, [ ANDROID_JPEG_GPS_PROCESSING_METHOD - ANDROID_JPEG_START ] = { "gpsProcessingMethod", TYPE_BYTE }, [ ANDROID_JPEG_GPS_TIMESTAMP - ANDROID_JPEG_START ] = { "gpsTimestamp", TYPE_INT64 }, [ ANDROID_JPEG_ORIENTATION - ANDROID_JPEG_START ] = { "orientation", TYPE_INT32 }, [ ANDROID_JPEG_QUALITY - ANDROID_JPEG_START ] = { "quality", TYPE_BYTE }, [ ANDROID_JPEG_THUMBNAIL_QUALITY - ANDROID_JPEG_START ] = { "thumbnailQuality", TYPE_BYTE }, [ ANDROID_JPEG_THUMBNAIL_SIZE - ANDROID_JPEG_START ] = { "thumbnailSize", TYPE_INT32 }, [ ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES - ANDROID_JPEG_START ] = { "availableThumbnailSizes", TYPE_INT32 }, [ ANDROID_JPEG_MAX_SIZE - ANDROID_JPEG_START ] = { "maxSize", TYPE_INT32 }, [ ANDROID_JPEG_SIZE - ANDROID_JPEG_START ] = { "size", TYPE_INT32 }, }; static tag_info_t android_lens[ANDROID_LENS_END - ANDROID_LENS_START] = { [ ANDROID_LENS_APERTURE - ANDROID_LENS_START ] = { "aperture", TYPE_FLOAT }, [ ANDROID_LENS_FILTER_DENSITY - ANDROID_LENS_START ] = { "filterDensity", TYPE_FLOAT }, [ ANDROID_LENS_FOCAL_LENGTH - ANDROID_LENS_START ] = { "focalLength", TYPE_FLOAT }, [ ANDROID_LENS_FOCUS_DISTANCE - ANDROID_LENS_START ] = { "focusDistance", TYPE_FLOAT }, [ ANDROID_LENS_OPTICAL_STABILIZATION_MODE - ANDROID_LENS_START ] = { "opticalStabilizationMode", TYPE_BYTE }, [ ANDROID_LENS_FACING - ANDROID_LENS_START ] = { "facing", TYPE_BYTE }, [ ANDROID_LENS_POSE_ROTATION - ANDROID_LENS_START ] = { "poseRotation", TYPE_FLOAT }, [ ANDROID_LENS_POSE_TRANSLATION - ANDROID_LENS_START ] = { "poseTranslation", TYPE_FLOAT }, [ ANDROID_LENS_FOCUS_RANGE - ANDROID_LENS_START ] = { "focusRange", TYPE_FLOAT }, [ ANDROID_LENS_STATE - ANDROID_LENS_START ] = { "state", TYPE_BYTE }, [ ANDROID_LENS_INTRINSIC_CALIBRATION - ANDROID_LENS_START ] = { "intrinsicCalibration", TYPE_FLOAT }, [ ANDROID_LENS_RADIAL_DISTORTION - ANDROID_LENS_START ] = { "radialDistortion", TYPE_FLOAT }, [ ANDROID_LENS_POSE_REFERENCE - ANDROID_LENS_START ] = { "poseReference", TYPE_BYTE }, [ ANDROID_LENS_DISTORTION - ANDROID_LENS_START ] = { "distortion", TYPE_FLOAT }, }; static tag_info_t android_lens_info[ANDROID_LENS_INFO_END - ANDROID_LENS_INFO_START] = { [ ANDROID_LENS_INFO_AVAILABLE_APERTURES - ANDROID_LENS_INFO_START ] = { "availableApertures", TYPE_FLOAT }, [ ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES - ANDROID_LENS_INFO_START ] = { "availableFilterDensities", TYPE_FLOAT }, [ ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS - ANDROID_LENS_INFO_START ] = { "availableFocalLengths", TYPE_FLOAT }, [ ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION - ANDROID_LENS_INFO_START ] = { "availableOpticalStabilization", TYPE_BYTE }, [ ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE - ANDROID_LENS_INFO_START ] = { "hyperfocalDistance", TYPE_FLOAT }, [ ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE - ANDROID_LENS_INFO_START ] = { "minimumFocusDistance", TYPE_FLOAT }, [ ANDROID_LENS_INFO_SHADING_MAP_SIZE - ANDROID_LENS_INFO_START ] = { "shadingMapSize", TYPE_INT32 }, [ ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION - ANDROID_LENS_INFO_START ] = { "focusDistanceCalibration", 
TYPE_BYTE }, }; static tag_info_t android_noise_reduction[ANDROID_NOISE_REDUCTION_END - ANDROID_NOISE_REDUCTION_START] = { [ ANDROID_NOISE_REDUCTION_MODE - ANDROID_NOISE_REDUCTION_START ] = { "mode", TYPE_BYTE }, [ ANDROID_NOISE_REDUCTION_STRENGTH - ANDROID_NOISE_REDUCTION_START ] = { "strength", TYPE_BYTE }, [ ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES - ANDROID_NOISE_REDUCTION_START ] = { "availableNoiseReductionModes", TYPE_BYTE }, }; static tag_info_t android_quirks[ANDROID_QUIRKS_END - ANDROID_QUIRKS_START] = { [ ANDROID_QUIRKS_METERING_CROP_REGION - ANDROID_QUIRKS_START ] = { "meteringCropRegion", TYPE_BYTE }, [ ANDROID_QUIRKS_TRIGGER_AF_WITH_AUTO - ANDROID_QUIRKS_START ] = { "triggerAfWithAuto", TYPE_BYTE }, [ ANDROID_QUIRKS_USE_ZSL_FORMAT - ANDROID_QUIRKS_START ] = { "useZslFormat", TYPE_BYTE }, [ ANDROID_QUIRKS_USE_PARTIAL_RESULT - ANDROID_QUIRKS_START ] = { "usePartialResult", TYPE_BYTE }, [ ANDROID_QUIRKS_PARTIAL_RESULT - ANDROID_QUIRKS_START ] = { "partialResult", TYPE_BYTE }, }; static tag_info_t android_request[ANDROID_REQUEST_END - ANDROID_REQUEST_START] = { [ ANDROID_REQUEST_FRAME_COUNT - ANDROID_REQUEST_START ] = { "frameCount", TYPE_INT32 }, [ ANDROID_REQUEST_ID - ANDROID_REQUEST_START ] = { "id", TYPE_INT32 }, [ ANDROID_REQUEST_INPUT_STREAMS - ANDROID_REQUEST_START ] = { "inputStreams", TYPE_INT32 }, [ ANDROID_REQUEST_METADATA_MODE - ANDROID_REQUEST_START ] = { "metadataMode", TYPE_BYTE }, [ ANDROID_REQUEST_OUTPUT_STREAMS - ANDROID_REQUEST_START ] = { "outputStreams", TYPE_INT32 }, [ ANDROID_REQUEST_TYPE - ANDROID_REQUEST_START ] = { "type", TYPE_BYTE }, [ ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS - ANDROID_REQUEST_START ] = { "maxNumOutputStreams", TYPE_INT32 }, [ ANDROID_REQUEST_MAX_NUM_REPROCESS_STREAMS - ANDROID_REQUEST_START ] = { "maxNumReprocessStreams", TYPE_INT32 }, [ ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS - ANDROID_REQUEST_START ] = { "maxNumInputStreams", TYPE_INT32 }, [ ANDROID_REQUEST_PIPELINE_DEPTH - ANDROID_REQUEST_START ] = { "pipelineDepth", TYPE_BYTE }, [ ANDROID_REQUEST_PIPELINE_MAX_DEPTH - ANDROID_REQUEST_START ] = { "pipelineMaxDepth", TYPE_BYTE }, [ ANDROID_REQUEST_PARTIAL_RESULT_COUNT - ANDROID_REQUEST_START ] = { "partialResultCount", TYPE_INT32 }, [ ANDROID_REQUEST_AVAILABLE_CAPABILITIES - ANDROID_REQUEST_START ] = { "availableCapabilities", TYPE_BYTE }, [ ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS - ANDROID_REQUEST_START ] = { "availableRequestKeys", TYPE_INT32 }, [ ANDROID_REQUEST_AVAILABLE_RESULT_KEYS - ANDROID_REQUEST_START ] = { "availableResultKeys", TYPE_INT32 }, [ ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS - ANDROID_REQUEST_START ] = { "availableCharacteristicsKeys", TYPE_INT32 }, [ ANDROID_REQUEST_AVAILABLE_SESSION_KEYS - ANDROID_REQUEST_START ] = { "availableSessionKeys", TYPE_INT32 }, [ ANDROID_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS - ANDROID_REQUEST_START ] = { "availablePhysicalCameraRequestKeys", TYPE_INT32 }, }; static tag_info_t android_scaler[ANDROID_SCALER_END - ANDROID_SCALER_START] = { [ ANDROID_SCALER_CROP_REGION - ANDROID_SCALER_START ] = { "cropRegion", TYPE_INT32 }, [ ANDROID_SCALER_AVAILABLE_FORMATS - ANDROID_SCALER_START ] = { "availableFormats", TYPE_INT32 }, [ ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS - ANDROID_SCALER_START ] = { "availableJpegMinDurations", TYPE_INT64 }, [ ANDROID_SCALER_AVAILABLE_JPEG_SIZES - ANDROID_SCALER_START ] = { "availableJpegSizes", TYPE_INT32 }, [ ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM - ANDROID_SCALER_START ] = { "availableMaxDigitalZoom", TYPE_FLOAT }, [ 
ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS - ANDROID_SCALER_START ] = { "availableProcessedMinDurations", TYPE_INT64 }, [ ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES - ANDROID_SCALER_START ] = { "availableProcessedSizes", TYPE_INT32 }, [ ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS - ANDROID_SCALER_START ] = { "availableRawMinDurations", TYPE_INT64 }, [ ANDROID_SCALER_AVAILABLE_RAW_SIZES - ANDROID_SCALER_START ] = { "availableRawSizes", TYPE_INT32 }, [ ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP - ANDROID_SCALER_START ] = { "availableInputOutputFormatsMap", TYPE_INT32 }, [ ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS - ANDROID_SCALER_START ] = { "availableStreamConfigurations", TYPE_INT32 }, [ ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS - ANDROID_SCALER_START ] = { "availableMinFrameDurations", TYPE_INT64 }, [ ANDROID_SCALER_AVAILABLE_STALL_DURATIONS - ANDROID_SCALER_START ] = { "availableStallDurations", TYPE_INT64 }, [ ANDROID_SCALER_CROPPING_TYPE - ANDROID_SCALER_START ] = { "croppingType", TYPE_BYTE }, }; static tag_info_t android_sensor[ANDROID_SENSOR_END - ANDROID_SENSOR_START] = { [ ANDROID_SENSOR_EXPOSURE_TIME - ANDROID_SENSOR_START ] = { "exposureTime", TYPE_INT64 }, [ ANDROID_SENSOR_FRAME_DURATION - ANDROID_SENSOR_START ] = { "frameDuration", TYPE_INT64 }, [ ANDROID_SENSOR_SENSITIVITY - ANDROID_SENSOR_START ] = { "sensitivity", TYPE_INT32 }, [ ANDROID_SENSOR_REFERENCE_ILLUMINANT1 - ANDROID_SENSOR_START ] = { "referenceIlluminant1", TYPE_BYTE }, [ ANDROID_SENSOR_REFERENCE_ILLUMINANT2 - ANDROID_SENSOR_START ] = { "referenceIlluminant2", TYPE_BYTE }, [ ANDROID_SENSOR_CALIBRATION_TRANSFORM1 - ANDROID_SENSOR_START ] = { "calibrationTransform1", TYPE_RATIONAL }, [ ANDROID_SENSOR_CALIBRATION_TRANSFORM2 - ANDROID_SENSOR_START ] = { "calibrationTransform2", TYPE_RATIONAL }, [ ANDROID_SENSOR_COLOR_TRANSFORM1 - ANDROID_SENSOR_START ] = { "colorTransform1", TYPE_RATIONAL }, [ ANDROID_SENSOR_COLOR_TRANSFORM2 - ANDROID_SENSOR_START ] = { "colorTransform2", TYPE_RATIONAL }, [ ANDROID_SENSOR_FORWARD_MATRIX1 - ANDROID_SENSOR_START ] = { "forwardMatrix1", TYPE_RATIONAL }, [ ANDROID_SENSOR_FORWARD_MATRIX2 - ANDROID_SENSOR_START ] = { "forwardMatrix2", TYPE_RATIONAL }, [ ANDROID_SENSOR_BASE_GAIN_FACTOR - ANDROID_SENSOR_START ] = { "baseGainFactor", TYPE_RATIONAL }, [ ANDROID_SENSOR_BLACK_LEVEL_PATTERN - ANDROID_SENSOR_START ] = { "blackLevelPattern", TYPE_INT32 }, [ ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY - ANDROID_SENSOR_START ] = { "maxAnalogSensitivity", TYPE_INT32 }, [ ANDROID_SENSOR_ORIENTATION - ANDROID_SENSOR_START ] = { "orientation", TYPE_INT32 }, [ ANDROID_SENSOR_PROFILE_HUE_SAT_MAP_DIMENSIONS - ANDROID_SENSOR_START ] = { "profileHueSatMapDimensions", TYPE_INT32 }, [ ANDROID_SENSOR_TIMESTAMP - ANDROID_SENSOR_START ] = { "timestamp", TYPE_INT64 }, [ ANDROID_SENSOR_TEMPERATURE - ANDROID_SENSOR_START ] = { "temperature", TYPE_FLOAT }, [ ANDROID_SENSOR_NEUTRAL_COLOR_POINT - ANDROID_SENSOR_START ] = { "neutralColorPoint", TYPE_RATIONAL }, [ ANDROID_SENSOR_NOISE_PROFILE - ANDROID_SENSOR_START ] = { "noiseProfile", TYPE_DOUBLE }, [ ANDROID_SENSOR_PROFILE_HUE_SAT_MAP - ANDROID_SENSOR_START ] = { "profileHueSatMap", TYPE_FLOAT }, [ ANDROID_SENSOR_PROFILE_TONE_CURVE - ANDROID_SENSOR_START ] = { "profileToneCurve", TYPE_FLOAT }, [ ANDROID_SENSOR_GREEN_SPLIT - ANDROID_SENSOR_START ] = { "greenSplit", TYPE_FLOAT }, [ ANDROID_SENSOR_TEST_PATTERN_DATA - ANDROID_SENSOR_START ] = { "testPatternData", TYPE_INT32 }, [ ANDROID_SENSOR_TEST_PATTERN_MODE - ANDROID_SENSOR_START ] = { "testPatternMode", 
TYPE_INT32 }, [ ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES - ANDROID_SENSOR_START ] = { "availableTestPatternModes", TYPE_INT32 }, [ ANDROID_SENSOR_ROLLING_SHUTTER_SKEW - ANDROID_SENSOR_START ] = { "rollingShutterSkew", TYPE_INT64 }, [ ANDROID_SENSOR_OPTICAL_BLACK_REGIONS - ANDROID_SENSOR_START ] = { "opticalBlackRegions", TYPE_INT32 }, [ ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL - ANDROID_SENSOR_START ] = { "dynamicBlackLevel", TYPE_FLOAT }, [ ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL - ANDROID_SENSOR_START ] = { "dynamicWhiteLevel", TYPE_INT32 }, [ ANDROID_SENSOR_OPAQUE_RAW_SIZE - ANDROID_SENSOR_START ] = { "opaqueRawSize", TYPE_INT32 }, }; static tag_info_t android_sensor_info[ANDROID_SENSOR_INFO_END - ANDROID_SENSOR_INFO_START] = { [ ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE - ANDROID_SENSOR_INFO_START ] = { "activeArraySize", TYPE_INT32 }, [ ANDROID_SENSOR_INFO_SENSITIVITY_RANGE - ANDROID_SENSOR_INFO_START ] = { "sensitivityRange", TYPE_INT32 }, [ ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT - ANDROID_SENSOR_INFO_START ] = { "colorFilterArrangement", TYPE_BYTE }, [ ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE - ANDROID_SENSOR_INFO_START ] = { "exposureTimeRange", TYPE_INT64 }, [ ANDROID_SENSOR_INFO_MAX_FRAME_DURATION - ANDROID_SENSOR_INFO_START ] = { "maxFrameDuration", TYPE_INT64 }, [ ANDROID_SENSOR_INFO_PHYSICAL_SIZE - ANDROID_SENSOR_INFO_START ] = { "physicalSize", TYPE_FLOAT }, [ ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE - ANDROID_SENSOR_INFO_START ] = { "pixelArraySize", TYPE_INT32 }, [ ANDROID_SENSOR_INFO_WHITE_LEVEL - ANDROID_SENSOR_INFO_START ] = { "whiteLevel", TYPE_INT32 }, [ ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE - ANDROID_SENSOR_INFO_START ] = { "timestampSource", TYPE_BYTE }, [ ANDROID_SENSOR_INFO_LENS_SHADING_APPLIED - ANDROID_SENSOR_INFO_START ] = { "lensShadingApplied", TYPE_BYTE }, [ ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE - ANDROID_SENSOR_INFO_START ] = { "preCorrectionActiveArraySize", TYPE_INT32 }, }; static tag_info_t android_shading[ANDROID_SHADING_END - ANDROID_SHADING_START] = { [ ANDROID_SHADING_MODE - ANDROID_SHADING_START ] = { "mode", TYPE_BYTE }, [ ANDROID_SHADING_STRENGTH - ANDROID_SHADING_START ] = { "strength", TYPE_BYTE }, [ ANDROID_SHADING_AVAILABLE_MODES - ANDROID_SHADING_START ] = { "availableModes", TYPE_BYTE }, }; static tag_info_t android_statistics[ANDROID_STATISTICS_END - ANDROID_STATISTICS_START] = { [ ANDROID_STATISTICS_FACE_DETECT_MODE - ANDROID_STATISTICS_START ] = { "faceDetectMode", TYPE_BYTE }, [ ANDROID_STATISTICS_HISTOGRAM_MODE - ANDROID_STATISTICS_START ] = { "histogramMode", TYPE_BYTE }, [ ANDROID_STATISTICS_SHARPNESS_MAP_MODE - ANDROID_STATISTICS_START ] = { "sharpnessMapMode", TYPE_BYTE }, [ ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE - ANDROID_STATISTICS_START ] = { "hotPixelMapMode", TYPE_BYTE }, [ ANDROID_STATISTICS_FACE_IDS - ANDROID_STATISTICS_START ] = { "faceIds", TYPE_INT32 }, [ ANDROID_STATISTICS_FACE_LANDMARKS - ANDROID_STATISTICS_START ] = { "faceLandmarks", TYPE_INT32 }, [ ANDROID_STATISTICS_FACE_RECTANGLES - ANDROID_STATISTICS_START ] = { "faceRectangles", TYPE_INT32 }, [ ANDROID_STATISTICS_FACE_SCORES - ANDROID_STATISTICS_START ] = { "faceScores", TYPE_BYTE }, [ ANDROID_STATISTICS_HISTOGRAM - ANDROID_STATISTICS_START ] = { "histogram", TYPE_INT32 }, [ ANDROID_STATISTICS_SHARPNESS_MAP - ANDROID_STATISTICS_START ] = { "sharpnessMap", TYPE_INT32 }, [ ANDROID_STATISTICS_LENS_SHADING_CORRECTION_MAP - ANDROID_STATISTICS_START ] = { "lensShadingCorrectionMap", TYPE_BYTE }, [ ANDROID_STATISTICS_LENS_SHADING_MAP - ANDROID_STATISTICS_START ] = { 
"lensShadingMap", TYPE_FLOAT }, [ ANDROID_STATISTICS_PREDICTED_COLOR_GAINS - ANDROID_STATISTICS_START ] = { "predictedColorGains", TYPE_FLOAT }, [ ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM - ANDROID_STATISTICS_START ] = { "predictedColorTransform", TYPE_RATIONAL }, [ ANDROID_STATISTICS_SCENE_FLICKER - ANDROID_STATISTICS_START ] = { "sceneFlicker", TYPE_BYTE }, [ ANDROID_STATISTICS_HOT_PIXEL_MAP - ANDROID_STATISTICS_START ] = { "hotPixelMap", TYPE_INT32 }, [ ANDROID_STATISTICS_LENS_SHADING_MAP_MODE - ANDROID_STATISTICS_START ] = { "lensShadingMapMode", TYPE_BYTE }, [ ANDROID_STATISTICS_OIS_DATA_MODE - ANDROID_STATISTICS_START ] = { "oisDataMode", TYPE_BYTE }, [ ANDROID_STATISTICS_OIS_TIMESTAMPS - ANDROID_STATISTICS_START ] = { "oisTimestamps", TYPE_INT64 }, [ ANDROID_STATISTICS_OIS_X_SHIFTS - ANDROID_STATISTICS_START ] = { "oisXShifts", TYPE_FLOAT }, [ ANDROID_STATISTICS_OIS_Y_SHIFTS - ANDROID_STATISTICS_START ] = { "oisYShifts", TYPE_FLOAT }, }; static tag_info_t android_statistics_info[ANDROID_STATISTICS_INFO_END - ANDROID_STATISTICS_INFO_START] = { [ ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES - ANDROID_STATISTICS_INFO_START ] = { "availableFaceDetectModes", TYPE_BYTE }, [ ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT - ANDROID_STATISTICS_INFO_START ] = { "histogramBucketCount", TYPE_INT32 }, [ ANDROID_STATISTICS_INFO_MAX_FACE_COUNT - ANDROID_STATISTICS_INFO_START ] = { "maxFaceCount", TYPE_INT32 }, [ ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT - ANDROID_STATISTICS_INFO_START ] = { "maxHistogramCount", TYPE_INT32 }, [ ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE - ANDROID_STATISTICS_INFO_START ] = { "maxSharpnessMapValue", TYPE_INT32 }, [ ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE - ANDROID_STATISTICS_INFO_START ] = { "sharpnessMapSize", TYPE_INT32 }, [ ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES - ANDROID_STATISTICS_INFO_START ] = { "availableHotPixelMapModes", TYPE_BYTE }, [ ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES - ANDROID_STATISTICS_INFO_START ] = { "availableLensShadingMapModes", TYPE_BYTE }, [ ANDROID_STATISTICS_INFO_AVAILABLE_OIS_DATA_MODES - ANDROID_STATISTICS_INFO_START ] = { "availableOisDataModes", TYPE_BYTE }, }; static tag_info_t android_tonemap[ANDROID_TONEMAP_END - ANDROID_TONEMAP_START] = { [ ANDROID_TONEMAP_CURVE_BLUE - ANDROID_TONEMAP_START ] = { "curveBlue", TYPE_FLOAT }, [ ANDROID_TONEMAP_CURVE_GREEN - ANDROID_TONEMAP_START ] = { "curveGreen", TYPE_FLOAT }, [ ANDROID_TONEMAP_CURVE_RED - ANDROID_TONEMAP_START ] = { "curveRed", TYPE_FLOAT }, [ ANDROID_TONEMAP_MODE - ANDROID_TONEMAP_START ] = { "mode", TYPE_BYTE }, [ ANDROID_TONEMAP_MAX_CURVE_POINTS - ANDROID_TONEMAP_START ] = { "maxCurvePoints", TYPE_INT32 }, [ ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES - ANDROID_TONEMAP_START ] = { "availableToneMapModes", TYPE_BYTE }, [ ANDROID_TONEMAP_GAMMA - ANDROID_TONEMAP_START ] = { "gamma", TYPE_FLOAT }, [ ANDROID_TONEMAP_PRESET_CURVE - ANDROID_TONEMAP_START ] = { "presetCurve", TYPE_BYTE }, }; static tag_info_t android_led[ANDROID_LED_END - ANDROID_LED_START] = { [ ANDROID_LED_TRANSMIT - ANDROID_LED_START ] = { "transmit", TYPE_BYTE }, [ ANDROID_LED_AVAILABLE_LEDS - ANDROID_LED_START ] = { "availableLeds", TYPE_BYTE }, }; static tag_info_t android_info[ANDROID_INFO_END - ANDROID_INFO_START] = { [ ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL - ANDROID_INFO_START ] = { "supportedHardwareLevel", TYPE_BYTE }, [ ANDROID_INFO_VERSION - ANDROID_INFO_START ] = { "version", TYPE_BYTE }, }; static tag_info_t android_black_level[ANDROID_BLACK_LEVEL_END - 
ANDROID_BLACK_LEVEL_START] = { [ ANDROID_BLACK_LEVEL_LOCK - ANDROID_BLACK_LEVEL_START ] = { "lock", TYPE_BYTE }, }; static tag_info_t android_sync[ANDROID_SYNC_END - ANDROID_SYNC_START] = { [ ANDROID_SYNC_FRAME_NUMBER - ANDROID_SYNC_START ] = { "frameNumber", TYPE_INT64 }, [ ANDROID_SYNC_MAX_LATENCY - ANDROID_SYNC_START ] = { "maxLatency", TYPE_INT32 }, }; static tag_info_t android_reprocess[ANDROID_REPROCESS_END - ANDROID_REPROCESS_START] = { [ ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR - ANDROID_REPROCESS_START ] = { "effectiveExposureFactor", TYPE_FLOAT }, [ ANDROID_REPROCESS_MAX_CAPTURE_STALL - ANDROID_REPROCESS_START ] = { "maxCaptureStall", TYPE_INT32 }, }; static tag_info_t android_depth[ANDROID_DEPTH_END - ANDROID_DEPTH_START] = { [ ANDROID_DEPTH_MAX_DEPTH_SAMPLES - ANDROID_DEPTH_START ] = { "maxDepthSamples", TYPE_INT32 }, [ ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS - ANDROID_DEPTH_START ] = { "availableDepthStreamConfigurations", TYPE_INT32 }, [ ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS - ANDROID_DEPTH_START ] = { "availableDepthMinFrameDurations", TYPE_INT64 }, [ ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS - ANDROID_DEPTH_START ] = { "availableDepthStallDurations", TYPE_INT64 }, [ ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE - ANDROID_DEPTH_START ] = { "depthIsExclusive", TYPE_BYTE }, }; static tag_info_t android_logical_multi_camera[ANDROID_LOGICAL_MULTI_CAMERA_END - ANDROID_LOGICAL_MULTI_CAMERA_START] = { [ ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS - ANDROID_LOGICAL_MULTI_CAMERA_START ] = { "physicalIds", TYPE_BYTE }, [ ANDROID_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE - ANDROID_LOGICAL_MULTI_CAMERA_START ] = { "sensorSyncType", TYPE_BYTE }, }; static tag_info_t android_distortion_correction[ANDROID_DISTORTION_CORRECTION_END - ANDROID_DISTORTION_CORRECTION_START] = { [ ANDROID_DISTORTION_CORRECTION_MODE - ANDROID_DISTORTION_CORRECTION_START ] = { "mode", TYPE_BYTE }, [ ANDROID_DISTORTION_CORRECTION_AVAILABLE_MODES - ANDROID_DISTORTION_CORRECTION_START ] = { "availableModes", TYPE_BYTE }, }; tag_info_t *tag_info[ANDROID_SECTION_COUNT] = { android_color_correction, android_control, android_demosaic, android_edge, android_flash, android_flash_info, android_hot_pixel, android_jpeg, android_lens, android_lens_info, android_noise_reduction, android_quirks, android_request, android_scaler, android_sensor, android_sensor_info, android_shading, android_statistics, android_statistics_info, android_tonemap, android_led, android_info, android_black_level, android_sync, android_reprocess, android_depth, android_logical_multi_camera, android_distortion_correction, }; int camera_metadata_enum_snprint(uint32_t tag, uint32_t value, char *dst, size_t size) { const char *msg = "error: not an enum"; int ret = -1; switch(tag) { case ANDROID_COLOR_CORRECTION_MODE: { switch (value) { case ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX: msg = "TRANSFORM_MATRIX"; ret = 0; break; case ANDROID_COLOR_CORRECTION_MODE_FAST: msg = "FAST"; ret = 0; break; case ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY: msg = "HIGH_QUALITY"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_COLOR_CORRECTION_TRANSFORM: { break; } case ANDROID_COLOR_CORRECTION_GAINS: { break; } case ANDROID_COLOR_CORRECTION_ABERRATION_MODE: { switch (value) { case ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF: msg = "OFF"; ret = 0; break; case ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST: msg = "FAST"; ret = 0; break; case ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY: msg = "HIGH_QUALITY"; 
ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES: { break; } case ANDROID_CONTROL_AE_ANTIBANDING_MODE: { switch (value) { case ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF: msg = "OFF"; ret = 0; break; case ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ: msg = "50HZ"; ret = 0; break; case ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ: msg = "60HZ"; ret = 0; break; case ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO: msg = "AUTO"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION: { break; } case ANDROID_CONTROL_AE_LOCK: { switch (value) { case ANDROID_CONTROL_AE_LOCK_OFF: msg = "OFF"; ret = 0; break; case ANDROID_CONTROL_AE_LOCK_ON: msg = "ON"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_CONTROL_AE_MODE: { switch (value) { case ANDROID_CONTROL_AE_MODE_OFF: msg = "OFF"; ret = 0; break; case ANDROID_CONTROL_AE_MODE_ON: msg = "ON"; ret = 0; break; case ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH: msg = "ON_AUTO_FLASH"; ret = 0; break; case ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH: msg = "ON_ALWAYS_FLASH"; ret = 0; break; case ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE: msg = "ON_AUTO_FLASH_REDEYE"; ret = 0; break; case ANDROID_CONTROL_AE_MODE_ON_EXTERNAL_FLASH: msg = "ON_EXTERNAL_FLASH"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_CONTROL_AE_REGIONS: { break; } case ANDROID_CONTROL_AE_TARGET_FPS_RANGE: { break; } case ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER: { switch (value) { case ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE: msg = "IDLE"; ret = 0; break; case ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_START: msg = "START"; ret = 0; break; case ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL: msg = "CANCEL"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_CONTROL_AF_MODE: { switch (value) { case ANDROID_CONTROL_AF_MODE_OFF: msg = "OFF"; ret = 0; break; case ANDROID_CONTROL_AF_MODE_AUTO: msg = "AUTO"; ret = 0; break; case ANDROID_CONTROL_AF_MODE_MACRO: msg = "MACRO"; ret = 0; break; case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO: msg = "CONTINUOUS_VIDEO"; ret = 0; break; case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE: msg = "CONTINUOUS_PICTURE"; ret = 0; break; case ANDROID_CONTROL_AF_MODE_EDOF: msg = "EDOF"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_CONTROL_AF_REGIONS: { break; } case ANDROID_CONTROL_AF_TRIGGER: { switch (value) { case ANDROID_CONTROL_AF_TRIGGER_IDLE: msg = "IDLE"; ret = 0; break; case ANDROID_CONTROL_AF_TRIGGER_START: msg = "START"; ret = 0; break; case ANDROID_CONTROL_AF_TRIGGER_CANCEL: msg = "CANCEL"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_CONTROL_AWB_LOCK: { switch (value) { case ANDROID_CONTROL_AWB_LOCK_OFF: msg = "OFF"; ret = 0; break; case ANDROID_CONTROL_AWB_LOCK_ON: msg = "ON"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_CONTROL_AWB_MODE: { switch (value) { case ANDROID_CONTROL_AWB_MODE_OFF: msg = "OFF"; ret = 0; break; case ANDROID_CONTROL_AWB_MODE_AUTO: msg = "AUTO"; ret = 0; break; case ANDROID_CONTROL_AWB_MODE_INCANDESCENT: msg = "INCANDESCENT"; ret = 0; break; case ANDROID_CONTROL_AWB_MODE_FLUORESCENT: msg = "FLUORESCENT"; ret = 0; break; case ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT: msg = "WARM_FLUORESCENT"; ret = 0; break; case ANDROID_CONTROL_AWB_MODE_DAYLIGHT: msg = 
"DAYLIGHT"; ret = 0; break; case ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT: msg = "CLOUDY_DAYLIGHT"; ret = 0; break; case ANDROID_CONTROL_AWB_MODE_TWILIGHT: msg = "TWILIGHT"; ret = 0; break; case ANDROID_CONTROL_AWB_MODE_SHADE: msg = "SHADE"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_CONTROL_AWB_REGIONS: { break; } case ANDROID_CONTROL_CAPTURE_INTENT: { switch (value) { case ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM: msg = "CUSTOM"; ret = 0; break; case ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW: msg = "PREVIEW"; ret = 0; break; case ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE: msg = "STILL_CAPTURE"; ret = 0; break; case ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD: msg = "VIDEO_RECORD"; ret = 0; break; case ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT: msg = "VIDEO_SNAPSHOT"; ret = 0; break; case ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG: msg = "ZERO_SHUTTER_LAG"; ret = 0; break; case ANDROID_CONTROL_CAPTURE_INTENT_MANUAL: msg = "MANUAL"; ret = 0; break; case ANDROID_CONTROL_CAPTURE_INTENT_MOTION_TRACKING: msg = "MOTION_TRACKING"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_CONTROL_EFFECT_MODE: { switch (value) { case ANDROID_CONTROL_EFFECT_MODE_OFF: msg = "OFF"; ret = 0; break; case ANDROID_CONTROL_EFFECT_MODE_MONO: msg = "MONO"; ret = 0; break; case ANDROID_CONTROL_EFFECT_MODE_NEGATIVE: msg = "NEGATIVE"; ret = 0; break; case ANDROID_CONTROL_EFFECT_MODE_SOLARIZE: msg = "SOLARIZE"; ret = 0; break; case ANDROID_CONTROL_EFFECT_MODE_SEPIA: msg = "SEPIA"; ret = 0; break; case ANDROID_CONTROL_EFFECT_MODE_POSTERIZE: msg = "POSTERIZE"; ret = 0; break; case ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD: msg = "WHITEBOARD"; ret = 0; break; case ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD: msg = "BLACKBOARD"; ret = 0; break; case ANDROID_CONTROL_EFFECT_MODE_AQUA: msg = "AQUA"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_CONTROL_MODE: { switch (value) { case ANDROID_CONTROL_MODE_OFF: msg = "OFF"; ret = 0; break; case ANDROID_CONTROL_MODE_AUTO: msg = "AUTO"; ret = 0; break; case ANDROID_CONTROL_MODE_USE_SCENE_MODE: msg = "USE_SCENE_MODE"; ret = 0; break; case ANDROID_CONTROL_MODE_OFF_KEEP_STATE: msg = "OFF_KEEP_STATE"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_CONTROL_SCENE_MODE: { switch (value) { case ANDROID_CONTROL_SCENE_MODE_DISABLED: msg = "DISABLED"; ret = 0; break; case ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY: msg = "FACE_PRIORITY"; ret = 0; break; case ANDROID_CONTROL_SCENE_MODE_ACTION: msg = "ACTION"; ret = 0; break; case ANDROID_CONTROL_SCENE_MODE_PORTRAIT: msg = "PORTRAIT"; ret = 0; break; case ANDROID_CONTROL_SCENE_MODE_LANDSCAPE: msg = "LANDSCAPE"; ret = 0; break; case ANDROID_CONTROL_SCENE_MODE_NIGHT: msg = "NIGHT"; ret = 0; break; case ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT: msg = "NIGHT_PORTRAIT"; ret = 0; break; case ANDROID_CONTROL_SCENE_MODE_THEATRE: msg = "THEATRE"; ret = 0; break; case ANDROID_CONTROL_SCENE_MODE_BEACH: msg = "BEACH"; ret = 0; break; case ANDROID_CONTROL_SCENE_MODE_SNOW: msg = "SNOW"; ret = 0; break; case ANDROID_CONTROL_SCENE_MODE_SUNSET: msg = "SUNSET"; ret = 0; break; case ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO: msg = "STEADYPHOTO"; ret = 0; break; case ANDROID_CONTROL_SCENE_MODE_FIREWORKS: msg = "FIREWORKS"; ret = 0; break; case ANDROID_CONTROL_SCENE_MODE_SPORTS: msg = "SPORTS"; ret = 0; break; case ANDROID_CONTROL_SCENE_MODE_PARTY: msg = "PARTY"; ret = 0; break; case 
ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT: msg = "CANDLELIGHT"; ret = 0; break; case ANDROID_CONTROL_SCENE_MODE_BARCODE: msg = "BARCODE"; ret = 0; break; case ANDROID_CONTROL_SCENE_MODE_HIGH_SPEED_VIDEO: msg = "HIGH_SPEED_VIDEO"; ret = 0; break; case ANDROID_CONTROL_SCENE_MODE_HDR: msg = "HDR"; ret = 0; break; case ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY_LOW_LIGHT: msg = "FACE_PRIORITY_LOW_LIGHT"; ret = 0; break; case ANDROID_CONTROL_SCENE_MODE_DEVICE_CUSTOM_START: msg = "DEVICE_CUSTOM_START"; ret = 0; break; case ANDROID_CONTROL_SCENE_MODE_DEVICE_CUSTOM_END: msg = "DEVICE_CUSTOM_END"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_CONTROL_VIDEO_STABILIZATION_MODE: { switch (value) { case ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF: msg = "OFF"; ret = 0; break; case ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON: msg = "ON"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES: { break; } case ANDROID_CONTROL_AE_AVAILABLE_MODES: { break; } case ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES: { break; } case ANDROID_CONTROL_AE_COMPENSATION_RANGE: { break; } case ANDROID_CONTROL_AE_COMPENSATION_STEP: { break; } case ANDROID_CONTROL_AF_AVAILABLE_MODES: { break; } case ANDROID_CONTROL_AVAILABLE_EFFECTS: { break; } case ANDROID_CONTROL_AVAILABLE_SCENE_MODES: { break; } case ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES: { break; } case ANDROID_CONTROL_AWB_AVAILABLE_MODES: { break; } case ANDROID_CONTROL_MAX_REGIONS: { break; } case ANDROID_CONTROL_SCENE_MODE_OVERRIDES: { break; } case ANDROID_CONTROL_AE_PRECAPTURE_ID: { break; } case ANDROID_CONTROL_AE_STATE: { switch (value) { case ANDROID_CONTROL_AE_STATE_INACTIVE: msg = "INACTIVE"; ret = 0; break; case ANDROID_CONTROL_AE_STATE_SEARCHING: msg = "SEARCHING"; ret = 0; break; case ANDROID_CONTROL_AE_STATE_CONVERGED: msg = "CONVERGED"; ret = 0; break; case ANDROID_CONTROL_AE_STATE_LOCKED: msg = "LOCKED"; ret = 0; break; case ANDROID_CONTROL_AE_STATE_FLASH_REQUIRED: msg = "FLASH_REQUIRED"; ret = 0; break; case ANDROID_CONTROL_AE_STATE_PRECAPTURE: msg = "PRECAPTURE"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_CONTROL_AF_STATE: { switch (value) { case ANDROID_CONTROL_AF_STATE_INACTIVE: msg = "INACTIVE"; ret = 0; break; case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN: msg = "PASSIVE_SCAN"; ret = 0; break; case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED: msg = "PASSIVE_FOCUSED"; ret = 0; break; case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN: msg = "ACTIVE_SCAN"; ret = 0; break; case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED: msg = "FOCUSED_LOCKED"; ret = 0; break; case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED: msg = "NOT_FOCUSED_LOCKED"; ret = 0; break; case ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED: msg = "PASSIVE_UNFOCUSED"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_CONTROL_AF_TRIGGER_ID: { break; } case ANDROID_CONTROL_AWB_STATE: { switch (value) { case ANDROID_CONTROL_AWB_STATE_INACTIVE: msg = "INACTIVE"; ret = 0; break; case ANDROID_CONTROL_AWB_STATE_SEARCHING: msg = "SEARCHING"; ret = 0; break; case ANDROID_CONTROL_AWB_STATE_CONVERGED: msg = "CONVERGED"; ret = 0; break; case ANDROID_CONTROL_AWB_STATE_LOCKED: msg = "LOCKED"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS: { break; } case ANDROID_CONTROL_AE_LOCK_AVAILABLE: { switch (value) { case 
ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE: msg = "FALSE"; ret = 0; break; case ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE: msg = "TRUE"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_CONTROL_AWB_LOCK_AVAILABLE: { switch (value) { case ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE: msg = "FALSE"; ret = 0; break; case ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE: msg = "TRUE"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_CONTROL_AVAILABLE_MODES: { break; } case ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE: { break; } case ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST: { break; } case ANDROID_CONTROL_ENABLE_ZSL: { switch (value) { case ANDROID_CONTROL_ENABLE_ZSL_FALSE: msg = "FALSE"; ret = 0; break; case ANDROID_CONTROL_ENABLE_ZSL_TRUE: msg = "TRUE"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_CONTROL_AF_SCENE_CHANGE: { switch (value) { case ANDROID_CONTROL_AF_SCENE_CHANGE_NOT_DETECTED: msg = "NOT_DETECTED"; ret = 0; break; case ANDROID_CONTROL_AF_SCENE_CHANGE_DETECTED: msg = "DETECTED"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_DEMOSAIC_MODE: { switch (value) { case ANDROID_DEMOSAIC_MODE_FAST: msg = "FAST"; ret = 0; break; case ANDROID_DEMOSAIC_MODE_HIGH_QUALITY: msg = "HIGH_QUALITY"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_EDGE_MODE: { switch (value) { case ANDROID_EDGE_MODE_OFF: msg = "OFF"; ret = 0; break; case ANDROID_EDGE_MODE_FAST: msg = "FAST"; ret = 0; break; case ANDROID_EDGE_MODE_HIGH_QUALITY: msg = "HIGH_QUALITY"; ret = 0; break; case ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG: msg = "ZERO_SHUTTER_LAG"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_EDGE_STRENGTH: { break; } case ANDROID_EDGE_AVAILABLE_EDGE_MODES: { break; } case ANDROID_FLASH_FIRING_POWER: { break; } case ANDROID_FLASH_FIRING_TIME: { break; } case ANDROID_FLASH_MODE: { switch (value) { case ANDROID_FLASH_MODE_OFF: msg = "OFF"; ret = 0; break; case ANDROID_FLASH_MODE_SINGLE: msg = "SINGLE"; ret = 0; break; case ANDROID_FLASH_MODE_TORCH: msg = "TORCH"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_FLASH_COLOR_TEMPERATURE: { break; } case ANDROID_FLASH_MAX_ENERGY: { break; } case ANDROID_FLASH_STATE: { switch (value) { case ANDROID_FLASH_STATE_UNAVAILABLE: msg = "UNAVAILABLE"; ret = 0; break; case ANDROID_FLASH_STATE_CHARGING: msg = "CHARGING"; ret = 0; break; case ANDROID_FLASH_STATE_READY: msg = "READY"; ret = 0; break; case ANDROID_FLASH_STATE_FIRED: msg = "FIRED"; ret = 0; break; case ANDROID_FLASH_STATE_PARTIAL: msg = "PARTIAL"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_FLASH_INFO_AVAILABLE: { switch (value) { case ANDROID_FLASH_INFO_AVAILABLE_FALSE: msg = "FALSE"; ret = 0; break; case ANDROID_FLASH_INFO_AVAILABLE_TRUE: msg = "TRUE"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_FLASH_INFO_CHARGE_DURATION: { break; } case ANDROID_HOT_PIXEL_MODE: { switch (value) { case ANDROID_HOT_PIXEL_MODE_OFF: msg = "OFF"; ret = 0; break; case ANDROID_HOT_PIXEL_MODE_FAST: msg = "FAST"; ret = 0; break; case ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY: msg = "HIGH_QUALITY"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES: { break; } case ANDROID_JPEG_GPS_COORDINATES: { break; } case 
ANDROID_JPEG_GPS_PROCESSING_METHOD: { break; } case ANDROID_JPEG_GPS_TIMESTAMP: { break; } case ANDROID_JPEG_ORIENTATION: { break; } case ANDROID_JPEG_QUALITY: { break; } case ANDROID_JPEG_THUMBNAIL_QUALITY: { break; } case ANDROID_JPEG_THUMBNAIL_SIZE: { break; } case ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES: { break; } case ANDROID_JPEG_MAX_SIZE: { break; } case ANDROID_JPEG_SIZE: { break; } case ANDROID_LENS_APERTURE: { break; } case ANDROID_LENS_FILTER_DENSITY: { break; } case ANDROID_LENS_FOCAL_LENGTH: { break; } case ANDROID_LENS_FOCUS_DISTANCE: { break; } case ANDROID_LENS_OPTICAL_STABILIZATION_MODE: { switch (value) { case ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF: msg = "OFF"; ret = 0; break; case ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON: msg = "ON"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_LENS_FACING: { switch (value) { case ANDROID_LENS_FACING_FRONT: msg = "FRONT"; ret = 0; break; case ANDROID_LENS_FACING_BACK: msg = "BACK"; ret = 0; break; case ANDROID_LENS_FACING_EXTERNAL: msg = "EXTERNAL"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_LENS_POSE_ROTATION: { break; } case ANDROID_LENS_POSE_TRANSLATION: { break; } case ANDROID_LENS_FOCUS_RANGE: { break; } case ANDROID_LENS_STATE: { switch (value) { case ANDROID_LENS_STATE_STATIONARY: msg = "STATIONARY"; ret = 0; break; case ANDROID_LENS_STATE_MOVING: msg = "MOVING"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_LENS_INTRINSIC_CALIBRATION: { break; } case ANDROID_LENS_RADIAL_DISTORTION: { break; } case ANDROID_LENS_POSE_REFERENCE: { switch (value) { case ANDROID_LENS_POSE_REFERENCE_PRIMARY_CAMERA: msg = "PRIMARY_CAMERA"; ret = 0; break; case ANDROID_LENS_POSE_REFERENCE_GYROSCOPE: msg = "GYROSCOPE"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_LENS_DISTORTION: { break; } case ANDROID_LENS_INFO_AVAILABLE_APERTURES: { break; } case ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES: { break; } case ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS: { break; } case ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION: { break; } case ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE: { break; } case ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE: { break; } case ANDROID_LENS_INFO_SHADING_MAP_SIZE: { break; } case ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION: { switch (value) { case ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED: msg = "UNCALIBRATED"; ret = 0; break; case ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE: msg = "APPROXIMATE"; ret = 0; break; case ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED: msg = "CALIBRATED"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_NOISE_REDUCTION_MODE: { switch (value) { case ANDROID_NOISE_REDUCTION_MODE_OFF: msg = "OFF"; ret = 0; break; case ANDROID_NOISE_REDUCTION_MODE_FAST: msg = "FAST"; ret = 0; break; case ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY: msg = "HIGH_QUALITY"; ret = 0; break; case ANDROID_NOISE_REDUCTION_MODE_MINIMAL: msg = "MINIMAL"; ret = 0; break; case ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG: msg = "ZERO_SHUTTER_LAG"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_NOISE_REDUCTION_STRENGTH: { break; } case ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES: { break; } case ANDROID_QUIRKS_METERING_CROP_REGION: { break; } case ANDROID_QUIRKS_TRIGGER_AF_WITH_AUTO: { break; } case ANDROID_QUIRKS_USE_ZSL_FORMAT: { 
break; } case ANDROID_QUIRKS_USE_PARTIAL_RESULT: { break; } case ANDROID_QUIRKS_PARTIAL_RESULT: { switch (value) { case ANDROID_QUIRKS_PARTIAL_RESULT_FINAL: msg = "FINAL"; ret = 0; break; case ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL: msg = "PARTIAL"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_REQUEST_FRAME_COUNT: { break; } case ANDROID_REQUEST_ID: { break; } case ANDROID_REQUEST_INPUT_STREAMS: { break; } case ANDROID_REQUEST_METADATA_MODE: { switch (value) { case ANDROID_REQUEST_METADATA_MODE_NONE: msg = "NONE"; ret = 0; break; case ANDROID_REQUEST_METADATA_MODE_FULL: msg = "FULL"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_REQUEST_OUTPUT_STREAMS: { break; } case ANDROID_REQUEST_TYPE: { switch (value) { case ANDROID_REQUEST_TYPE_CAPTURE: msg = "CAPTURE"; ret = 0; break; case ANDROID_REQUEST_TYPE_REPROCESS: msg = "REPROCESS"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS: { break; } case ANDROID_REQUEST_MAX_NUM_REPROCESS_STREAMS: { break; } case ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS: { break; } case ANDROID_REQUEST_PIPELINE_DEPTH: { break; } case ANDROID_REQUEST_PIPELINE_MAX_DEPTH: { break; } case ANDROID_REQUEST_PARTIAL_RESULT_COUNT: { break; } case ANDROID_REQUEST_AVAILABLE_CAPABILITIES: { switch (value) { case ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE: msg = "BACKWARD_COMPATIBLE"; ret = 0; break; case ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR: msg = "MANUAL_SENSOR"; ret = 0; break; case ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING: msg = "MANUAL_POST_PROCESSING"; ret = 0; break; case ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW: msg = "RAW"; ret = 0; break; case ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING: msg = "PRIVATE_REPROCESSING"; ret = 0; break; case ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS: msg = "READ_SENSOR_SETTINGS"; ret = 0; break; case ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE: msg = "BURST_CAPTURE"; ret = 0; break; case ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING: msg = "YUV_REPROCESSING"; ret = 0; break; case ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT: msg = "DEPTH_OUTPUT"; ret = 0; break; case ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO: msg = "CONSTRAINED_HIGH_SPEED_VIDEO"; ret = 0; break; case ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MOTION_TRACKING: msg = "MOTION_TRACKING"; ret = 0; break; case ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA: msg = "LOGICAL_MULTI_CAMERA"; ret = 0; break; case ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME: msg = "MONOCHROME"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS: { break; } case ANDROID_REQUEST_AVAILABLE_RESULT_KEYS: { break; } case ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS: { break; } case ANDROID_REQUEST_AVAILABLE_SESSION_KEYS: { break; } case ANDROID_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS: { break; } case ANDROID_SCALER_CROP_REGION: { break; } case ANDROID_SCALER_AVAILABLE_FORMATS: { switch (value) { case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16: msg = "RAW16"; ret = 0; break; case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE: msg = "RAW_OPAQUE"; ret = 0; break; case ANDROID_SCALER_AVAILABLE_FORMATS_YV12: msg = "YV12"; ret = 0; break; case ANDROID_SCALER_AVAILABLE_FORMATS_YCrCb_420_SP: msg = "YCrCb_420_SP"; ret = 0; break; case 
ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED: msg = "IMPLEMENTATION_DEFINED"; ret = 0; break; case ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888: msg = "YCbCr_420_888"; ret = 0; break; case ANDROID_SCALER_AVAILABLE_FORMATS_BLOB: msg = "BLOB"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS: { break; } case ANDROID_SCALER_AVAILABLE_JPEG_SIZES: { break; } case ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM: { break; } case ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS: { break; } case ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES: { break; } case ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS: { break; } case ANDROID_SCALER_AVAILABLE_RAW_SIZES: { break; } case ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP: { break; } case ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS: { switch (value) { case ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT: msg = "OUTPUT"; ret = 0; break; case ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT: msg = "INPUT"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS: { break; } case ANDROID_SCALER_AVAILABLE_STALL_DURATIONS: { break; } case ANDROID_SCALER_CROPPING_TYPE: { switch (value) { case ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY: msg = "CENTER_ONLY"; ret = 0; break; case ANDROID_SCALER_CROPPING_TYPE_FREEFORM: msg = "FREEFORM"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_SENSOR_EXPOSURE_TIME: { break; } case ANDROID_SENSOR_FRAME_DURATION: { break; } case ANDROID_SENSOR_SENSITIVITY: { break; } case ANDROID_SENSOR_REFERENCE_ILLUMINANT1: { switch (value) { case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT: msg = "DAYLIGHT"; ret = 0; break; case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT: msg = "FLUORESCENT"; ret = 0; break; case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN: msg = "TUNGSTEN"; ret = 0; break; case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLASH: msg = "FLASH"; ret = 0; break; case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER: msg = "FINE_WEATHER"; ret = 0; break; case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER: msg = "CLOUDY_WEATHER"; ret = 0; break; case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE: msg = "SHADE"; ret = 0; break; case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT: msg = "DAYLIGHT_FLUORESCENT"; ret = 0; break; case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT: msg = "DAY_WHITE_FLUORESCENT"; ret = 0; break; case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT: msg = "COOL_WHITE_FLUORESCENT"; ret = 0; break; case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT: msg = "WHITE_FLUORESCENT"; ret = 0; break; case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A: msg = "STANDARD_A"; ret = 0; break; case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_B: msg = "STANDARD_B"; ret = 0; break; case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_C: msg = "STANDARD_C"; ret = 0; break; case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55: msg = "D55"; ret = 0; break; case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65: msg = "D65"; ret = 0; break; case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75: msg = "D75"; ret = 0; break; case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50: msg = "D50"; ret = 0; break; case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN: msg = "ISO_STUDIO_TUNGSTEN"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case 
ANDROID_SENSOR_REFERENCE_ILLUMINANT2: { break; } case ANDROID_SENSOR_CALIBRATION_TRANSFORM1: { break; } case ANDROID_SENSOR_CALIBRATION_TRANSFORM2: { break; } case ANDROID_SENSOR_COLOR_TRANSFORM1: { break; } case ANDROID_SENSOR_COLOR_TRANSFORM2: { break; } case ANDROID_SENSOR_FORWARD_MATRIX1: { break; } case ANDROID_SENSOR_FORWARD_MATRIX2: { break; } case ANDROID_SENSOR_BASE_GAIN_FACTOR: { break; } case ANDROID_SENSOR_BLACK_LEVEL_PATTERN: { break; } case ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY: { break; } case ANDROID_SENSOR_ORIENTATION: { break; } case ANDROID_SENSOR_PROFILE_HUE_SAT_MAP_DIMENSIONS: { break; } case ANDROID_SENSOR_TIMESTAMP: { break; } case ANDROID_SENSOR_TEMPERATURE: { break; } case ANDROID_SENSOR_NEUTRAL_COLOR_POINT: { break; } case ANDROID_SENSOR_NOISE_PROFILE: { break; } case ANDROID_SENSOR_PROFILE_HUE_SAT_MAP: { break; } case ANDROID_SENSOR_PROFILE_TONE_CURVE: { break; } case ANDROID_SENSOR_GREEN_SPLIT: { break; } case ANDROID_SENSOR_TEST_PATTERN_DATA: { break; } case ANDROID_SENSOR_TEST_PATTERN_MODE: { switch (value) { case ANDROID_SENSOR_TEST_PATTERN_MODE_OFF: msg = "OFF"; ret = 0; break; case ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR: msg = "SOLID_COLOR"; ret = 0; break; case ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS: msg = "COLOR_BARS"; ret = 0; break; case ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY: msg = "COLOR_BARS_FADE_TO_GRAY"; ret = 0; break; case ANDROID_SENSOR_TEST_PATTERN_MODE_PN9: msg = "PN9"; ret = 0; break; case ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1: msg = "CUSTOM1"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES: { break; } case ANDROID_SENSOR_ROLLING_SHUTTER_SKEW: { break; } case ANDROID_SENSOR_OPTICAL_BLACK_REGIONS: { break; } case ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL: { break; } case ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL: { break; } case ANDROID_SENSOR_OPAQUE_RAW_SIZE: { break; } case ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE: { break; } case ANDROID_SENSOR_INFO_SENSITIVITY_RANGE: { break; } case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT: { switch (value) { case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB: msg = "RGGB"; ret = 0; break; case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG: msg = "GRBG"; ret = 0; break; case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG: msg = "GBRG"; ret = 0; break; case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR: msg = "BGGR"; ret = 0; break; case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGB: msg = "RGB"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE: { break; } case ANDROID_SENSOR_INFO_MAX_FRAME_DURATION: { break; } case ANDROID_SENSOR_INFO_PHYSICAL_SIZE: { break; } case ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE: { break; } case ANDROID_SENSOR_INFO_WHITE_LEVEL: { break; } case ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE: { switch (value) { case ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN: msg = "UNKNOWN"; ret = 0; break; case ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME: msg = "REALTIME"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_SENSOR_INFO_LENS_SHADING_APPLIED: { switch (value) { case ANDROID_SENSOR_INFO_LENS_SHADING_APPLIED_FALSE: msg = "FALSE"; ret = 0; break; case ANDROID_SENSOR_INFO_LENS_SHADING_APPLIED_TRUE: msg = "TRUE"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE: { break; } case 
ANDROID_SHADING_MODE: { switch (value) { case ANDROID_SHADING_MODE_OFF: msg = "OFF"; ret = 0; break; case ANDROID_SHADING_MODE_FAST: msg = "FAST"; ret = 0; break; case ANDROID_SHADING_MODE_HIGH_QUALITY: msg = "HIGH_QUALITY"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_SHADING_STRENGTH: { break; } case ANDROID_SHADING_AVAILABLE_MODES: { break; } case ANDROID_STATISTICS_FACE_DETECT_MODE: { switch (value) { case ANDROID_STATISTICS_FACE_DETECT_MODE_OFF: msg = "OFF"; ret = 0; break; case ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE: msg = "SIMPLE"; ret = 0; break; case ANDROID_STATISTICS_FACE_DETECT_MODE_FULL: msg = "FULL"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_STATISTICS_HISTOGRAM_MODE: { switch (value) { case ANDROID_STATISTICS_HISTOGRAM_MODE_OFF: msg = "OFF"; ret = 0; break; case ANDROID_STATISTICS_HISTOGRAM_MODE_ON: msg = "ON"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_STATISTICS_SHARPNESS_MAP_MODE: { switch (value) { case ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF: msg = "OFF"; ret = 0; break; case ANDROID_STATISTICS_SHARPNESS_MAP_MODE_ON: msg = "ON"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE: { switch (value) { case ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF: msg = "OFF"; ret = 0; break; case ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_ON: msg = "ON"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_STATISTICS_FACE_IDS: { break; } case ANDROID_STATISTICS_FACE_LANDMARKS: { break; } case ANDROID_STATISTICS_FACE_RECTANGLES: { break; } case ANDROID_STATISTICS_FACE_SCORES: { break; } case ANDROID_STATISTICS_HISTOGRAM: { break; } case ANDROID_STATISTICS_SHARPNESS_MAP: { break; } case ANDROID_STATISTICS_LENS_SHADING_CORRECTION_MAP: { break; } case ANDROID_STATISTICS_LENS_SHADING_MAP: { break; } case ANDROID_STATISTICS_PREDICTED_COLOR_GAINS: { break; } case ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM: { break; } case ANDROID_STATISTICS_SCENE_FLICKER: { switch (value) { case ANDROID_STATISTICS_SCENE_FLICKER_NONE: msg = "NONE"; ret = 0; break; case ANDROID_STATISTICS_SCENE_FLICKER_50HZ: msg = "50HZ"; ret = 0; break; case ANDROID_STATISTICS_SCENE_FLICKER_60HZ: msg = "60HZ"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_STATISTICS_HOT_PIXEL_MAP: { break; } case ANDROID_STATISTICS_LENS_SHADING_MAP_MODE: { switch (value) { case ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF: msg = "OFF"; ret = 0; break; case ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON: msg = "ON"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_STATISTICS_OIS_DATA_MODE: { switch (value) { case ANDROID_STATISTICS_OIS_DATA_MODE_OFF: msg = "OFF"; ret = 0; break; case ANDROID_STATISTICS_OIS_DATA_MODE_ON: msg = "ON"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_STATISTICS_OIS_TIMESTAMPS: { break; } case ANDROID_STATISTICS_OIS_X_SHIFTS: { break; } case ANDROID_STATISTICS_OIS_Y_SHIFTS: { break; } case ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES: { break; } case ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT: { break; } case ANDROID_STATISTICS_INFO_MAX_FACE_COUNT: { break; } case ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT: { break; } case ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE: { break; } case ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE: { break; } case 
ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES: { break; } case ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES: { break; } case ANDROID_STATISTICS_INFO_AVAILABLE_OIS_DATA_MODES: { break; } case ANDROID_TONEMAP_CURVE_BLUE: { break; } case ANDROID_TONEMAP_CURVE_GREEN: { break; } case ANDROID_TONEMAP_CURVE_RED: { break; } case ANDROID_TONEMAP_MODE: { switch (value) { case ANDROID_TONEMAP_MODE_CONTRAST_CURVE: msg = "CONTRAST_CURVE"; ret = 0; break; case ANDROID_TONEMAP_MODE_FAST: msg = "FAST"; ret = 0; break; case ANDROID_TONEMAP_MODE_HIGH_QUALITY: msg = "HIGH_QUALITY"; ret = 0; break; case ANDROID_TONEMAP_MODE_GAMMA_VALUE: msg = "GAMMA_VALUE"; ret = 0; break; case ANDROID_TONEMAP_MODE_PRESET_CURVE: msg = "PRESET_CURVE"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_TONEMAP_MAX_CURVE_POINTS: { break; } case ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES: { break; } case ANDROID_TONEMAP_GAMMA: { break; } case ANDROID_TONEMAP_PRESET_CURVE: { switch (value) { case ANDROID_TONEMAP_PRESET_CURVE_SRGB: msg = "SRGB"; ret = 0; break; case ANDROID_TONEMAP_PRESET_CURVE_REC709: msg = "REC709"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_LED_TRANSMIT: { switch (value) { case ANDROID_LED_TRANSMIT_OFF: msg = "OFF"; ret = 0; break; case ANDROID_LED_TRANSMIT_ON: msg = "ON"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_LED_AVAILABLE_LEDS: { switch (value) { case ANDROID_LED_AVAILABLE_LEDS_TRANSMIT: msg = "TRANSMIT"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL: { switch (value) { case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED: msg = "LIMITED"; ret = 0; break; case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL: msg = "FULL"; ret = 0; break; case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY: msg = "LEGACY"; ret = 0; break; case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3: msg = "3"; ret = 0; break; case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL: msg = "EXTERNAL"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_INFO_VERSION: { break; } case ANDROID_BLACK_LEVEL_LOCK: { switch (value) { case ANDROID_BLACK_LEVEL_LOCK_OFF: msg = "OFF"; ret = 0; break; case ANDROID_BLACK_LEVEL_LOCK_ON: msg = "ON"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_SYNC_FRAME_NUMBER: { switch (value) { case ANDROID_SYNC_FRAME_NUMBER_CONVERGING: msg = "CONVERGING"; ret = 0; break; case ANDROID_SYNC_FRAME_NUMBER_UNKNOWN: msg = "UNKNOWN"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_SYNC_MAX_LATENCY: { switch (value) { case ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL: msg = "PER_FRAME_CONTROL"; ret = 0; break; case ANDROID_SYNC_MAX_LATENCY_UNKNOWN: msg = "UNKNOWN"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR: { break; } case ANDROID_REPROCESS_MAX_CAPTURE_STALL: { break; } case ANDROID_DEPTH_MAX_DEPTH_SAMPLES: { break; } case ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS: { switch (value) { case ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT: msg = "OUTPUT"; ret = 0; break; case ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_INPUT: msg = "INPUT"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS: { break; } case 
ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS: { break; } case ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE: { switch (value) { case ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE: msg = "FALSE"; ret = 0; break; case ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_TRUE: msg = "TRUE"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS: { break; } case ANDROID_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE: { switch (value) { case ANDROID_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE_APPROXIMATE: msg = "APPROXIMATE"; ret = 0; break; case ANDROID_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE_CALIBRATED: msg = "CALIBRATED"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_DISTORTION_CORRECTION_MODE: { switch (value) { case ANDROID_DISTORTION_CORRECTION_MODE_OFF: msg = "OFF"; ret = 0; break; case ANDROID_DISTORTION_CORRECTION_MODE_FAST: msg = "FAST"; ret = 0; break; case ANDROID_DISTORTION_CORRECTION_MODE_HIGH_QUALITY: msg = "HIGH_QUALITY"; ret = 0; break; default: msg = "error: enum value out of range"; } break; } case ANDROID_DISTORTION_CORRECTION_AVAILABLE_MODES: { break; } } /* Guard against a zero-sized destination buffer: size - 1 would otherwise underflow. */ if (size > 0) { strncpy(dst, msg, size - 1); dst[size - 1] = '\0'; } return ret; } #define CAMERA_METADATA_ENUM_STRING_MAX_SIZE 29
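Editor's note: the table-driven lookup above makes camera_metadata_enum_snprint() the single entry point for turning a (tag, value) pair into a human-readable enum name; it returns 0 and writes the name into dst on success, and -1 with an error string otherwise. A minimal caller might look as follows (a sketch only; it assumes the Android camera metadata headers that define the ANDROID_* constants and this function's prototype are in scope):

#include <stdio.h>

int main(void)
{
	char buf[CAMERA_METADATA_ENUM_STRING_MAX_SIZE];

	/* Look up the symbolic name of an AE mode enum value. */
	if (camera_metadata_enum_snprint(ANDROID_CONTROL_AE_MODE,
					 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,
					 buf, sizeof(buf)) == 0)
		printf("aeMode: %s\n", buf); /* Prints "aeMode: ON_AUTO_FLASH" */

	return 0;
}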
0
repos/libcamera/src
repos/libcamera/src/libcamera/control_ids_draft.yaml
# SPDX-License-Identifier: LGPL-2.1-or-later # # Copyright (C) 2019, Google Inc. # %YAML 1.1 --- # Unless otherwise stated, all controls are bi-directional, i.e. they can be # set through Request::controls() and returned through Request::metadata(). vendor: draft controls: - AePrecaptureTrigger: type: int32_t description: | Control for AE metering trigger. Currently identical to ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER. Whether the camera device will trigger a precapture metering sequence when it processes this request. enum: - name: AePrecaptureTriggerIdle value: 0 description: The trigger is idle. - name: AePrecaptureTriggerStart value: 1 description: The pre-capture AE metering is started by the camera. - name: AePrecaptureTriggerCancel value: 2 description: | The camera will cancel any active or completed metering sequence. The AE algorithm is reset to its initial state. - NoiseReductionMode: type: int32_t description: | Control to select the noise reduction algorithm mode. Currently identical to ANDROID_NOISE_REDUCTION_MODE. Mode of operation for the noise reduction algorithm. enum: - name: NoiseReductionModeOff value: 0 description: No noise reduction is applied. - name: NoiseReductionModeFast value: 1 description: | Noise reduction is applied without reducing the frame rate. - name: NoiseReductionModeHighQuality value: 2 description: | High quality noise reduction at the expense of frame rate. - name: NoiseReductionModeMinimal value: 3 description: | Minimal noise reduction is applied without reducing the frame rate. - name: NoiseReductionModeZSL value: 4 description: | Noise reduction is applied at different levels to different streams. - ColorCorrectionAberrationMode: type: int32_t description: | Control to select the color correction aberration mode. Currently identical to ANDROID_COLOR_CORRECTION_ABERRATION_MODE. Mode of operation for the chromatic aberration correction algorithm. enum: - name: ColorCorrectionAberrationOff value: 0 description: No aberration correction is applied. - name: ColorCorrectionAberrationFast value: 1 description: Aberration correction will not slow down the frame rate. - name: ColorCorrectionAberrationHighQuality value: 2 description: | High quality aberration correction which might reduce the frame rate. - AeState: type: int32_t description: | Control to report the current AE algorithm state. Currently identical to ANDROID_CONTROL_AE_STATE. Current state of the AE algorithm. enum: - name: AeStateInactive value: 0 description: The AE algorithm is inactive. - name: AeStateSearching value: 1 description: The AE algorithm has not converged yet. - name: AeStateConverged value: 2 description: The AE algorithm has converged. - name: AeStateLocked value: 3 description: The AE algorithm is locked. - name: AeStateFlashRequired value: 4 description: The AE algorithm would need a flash for good results. - name: AeStatePrecapture value: 5 description: | The AE algorithm has started a pre-capture metering session. \sa AePrecaptureTrigger - AwbState: type: int32_t description: | Control to report the current AWB algorithm state. Currently identical to ANDROID_CONTROL_AWB_STATE. Current state of the AWB algorithm. enum: - name: AwbStateInactive value: 0 description: The AWB algorithm is inactive. - name: AwbStateSearching value: 1 description: The AWB algorithm has not converged yet. - name: AwbConverged value: 2 description: The AWB algorithm has converged. - name: AwbLocked value: 3 description: The AWB algorithm is locked.
- SensorRollingShutterSkew: type: int64_t description: | Control to report the time between the start of exposure of the first row and the start of exposure of the last row. Currently identical to ANDROID_SENSOR_ROLLING_SHUTTER_SKEW. - LensShadingMapMode: type: int32_t description: | Control to report if the lens shading map is available. Currently identical to ANDROID_STATISTICS_LENS_SHADING_MAP_MODE. enum: - name: LensShadingMapModeOff value: 0 description: No lens shading map mode is available. - name: LensShadingMapModeOn value: 1 description: The lens shading map mode is available. - PipelineDepth: type: int32_t description: | Specifies the number of pipeline stages the frame went through from when it was exposed to when the final completed result was available to the framework. Always less than or equal to PipelineMaxDepth. Currently identical to ANDROID_REQUEST_PIPELINE_DEPTH. The typical value for this control is 3 as a frame is first exposed, captured and then processed in a single pass through the ISP. Any additional processing step performed after the ISP pass (for example face detection, additional format conversions, etc.) counts as an additional pipeline stage. - MaxLatency: type: int32_t description: | The maximum number of frames that can occur after a request (different from the previous) has been submitted, and before the result's state becomes synchronized. A value of -1 indicates unknown latency, and 0 indicates per-frame control. Currently identical to ANDROID_SYNC_MAX_LATENCY. - TestPatternMode: type: int32_t description: | Control to select the test pattern mode. Currently identical to ANDROID_SENSOR_TEST_PATTERN_MODE. enum: - name: TestPatternModeOff value: 0 description: | No test pattern mode is used. The camera device returns frames from the image sensor. - name: TestPatternModeSolidColor value: 1 description: | Each pixel in [R, G_even, G_odd, B] is replaced by its respective color channel provided in test pattern data. \todo Add control for test pattern data. - name: TestPatternModeColorBars value: 2 description: | All pixel data is replaced with an 8-bar color pattern. The vertical bars (left-to-right) are as follows: white, yellow, cyan, green, magenta, red, blue and black. Each bar should take up 1/8 of the sensor pixel array width. When this is not possible, the bar size should be rounded down to the nearest integer and the pattern can repeat on the right side. Each bar's height must always take up the full sensor pixel array height. - name: TestPatternModeColorBarsFadeToGray value: 3 description: | The test pattern is similar to TestPatternModeColorBars, except that each bar should start at its specified color at the top and fade to gray at the bottom. Furthermore, each bar is further subdivided into a left and right half. The left half should have a smooth gradient, and the right half should have a quantized gradient. In particular, the right half should consist of blocks of the same color for 1/16th of the active sensor pixel array width. The least significant bits in the quantized gradient should be copied from the most significant bits of the smooth gradient. The height of each bar should always be a multiple of 128. When this is not the case, the pattern should repeat at the bottom of the image. - name: TestPatternModePn9 value: 4 description: | All pixel data is replaced by a pseudo-random sequence generated from a PN9 512-bit sequence (typically implemented in hardware with a linear feedback shift register).
The generator should be reset at the beginning of each frame, and thus each subsequent raw frame with this test pattern should be exactly the same as the last. - name: TestPatternModeCustom1 value: 256 description: | The first custom test pattern. All custom patterns that are available only on this camera device are at least this numeric value. All of the custom test patterns will be static (that is the raw image must not vary from frame to frame). ...
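Editor's note: these draft control definitions are the YAML source from which libcamera generates the C++ declarations in the libcamera::controls::draft namespace (control_ids.h). As a rough sketch of how such a control is applied, assuming the generated names match the YAML entries above, the noise reduction mode could be set on a capture request before queueing it:

#include <libcamera/control_ids.h>
#include <libcamera/controls.h>
#include <libcamera/request.h>

using namespace libcamera;

/* Ask for fast noise reduction on a single capture request. */
void setFastNoiseReduction(Request *request)
{
	ControlList &ctrls = request->controls();
	ctrls.set(controls::draft::NoiseReductionMode,
		  controls::draft::NoiseReductionModeFast);
}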
0
repos/libcamera/src
repos/libcamera/src/libcamera/control_ranges.yaml
# SPDX-License-Identifier: LGPL-2.1-or-later # # Copyright (C) 2023, Raspberry Pi Ltd # %YAML 1.1 --- # Specifies the control id ranges/offsets for core/draft libcamera and vendor # controls and properties. ranges: # Core libcamera controls libcamera: 0 # Draft designated libcamera controls draft: 10000 # Raspberry Pi vendor controls rpi: 20000 # Next range starts at 30000 ...
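Editor's note: the ranges above partition the numeric control id space, so a control's global id is its range base plus its offset within the range: core controls occupy [0, 10000), draft controls [10000, 20000) and Raspberry Pi vendor controls [20000, 30000). A small sanity check against the generated headers (a sketch; it assumes the controls::draft namespace and the ControlId::id() accessor generated from the files above):

#include <cassert>

#include <libcamera/control_ids.h>

int main()
{
	/* Draft control ids start at the 'draft: 10000' base declared above. */
	unsigned int id = libcamera::controls::draft::NoiseReductionMode.id();
	assert(id >= 10000 && id < 20000);

	return 0;
}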
0
repos/libcamera/src
repos/libcamera/src/libcamera/media_device.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2018, Google Inc. * * Media device handler */ #include "libcamera/internal/media_device.h" #include <errno.h> #include <fcntl.h> #include <stdint.h> #include <string> #include <string.h> #include <sys/ioctl.h> #include <unistd.h> #include <vector> #include <linux/media.h> #include <libcamera/base/log.h> /** * \file media_device.h * \brief Provide a representation of a Linux kernel Media Controller device * that exposes the full graph topology. */ namespace libcamera { LOG_DEFINE_CATEGORY(MediaDevice) /** * \class MediaDevice * \brief The MediaDevice represents a Media Controller device with its full * graph of connected objects. * * A MediaDevice instance is associated with a media controller device node when * created, and that association is kept for the lifetime of the MediaDevice * instance. * * The instance is created with an empty media graph. Before performing any * other operation, it must be populated by calling populate(). Instances of * MediaEntity, MediaPad and MediaLink are created to model the media graph, * and stored in a map indexed by object id. * * The graph is valid once successfully populated, as reported by the isValid() * function. It can be queried to list all entities(), or entities can be * looked up by name with getEntityByName(). The graph can be traversed from * entity to entity through pads and links as exposed by the corresponding * classes. * * Media devices can be claimed for exclusive use with acquire(), released with * release() and tested with busy(). This mechanism is aimed at pipeline * managers to claim media devices they support during enumeration. */ /** * \brief Construct a MediaDevice * \param[in] deviceNode The media device node path * * Once constructed, the media device is invalid, and must be populated with * populate() before the media graph can be queried. */ MediaDevice::MediaDevice(const std::string &deviceNode) : deviceNode_(deviceNode), valid_(false), acquired_(false) { } MediaDevice::~MediaDevice() { fd_.reset(); clear(); } std::string MediaDevice::logPrefix() const { return deviceNode() + "[" + driver() + "]"; } /** * \brief Claim a device for exclusive use * * The device claiming mechanism offers simple media device access arbitration * between multiple users. When the media device is created, it is available to * all users. Users can query the media graph to determine whether they can * support the device and, if they do, claim the device for exclusive use. Other * users are then expected to skip over media devices in use as reported by the * busy() function. * * Once claimed, the device shall be released by its user when no longer needed * by calling the release() function. Acquiring the media device opens a file * descriptor to the device which is kept open until release() is called. * * Exclusive access is only guaranteed if all users of the media device abide by * the device claiming mechanism, as it isn't enforced by the media device * itself.
* * \return true if the device was successfully claimed, or false if it was * already in use * \sa release(), busy() */ bool MediaDevice::acquire() { if (acquired_) return false; if (open()) return false; acquired_ = true; return true; } /** * \brief Release a device previously claimed for exclusive use * \sa acquire(), busy() */ void MediaDevice::release() { close(); acquired_ = false; } /** * \brief Lock the device to prevent it from being used by other instances of * libcamera * * Multiple instances of libcamera might be running on the same system at the * same time. To allow the different instances to coexist, system resources in * the form of media devices must be accessible for enumerating the cameras * they provide at all times, while still allowing an instance to lock a * resource while it prepares to actively use a camera from the resource. * * This function shall not be called from a pipeline handler implementation * directly, as the base PipelineHandler implementation handles this * on behalf of the specified implementation. * * \return true if the device could be locked, false otherwise * \sa unlock() */ bool MediaDevice::lock() { if (!fd_.isValid()) return false; if (lockf(fd_.get(), F_TLOCK, 0)) return false; return true; } /** * \brief Unlock the device and free it for use by other libcamera instances * * This function shall not be called from a pipeline handler implementation * directly, as the base PipelineHandler implementation handles this * on behalf of the specified implementation. * * \sa lock() */ void MediaDevice::unlock() { if (!fd_.isValid()) return; lockf(fd_.get(), F_ULOCK, 0); } /** * \fn MediaDevice::busy() * \brief Check if a device is in use * \return true if the device has been claimed for exclusive use, or false if it * is available * \sa acquire(), release() */ /** * \brief Populate the MediaDevice with device information and media objects * * This function retrieves the media device information and enumerates all * media objects in the media device graph and creates their MediaObject * representations. All entities, pads and links are stored as MediaEntity, * MediaPad and MediaLink respectively, with cross-references between objects. * Interfaces are not processed. * * Entities are stored in a separate list in the MediaDevice to ease lookup, * while pads are accessible from the entity they belong to and links from the * pads they connect. * * \return 0 on success or a negative error code otherwise */ int MediaDevice::populate() { struct media_v2_topology topology = {}; struct media_v2_entity *ents = nullptr; struct media_v2_interface *interfaces = nullptr; struct media_v2_link *links = nullptr; struct media_v2_pad *pads = nullptr; __u64 version = -1; int ret; clear(); ret = open(); if (ret) return ret; struct media_device_info info = {}; ret = ioctl(fd_.get(), MEDIA_IOC_DEVICE_INFO, &info); if (ret) { ret = -errno; LOG(MediaDevice, Error) << "Failed to get media device info: " << strerror(-ret); goto done; } driver_ = info.driver; model_ = info.model; version_ = info.media_version; hwRevision_ = info.hw_revision; /* * Keep calling G_TOPOLOGY until the version number stays stable.
*/ while (true) { topology.topology_version = 0; topology.ptr_entities = reinterpret_cast<uintptr_t>(ents); topology.ptr_interfaces = reinterpret_cast<uintptr_t>(interfaces); topology.ptr_links = reinterpret_cast<uintptr_t>(links); topology.ptr_pads = reinterpret_cast<uintptr_t>(pads); ret = ioctl(fd_.get(), MEDIA_IOC_G_TOPOLOGY, &topology); if (ret < 0) { ret = -errno; LOG(MediaDevice, Error) << "Failed to enumerate topology: " << strerror(-ret); goto done; } if (version == topology.topology_version) break; delete[] ents; delete[] interfaces; delete[] pads; delete[] links; ents = new struct media_v2_entity[topology.num_entities](); interfaces = new struct media_v2_interface[topology.num_interfaces](); links = new struct media_v2_link[topology.num_links](); pads = new struct media_v2_pad[topology.num_pads](); version = topology.topology_version; } /* Populate entities, pads and links. */ if (populateEntities(topology) && populatePads(topology) && populateLinks(topology)) valid_ = true; ret = 0; done: close(); delete[] ents; delete[] interfaces; delete[] pads; delete[] links; if (!valid_) { clear(); return -EINVAL; } return ret; } /** * \fn MediaDevice::isValid() * \brief Query whether the media graph has been populated and is valid * \return true if the media graph is valid, false otherwise */ /** * \fn MediaDevice::driver() * \brief Retrieve the media device driver name * \return The name of the kernel driver that handles the MediaDevice */ /** * \fn MediaDevice::deviceNode() * \brief Retrieve the media device node path * \return The MediaDevice deviceNode path */ /** * \fn MediaDevice::model() * \brief Retrieve the media device model name * \return The MediaDevice model name */ /** * \fn MediaDevice::version() * \brief Retrieve the media device API version * * The version is formatted with the KERNEL_VERSION() macro. * * \return The MediaDevice API version */ /** * \fn MediaDevice::hwRevision() * \brief Retrieve the media device hardware revision * * The hardware revision is in a driver-specific format. * * \return The MediaDevice hardware revision */ /** * \fn MediaDevice::entities() * \brief Retrieve the list of entities in the media graph * \return The list of MediaEntities registered in the MediaDevice */ /** * \brief Return the MediaEntity with name \a name * \param[in] name The entity name * \return The entity with \a name, or nullptr if no such entity is found */ MediaEntity *MediaDevice::getEntityByName(const std::string &name) const { for (MediaEntity *e : entities_) if (e->name() == name) return e; return nullptr; } /** * \brief Retrieve the MediaLink connecting two pads, identified by entity * names and pad indexes * \param[in] sourceName The source entity name * \param[in] sourceIdx The index of the source pad * \param[in] sinkName The sink entity name * \param[in] sinkIdx The index of the sink pad * * Find the link that connects the pad at index \a sourceIdx of the source * entity with name \a sourceName to the pad at index \a sinkIdx of the * sink entity with name \a sinkName, if any.
* * \sa link(const MediaEntity *source, unsigned int sourceIdx, * const MediaEntity *sink, unsigned int sinkIdx) * \sa link(const MediaPad *source, const MediaPad *sink) * * \return The link that connects the two pads, or nullptr if no such link * exists */ MediaLink *MediaDevice::link(const std::string &sourceName, unsigned int sourceIdx, const std::string &sinkName, unsigned int sinkIdx) { const MediaEntity *source = getEntityByName(sourceName); const MediaEntity *sink = getEntityByName(sinkName); if (!source || !sink) return nullptr; return link(source, sourceIdx, sink, sinkIdx); } /** * \brief Retrieve the MediaLink connecting two pads, identified by the * entities they belong to and pad indexes * \param[in] source The source entity * \param[in] sourceIdx The index of the source pad * \param[in] sink The sink entity * \param[in] sinkIdx The index of the sink pad * * Find the link that connects the pad at index \a sourceIdx of the source * entity \a source to the pad at index \a sinkIdx of the sink entity \a * sink, if any. * * \sa link(const std::string &sourceName, unsigned int sourceIdx, * const std::string &sinkName, unsigned int sinkIdx) * \sa link(const MediaPad *source, const MediaPad *sink) * * \return The link that connects the two pads, or nullptr if no such link * exists */ MediaLink *MediaDevice::link(const MediaEntity *source, unsigned int sourceIdx, const MediaEntity *sink, unsigned int sinkIdx) { const MediaPad *sourcePad = source->getPadByIndex(sourceIdx); const MediaPad *sinkPad = sink->getPadByIndex(sinkIdx); if (!sourcePad || !sinkPad) return nullptr; return link(sourcePad, sinkPad); } /** * \brief Retrieve the MediaLink that connects two pads * \param[in] source The source pad * \param[in] sink The sink pad * * \sa link(const std::string &sourceName, unsigned int sourceIdx, * const std::string &sinkName, unsigned int sinkIdx) * \sa link(const MediaEntity *source, unsigned int sourceIdx, * const MediaEntity *sink, unsigned int sinkIdx) * * \return The link that connects the two pads, or nullptr if no such link * exists */ MediaLink *MediaDevice::link(const MediaPad *source, const MediaPad *sink) { for (MediaLink *link : source->links()) { if (link->sink()->id() == sink->id()) return link; } return nullptr; } /** * \brief Disable all links in the media device * * Disable all the media device links, clearing the MEDIA_LNK_FL_ENABLED flag * on links which are not flagged as IMMUTABLE. * * \return 0 on success or a negative error code otherwise */ int MediaDevice::disableLinks() { for (MediaEntity *entity : entities_) { for (MediaPad *pad : entity->pads()) { if (!(pad->flags() & MEDIA_PAD_FL_SOURCE)) continue; for (MediaLink *link : pad->links()) { if (link->flags() & MEDIA_LNK_FL_IMMUTABLE) continue; int ret = link->setEnabled(false); if (ret) return ret; } } } return 0; } /** * \var MediaDevice::disconnected * \brief Signal emitted when the media device is disconnected from the system * * This signal is emitted when the device enumerator detects that the media * device has been removed from the system. For hot-pluggable devices this is * usually caused by physical device disconnection, but can also result from * driver unloading for most devices. The media device is passed as a parameter.
*/ /** * \brief Open the media device * * \return 0 on success or a negative error code otherwise * \retval -EBUSY Media device already open * \sa close() */ int MediaDevice::open() { if (fd_.isValid()) { LOG(MediaDevice, Error) << "MediaDevice already open"; return -EBUSY; } fd_ = UniqueFD(::open(deviceNode_.c_str(), O_RDWR | O_CLOEXEC)); if (!fd_.isValid()) { int ret = -errno; LOG(MediaDevice, Error) << "Failed to open media device at " << deviceNode_ << ": " << strerror(-ret); return ret; } return 0; } /** * \brief Close the media device * * This function closes the media device node. It does not invalidate the media * graph and all cached media objects remain valid and can be accessed normally. * Once closed no operation interacting with the media device node can be * performed until the device is opened again. * * Closing an already closed device is allowed and will not perform any * operation. * * \sa open() */ void MediaDevice::close() { fd_.reset(); } /** * \var MediaDevice::objects_ * \brief Global map of media objects (entities, pads, links) keyed by their * object id. */ /** * \brief Retrieve the media graph object specified by \a id * \return The graph object, or nullptr if no object with \a id is found */ MediaObject *MediaDevice::object(unsigned int id) { auto it = objects_.find(id); return (it == objects_.end()) ? nullptr : it->second; } /** * \brief Add a media object to the media graph * * If the \a object has a unique id it is added to the media graph, and its * lifetime will be managed by the media device. Otherwise the object isn't * added to the graph and the caller must delete it. * * \return true if the object was successfully added to the graph and false * otherwise */ bool MediaDevice::addObject(MediaObject *object) { if (objects_.find(object->id()) != objects_.end()) { LOG(MediaDevice, Error) << "Element with id " << object->id() << " already enumerated."; return false; } objects_[object->id()] = object; return true; } /** * \brief Delete all graph objects in the media device * * Clear the media graph and delete all the objects it contains. After this * function returns any previously obtained pointer to a media graph object * becomes invalid. * * The media device graph state is reset to invalid when the graph is cleared. * * \sa isValid() */ void MediaDevice::clear() { for (auto const &o : objects_) delete o.second; objects_.clear(); entities_.clear(); valid_ = false; } /** * \var MediaDevice::entities_ * \brief Global list of media entities in the media graph */ /** * \brief Find the interface associated with an entity * \param[in] topology The media topology as returned by MEDIA_IOC_G_TOPOLOGY * \param[in] entityId The entity id * \return A pointer to the interface if found, or nullptr otherwise */ struct media_v2_interface *MediaDevice::findInterface(const struct media_v2_topology &topology, unsigned int entityId) { struct media_v2_link *links = reinterpret_cast<struct media_v2_link *> (topology.ptr_links); unsigned int ifaceId = 0; unsigned int i; for (i = 0; i < topology.num_links; ++i) { /* Search for the interface to entity link. 
*/ if (links[i].sink_id != entityId) continue; if ((links[i].flags & MEDIA_LNK_FL_LINK_TYPE) != MEDIA_LNK_FL_INTERFACE_LINK) continue; ifaceId = links[i].source_id; break; } if (i == topology.num_links) return nullptr; struct media_v2_interface *ifaces = reinterpret_cast<struct media_v2_interface *> (topology.ptr_interfaces); for (i = 0; i < topology.num_interfaces; ++i) { if (ifaces[i].id == ifaceId) return &ifaces[i]; } return nullptr; } /* * For each entity in the media graph create a MediaEntity and store a * reference in the media device objects map and entities list. */ bool MediaDevice::populateEntities(const struct media_v2_topology &topology) { struct media_v2_entity *mediaEntities = reinterpret_cast<struct media_v2_entity *> (topology.ptr_entities); for (unsigned int i = 0; i < topology.num_entities; ++i) { struct media_v2_entity *ent = &mediaEntities[i]; /* * The media_v2_entity structure was missing the flag field before * v4.19. */ if (!MEDIA_V2_ENTITY_HAS_FLAGS(version_)) fixupEntityFlags(ent); /* * Find the interface linked to this entity to get the device * node major and minor numbers. */ struct media_v2_interface *iface = findInterface(topology, ent->id); MediaEntity *entity = new MediaEntity(this, ent, iface); if (!addObject(entity)) { delete entity; return false; } entities_.push_back(entity); } return true; } bool MediaDevice::populatePads(const struct media_v2_topology &topology) { struct media_v2_pad *mediaPads = reinterpret_cast<struct media_v2_pad *> (topology.ptr_pads); for (unsigned int i = 0; i < topology.num_pads; ++i) { unsigned int entity_id = mediaPads[i].entity_id; /* Store a reference to this MediaPad in entity. */ MediaEntity *mediaEntity = dynamic_cast<MediaEntity *> (object(entity_id)); if (!mediaEntity) { LOG(MediaDevice, Error) << "Failed to find entity with id: " << entity_id; return false; } MediaPad *pad = new MediaPad(&mediaPads[i], mediaEntity); if (!addObject(pad)) { delete pad; return false; } mediaEntity->addPad(pad); } return true; } bool MediaDevice::populateLinks(const struct media_v2_topology &topology) { struct media_v2_link *mediaLinks = reinterpret_cast<struct media_v2_link *> (topology.ptr_links); for (unsigned int i = 0; i < topology.num_links; ++i) { if ((mediaLinks[i].flags & MEDIA_LNK_FL_LINK_TYPE) == MEDIA_LNK_FL_INTERFACE_LINK) continue; /* Look up the source and sink objects. 
*/ unsigned int source_id = mediaLinks[i].source_id; MediaObject *source = object(source_id); if (!source) { LOG(MediaDevice, Error) << "Failed to find MediaObject with id " << source_id; return false; } unsigned int sink_id = mediaLinks[i].sink_id; MediaObject *sink = object(sink_id); if (!sink) { LOG(MediaDevice, Error) << "Failed to find MediaObject with id " << sink_id; return false; } switch (mediaLinks[i].flags & MEDIA_LNK_FL_LINK_TYPE) { case MEDIA_LNK_FL_DATA_LINK: { MediaPad *sourcePad = dynamic_cast<MediaPad *>(source); MediaPad *sinkPad = dynamic_cast<MediaPad *>(sink); if (!sourcePad || !sinkPad) { LOG(MediaDevice, Error) << "Source or sink is not a pad"; return false; } MediaLink *link = new MediaLink(&mediaLinks[i], sourcePad, sinkPad); if (!addObject(link)) { delete link; return false; } link->source()->addLink(link); link->sink()->addLink(link); break; } case MEDIA_LNK_FL_ANCILLARY_LINK: { MediaEntity *primary = dynamic_cast<MediaEntity *>(source); MediaEntity *ancillary = dynamic_cast<MediaEntity *>(sink); if (!primary || !ancillary) { LOG(MediaDevice, Error) << "Source or sink is not an entity"; return false; } primary->addAncillaryEntity(ancillary); break; } default: LOG(MediaDevice, Warning) << "Unknown media link type"; break; } } return true; } /** * \brief Fixup entity flags using the legacy API * \param[in] entity The entity * * This function is used as a fallback to query entity flags using the legacy * MEDIA_IOC_ENUM_ENTITIES ioctl when running on a kernel version that doesn't * provide them through the MEDIA_IOC_G_TOPOLOGY ioctl. */ void MediaDevice::fixupEntityFlags(struct media_v2_entity *entity) { struct media_entity_desc desc = {}; desc.id = entity->id; int ret = ioctl(fd_.get(), MEDIA_IOC_ENUM_ENTITIES, &desc); if (ret < 0) { ret = -errno; LOG(MediaDevice, Debug) << "Failed to retrieve information for entity " << entity->id << ": " << strerror(-ret); return; } entity->flags = desc.flags; } /** * \brief Apply \a flags to a link between two pads * \param[in] link The link to apply flags to * \param[in] flags The flags to apply to the link * * This function applies the link \a flags (as defined by the MEDIA_LNK_FL_* * macros from the Media Controller API) to the given \a link. It implements * low-level link setup as it performs no checks on the validity of the \a * flags, and assumes that the supplied \a flags are valid for the link (e.g. * immutable links cannot be disabled). * * \sa MediaLink::setEnabled(bool enable) * * \return 0 on success or a negative error code otherwise */ int MediaDevice::setupLink(const MediaLink *link, unsigned int flags) { struct media_link_desc linkDesc = {}; MediaPad *source = link->source(); MediaPad *sink = link->sink(); linkDesc.source.entity = source->entity()->id(); linkDesc.source.index = source->index(); linkDesc.source.flags = MEDIA_PAD_FL_SOURCE; linkDesc.sink.entity = sink->entity()->id(); linkDesc.sink.index = sink->index(); linkDesc.sink.flags = MEDIA_PAD_FL_SINK; linkDesc.flags = flags; int ret = ioctl(fd_.get(), MEDIA_IOC_SETUP_LINK, &linkDesc); if (ret) { ret = -errno; LOG(MediaDevice, Error) << "Failed to setup link " << source->entity()->name() << "[" << source->index() << "] -> " << sink->entity()->name() << "[" << sink->index() << "]: " << strerror(-ret); return ret; } LOG(MediaDevice, Debug) << source->entity()->name() << "[" << source->index() << "] -> " << sink->entity()->name() << "[" << sink->index() << "]: " << flags; return 0; } } /* namespace libcamera */
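/*
 * Illustrative usage sketch, not part of the upstream file: combining the
 * link lookup and setup APIs documented above. The entity names ("sensor",
 * "csi-receiver") and pad indexes are hypothetical placeholders, and the
 * sketch assumes the MediaDevice has already been opened and populated by
 * the device enumerator.
 */
#include <errno.h>

#include "libcamera/internal/media_device.h"
#include "libcamera/internal/media_object.h"

using namespace libcamera;

int connectSensorToReceiver(MediaDevice *media)
{
	/* Start from a known state: clear all mutable data links. */
	int ret = media->disableLinks();
	if (ret)
		return ret;

	/* Resolve the "sensor"[0] -> "csi-receiver"[0] link by name. */
	MediaLink *link = media->link("sensor", 0, "csi-receiver", 0);
	if (!link)
		return -ENOENT;

	/* setEnabled() applies MEDIA_LNK_FL_ENABLED through setupLink(). */
	return link->setEnabled(true);
}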
0
repos/libcamera/src
repos/libcamera/src/libcamera/v4l2_device.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2019, Google Inc. * * Common base for V4L2 video devices and subdevices */ #include "libcamera/internal/v4l2_device.h" #include <fcntl.h> #include <iomanip> #include <limits.h> #include <map> #include <stdlib.h> #include <string.h> #include <sys/ioctl.h> #include <sys/syscall.h> #include <unistd.h> #include <vector> #include <linux/v4l2-mediabus.h> #include <libcamera/base/event_notifier.h> #include <libcamera/base/log.h> #include <libcamera/base/utils.h> #include "libcamera/internal/formats.h" #include "libcamera/internal/sysfs.h" /** * \file v4l2_device.h * \brief Common base for V4L2 devices and subdevices */ namespace libcamera { LOG_DEFINE_CATEGORY(V4L2) /** * \class V4L2Device * \brief Base class for V4L2VideoDevice and V4L2Subdevice * * The V4L2Device class groups together the functions and fields common to * both the V4L2VideoDevice and V4L2Subdevice classes, and provides a base * class with functions to open and close the device node associated with the * device and to perform IOCTL system calls on it. * * The V4L2Device class cannot be instantiated directly, as its constructor * is protected. Users should instead create instances of one of the derived * classes to model either a V4L2 video device or a V4L2 subdevice. */ /** * \brief Construct a V4L2Device * \param[in] deviceNode The device node filesystem path * * Initialize the file descriptor to -1 and store the \a deviceNode to be used * at open() time. */ V4L2Device::V4L2Device(const std::string &deviceNode) : deviceNode_(deviceNode), fdEventNotifier_(nullptr), frameStartEnabled_(false) { } /** * \brief Destroy a V4L2Device */ V4L2Device::~V4L2Device() { } /** * \brief Open a V4L2 device node * \param[in] flags Access mode flags * * Open the device node path with the provided access mode \a flags and * initialize the file descriptor, which was initially set to -1. * * \return 0 on success or a negative error code otherwise */ int V4L2Device::open(unsigned int flags) { if (isOpen()) { LOG(V4L2, Error) << "Device already open"; return -EBUSY; } UniqueFD fd(syscall(SYS_openat, AT_FDCWD, deviceNode_.c_str(), flags | O_CLOEXEC)); if (!fd.isValid()) { int ret = -errno; LOG(V4L2, Error) << "Failed to open V4L2 device '" << deviceNode_ << "': " << strerror(-ret); return ret; } setFd(std::move(fd)); return 0; } /** * \brief Set the file descriptor of a V4L2 device * \param[in] fd The file descriptor handle * * This function allows a device to provide an already opened file descriptor * referring to the V4L2 device node, instead of opening it with open(). This * can be used for V4L2 M2M devices where a single video device node is used for * both the output and capture devices, or when receiving an open file * descriptor in a context that doesn't have permission to open the device node * itself. * * This function and the open() function are mutually exclusive; only one of the * two shall be used for a V4L2Device instance.
* * \return 0 on success or a negative error code otherwise */ int V4L2Device::setFd(UniqueFD fd) { if (isOpen()) return -EBUSY; fd_ = std::move(fd); fdEventNotifier_ = new EventNotifier(fd_.get(), EventNotifier::Exception); fdEventNotifier_->activated.connect(this, &V4L2Device::eventAvailable); fdEventNotifier_->setEnabled(false); listControls(); return 0; } /** * \brief Close the device node * * Reset the file descriptor to -1 */ void V4L2Device::close() { if (!isOpen()) return; delete fdEventNotifier_; fd_.reset(); } /** * \fn V4L2Device::isOpen() * \brief Check if the V4L2 device node is open * \return True if the V4L2 device node is open, false otherwise */ /** * \fn V4L2Device::controls() * \brief Retrieve the supported V4L2 controls and their information * \return A map of the V4L2 controls supported by the device */ /** * \brief Read controls from the device * \param[in] ids The list of controls to read, specified by their ID * * This function reads the value of all controls contained in \a ids, and * returns their values as a ControlList. * * If any control in \a ids is not supported by the device, is disabled (i.e. * has the V4L2_CTRL_FLAG_DISABLED flag set), or if any other error occurs * during validation of the requested controls, no control is read and this * function returns an empty control list. * * \return The control values in a ControlList on success, or an empty list on * error */ ControlList V4L2Device::getControls(const std::vector<uint32_t> &ids) { if (ids.empty()) return {}; ControlList ctrls{ controls_ }; for (uint32_t id : ids) { const auto iter = controls_.find(id); if (iter == controls_.end()) { LOG(V4L2, Error) << "Control " << utils::hex(id) << " not found"; return {}; } ctrls.set(id, {}); } std::vector<v4l2_ext_control> v4l2Ctrls(ids.size()); memset(v4l2Ctrls.data(), 0, sizeof(v4l2_ext_control) * ctrls.size()); unsigned int i = 0; for (auto &ctrl : ctrls) { unsigned int id = ctrl.first; const struct v4l2_query_ext_ctrl &info = controlInfo_[id]; v4l2_ext_control &v4l2Ctrl = v4l2Ctrls[i++]; v4l2Ctrl.id = id; if (info.flags & V4L2_CTRL_FLAG_HAS_PAYLOAD) { ControlType type; switch (info.type) { case V4L2_CTRL_TYPE_U8: type = ControlTypeByte; break; default: LOG(V4L2, Error) << "Unsupported payload control type " << info.type; return {}; } ControlValue &value = ctrl.second; value.reserve(type, true, info.elems); Span<uint8_t> data = value.data(); v4l2Ctrl.p_u8 = data.data(); v4l2Ctrl.size = data.size(); } } struct v4l2_ext_controls v4l2ExtCtrls = {}; v4l2ExtCtrls.which = V4L2_CTRL_WHICH_CUR_VAL; v4l2ExtCtrls.controls = v4l2Ctrls.data(); v4l2ExtCtrls.count = v4l2Ctrls.size(); int ret = ioctl(VIDIOC_G_EXT_CTRLS, &v4l2ExtCtrls); if (ret) { unsigned int errorIdx = v4l2ExtCtrls.error_idx; /* Generic validation error. */ if (errorIdx == 0 || errorIdx >= v4l2Ctrls.size()) { LOG(V4L2, Error) << "Unable to read controls: " << strerror(-ret); return {}; } /* A specific control failed. */ const unsigned int id = v4l2Ctrls[errorIdx].id; LOG(V4L2, Error) << "Unable to read control " << utils::hex(id) << ": " << strerror(-ret); v4l2Ctrls.resize(errorIdx); } updateControls(&ctrls, v4l2Ctrls); return ctrls; } /** * \brief Write controls to the device * \param[in] ctrls The list of controls to write * * This function writes the value of all controls contained in \a ctrls, and * stores the values actually applied to the device in the corresponding * \a ctrls entry. * * If any control in \a ctrls is not supported by the device, is disabled (i.e. 
* has the V4L2_CTRL_FLAG_DISABLED flag set), is read-only, or if any other error * occurs during validation of the requested controls, no control is written and * this function returns -EINVAL. * * If an error occurs while writing the controls, the index of the first * control that couldn't be written is returned. All controls below that index * are written and their values are updated in \a ctrls, while all other * controls are not written and their values are not changed. * * \return 0 on success or an error code otherwise * \retval -EINVAL One of the controls is not supported or not accessible * \retval i The index of the control that failed */ int V4L2Device::setControls(ControlList *ctrls) { if (ctrls->empty()) return 0; std::vector<v4l2_ext_control> v4l2Ctrls(ctrls->size()); memset(v4l2Ctrls.data(), 0, sizeof(v4l2_ext_control) * ctrls->size()); for (auto [ctrl, i] = std::pair(ctrls->begin(), 0u); i < ctrls->size(); ctrl++, i++) { const unsigned int id = ctrl->first; const auto iter = controls_.find(id); if (iter == controls_.end()) { LOG(V4L2, Error) << "Control " << utils::hex(id) << " not found"; return -EINVAL; } v4l2_ext_control &v4l2Ctrl = v4l2Ctrls[i]; v4l2Ctrl.id = id; /* Set the v4l2_ext_control value for the write operation. */ ControlValue &value = ctrl->second; switch (iter->first->type()) { case ControlTypeInteger32: { if (value.isArray()) { Span<uint8_t> data = value.data(); v4l2Ctrl.p_u32 = reinterpret_cast<uint32_t *>(data.data()); v4l2Ctrl.size = data.size(); } else { v4l2Ctrl.value = value.get<int32_t>(); } break; } case ControlTypeInteger64: v4l2Ctrl.value64 = value.get<int64_t>(); break; case ControlTypeByte: { if (!value.isArray()) { LOG(V4L2, Error) << "Control " << utils::hex(id) << " requires an array value"; return -EINVAL; } Span<uint8_t> data = value.data(); v4l2Ctrl.p_u8 = data.data(); v4l2Ctrl.size = data.size(); break; } default: /* \todo To be changed to support strings. */ v4l2Ctrl.value = value.get<int32_t>(); break; } } struct v4l2_ext_controls v4l2ExtCtrls = {}; v4l2ExtCtrls.which = V4L2_CTRL_WHICH_CUR_VAL; v4l2ExtCtrls.controls = v4l2Ctrls.data(); v4l2ExtCtrls.count = v4l2Ctrls.size(); int ret = ioctl(VIDIOC_S_EXT_CTRLS, &v4l2ExtCtrls); if (ret) { unsigned int errorIdx = v4l2ExtCtrls.error_idx; /* Generic validation error. */ if (errorIdx == 0 || errorIdx >= v4l2Ctrls.size()) { LOG(V4L2, Error) << "Unable to set controls: " << strerror(-ret); return -EINVAL; } /* A specific control failed. */ const unsigned int id = v4l2Ctrls[errorIdx].id; LOG(V4L2, Error) << "Unable to set control " << utils::hex(id) << ": " << strerror(-ret); v4l2Ctrls.resize(errorIdx); ret = errorIdx; } updateControls(ctrls, v4l2Ctrls); return ret; } /** * \brief Retrieve the v4l2_query_ext_ctrl information for the given control * \param[in] id The V4L2 control id * \return A pointer to the v4l2_query_ext_ctrl structure for the given * control, or a null pointer if not found */ const struct v4l2_query_ext_ctrl *V4L2Device::controlInfo(uint32_t id) const { const auto it = controlInfo_.find(id); if (it == controlInfo_.end()) return nullptr; return &it->second; } /** * \brief Retrieve the device path in sysfs * * This function returns the sysfs path to the physical device backing the V4L2 * device. The path is guaranteed to be an absolute path, without any symbolic * link.
* * It includes the sysfs mount point prefix * * \return The device path in sysfs */ std::string V4L2Device::devicePath() const { std::string devicePath = sysfs::charDevPath(deviceNode_) + "/device"; char *realPath = realpath(devicePath.c_str(), nullptr); if (!realPath) { LOG(V4L2, Fatal) << "Can not resolve device path for " << devicePath; return {}; } std::string path{ realPath }; free(realPath); return path; } /** * \brief Enable or disable frame start event notification * \param[in] enable True to enable frame start events, false to disable them * * This function enables or disables generation of frame start events. Once * enabled, the events are signalled through the frameStart signal. * * \return 0 on success, a negative error code otherwise */ int V4L2Device::setFrameStartEnabled(bool enable) { if (frameStartEnabled_ == enable) return 0; struct v4l2_event_subscription event{}; event.type = V4L2_EVENT_FRAME_SYNC; unsigned long request = enable ? VIDIOC_SUBSCRIBE_EVENT : VIDIOC_UNSUBSCRIBE_EVENT; int ret = ioctl(request, &event); if (enable && ret) return ret; fdEventNotifier_->setEnabled(enable); frameStartEnabled_ = enable; return ret; } /** * \var V4L2Device::frameStart * \brief A Signal emitted when capture of a frame has started */ /** * \brief Perform an IOCTL system call on the device node * \param[in] request The IOCTL request code * \param[in] argp A pointer to the IOCTL argument * \return 0 on success or a negative error code otherwise */ int V4L2Device::ioctl(unsigned long request, void *argp) { /* * Printing out an error message is usually better performed * in the caller, which can provide more context. */ if (::ioctl(fd_.get(), request, argp) < 0) return -errno; return 0; } /** * \fn V4L2Device::deviceNode() * \brief Retrieve the device node path * \return The device node path */ /** * \fn V4L2Device::fd() * \brief Retrieve the V4L2 device file descriptor * \return The V4L2 device file descriptor, -1 if the device node is not open */ /** * \brief Retrieve the libcamera control type associated with the V4L2 control * \param[in] ctrlType The V4L2 control type * \return The ControlType associated to \a ctrlType */ ControlType V4L2Device::v4l2CtrlType(uint32_t ctrlType) { switch (ctrlType) { case V4L2_CTRL_TYPE_U8: return ControlTypeByte; case V4L2_CTRL_TYPE_BOOLEAN: return ControlTypeBool; case V4L2_CTRL_TYPE_INTEGER: return ControlTypeInteger32; case V4L2_CTRL_TYPE_INTEGER64: return ControlTypeInteger64; case V4L2_CTRL_TYPE_MENU: case V4L2_CTRL_TYPE_BUTTON: case V4L2_CTRL_TYPE_BITMASK: case V4L2_CTRL_TYPE_INTEGER_MENU: /* * More precise types may be needed, for now use a 32-bit * integer type. 
*/ return ControlTypeInteger32; default: return ControlTypeNone; } } /** * \brief Create a ControlId for a V4L2 control * \param[in] ctrl The v4l2_query_ext_ctrl that represents a V4L2 control * \return A ControlId associated to \a ctrl */ std::unique_ptr<ControlId> V4L2Device::v4l2ControlId(const v4l2_query_ext_ctrl &ctrl) { const size_t len = strnlen(ctrl.name, sizeof(ctrl.name)); const std::string name(static_cast<const char *>(ctrl.name), len); const ControlType type = v4l2CtrlType(ctrl.type); return std::make_unique<ControlId>(ctrl.id, name, type); } /** * \brief Create a ControlInfo for a V4L2 control * \param[in] ctrl The v4l2_query_ext_ctrl that represents a V4L2 control * \return A ControlInfo that represents \a ctrl */ std::optional<ControlInfo> V4L2Device::v4l2ControlInfo(const v4l2_query_ext_ctrl &ctrl) { switch (ctrl.type) { case V4L2_CTRL_TYPE_U8: return ControlInfo(static_cast<uint8_t>(ctrl.minimum), static_cast<uint8_t>(ctrl.maximum), static_cast<uint8_t>(ctrl.default_value)); case V4L2_CTRL_TYPE_BOOLEAN: return ControlInfo(static_cast<bool>(ctrl.minimum), static_cast<bool>(ctrl.maximum), static_cast<bool>(ctrl.default_value)); case V4L2_CTRL_TYPE_INTEGER64: return ControlInfo(static_cast<int64_t>(ctrl.minimum), static_cast<int64_t>(ctrl.maximum), static_cast<int64_t>(ctrl.default_value)); case V4L2_CTRL_TYPE_INTEGER_MENU: case V4L2_CTRL_TYPE_MENU: return v4l2MenuControlInfo(ctrl); default: return ControlInfo(static_cast<int32_t>(ctrl.minimum), static_cast<int32_t>(ctrl.maximum), static_cast<int32_t>(ctrl.default_value)); } } /** * \brief Create ControlInfo for a V4L2 menu control * \param[in] ctrl The v4l2_query_ext_ctrl that represents a V4L2 menu control * * The created ControlInfo contains indices acquired by VIDIOC_QUERYMENU. * * \return A ControlInfo that represents \a ctrl */ std::optional<ControlInfo> V4L2Device::v4l2MenuControlInfo(const struct v4l2_query_ext_ctrl &ctrl) { std::vector<ControlValue> indices; struct v4l2_querymenu menu = {}; menu.id = ctrl.id; if (ctrl.minimum < 0) return std::nullopt; for (int32_t index = ctrl.minimum; index <= ctrl.maximum; ++index) { menu.index = index; if (ioctl(VIDIOC_QUERYMENU, &menu) != 0) continue; indices.push_back(index); } /* * Some faulty UVC devices are known to return an empty menu control. * Controls without a menu option can not be set, or read, so they are * not exposed. */ if (indices.size() == 0) return std::nullopt; return ControlInfo(indices, ControlValue(static_cast<int32_t>(ctrl.default_value))); } /* * \brief List and store information about all controls supported by the * V4L2 device */ void V4L2Device::listControls() { ControlInfoMap::Map ctrls; struct v4l2_query_ext_ctrl ctrl = {}; while (1) { ctrl.id |= V4L2_CTRL_FLAG_NEXT_CTRL | V4L2_CTRL_FLAG_NEXT_COMPOUND; if (ioctl(VIDIOC_QUERY_EXT_CTRL, &ctrl)) break; if (ctrl.type == V4L2_CTRL_TYPE_CTRL_CLASS || ctrl.flags & V4L2_CTRL_FLAG_DISABLED) continue; switch (ctrl.type) { case V4L2_CTRL_TYPE_INTEGER: case V4L2_CTRL_TYPE_BOOLEAN: case V4L2_CTRL_TYPE_MENU: case V4L2_CTRL_TYPE_BUTTON: case V4L2_CTRL_TYPE_INTEGER64: case V4L2_CTRL_TYPE_BITMASK: case V4L2_CTRL_TYPE_INTEGER_MENU: case V4L2_CTRL_TYPE_U8: break; /* \todo Support other control types. 
*/ default: LOG(V4L2, Debug) << "Control " << utils::hex(ctrl.id) << " has unsupported type " << ctrl.type; continue; } LOG(V4L2, Debug) << "Control: " << ctrl.name << " (" << utils::hex(ctrl.id) << ")"; controlIds_.emplace_back(v4l2ControlId(ctrl)); controlIdMap_[ctrl.id] = controlIds_.back().get(); controlInfo_.emplace(ctrl.id, ctrl); std::optional<ControlInfo> info = v4l2ControlInfo(ctrl); if (!info) { LOG(V4L2, Error) << "Control " << ctrl.name << " cannot be registered"; continue; } ctrls.emplace(controlIds_.back().get(), *info); } controls_ = ControlInfoMap(std::move(ctrls), controlIdMap_); } /** * \brief Update the information for all device controls * * The V4L2Device class caches information about all controls supported by the * device and exposes it through the controls() and controlInfo() functions. * Control information may change at runtime, for instance when formats on a * subdev are modified. When this occurs, this function can be used to refresh * control information. The information is refreshed in-place; all pointers to * v4l2_query_ext_ctrl instances previously returned by controlInfo() and * iterators to the ControlInfoMap returned by controls() remain valid. * * Note that control information isn't refreshed automatically as it may be an * expensive operation. The V4L2Device users are responsible for calling this * function when required, based on their usage pattern of the class. */ void V4L2Device::updateControlInfo() { for (auto &[controlId, info] : controls_) { unsigned int id = controlId->id(); /* * Assume controlInfo_ has a corresponding entry, as it has been * generated by listControls(). */ struct v4l2_query_ext_ctrl &ctrl = controlInfo_[id]; if (ioctl(VIDIOC_QUERY_EXT_CTRL, &ctrl)) { LOG(V4L2, Debug) << "Could not refresh control " << utils::hex(id); continue; } info = *v4l2ControlInfo(ctrl); } } /* * \brief Update the values of the V4L2 controls in \a ctrls using the * values in \a v4l2Ctrls * \param[inout] ctrls List of V4L2 controls to update * \param[in] v4l2Ctrls List of V4L2 extended controls as returned by the driver */ void V4L2Device::updateControls(ControlList *ctrls, Span<const v4l2_ext_control> v4l2Ctrls) { for (const v4l2_ext_control &v4l2Ctrl : v4l2Ctrls) { const unsigned int id = v4l2Ctrl.id; ControlValue value = ctrls->get(id); if (value.isArray()) { /* * No action required, the VIDIOC_[GS]_EXT_CTRLS ioctl * accessed the ControlValue storage directly for array * controls. */ continue; } const auto iter = controls_.find(id); ASSERT(iter != controls_.end()); switch (iter->first->type()) { case ControlTypeInteger64: value.set<int64_t>(v4l2Ctrl.value64); break; default: /* * Note: this catches the ControlTypeInteger32 case. * * \todo To be changed when support for string controls * is added. */ value.set<int32_t>(v4l2Ctrl.value); break; } ctrls->set(id, value); } } /** * \brief Slot to handle V4L2 events from the V4L2 device * * When this slot is called, a V4L2 event is available to be dequeued from the * device.
*/ void V4L2Device::eventAvailable() { struct v4l2_event event{}; int ret = ioctl(VIDIOC_DQEVENT, &event); if (ret < 0) { LOG(V4L2, Error) << "Failed to dequeue event, disabling event notifier"; fdEventNotifier_->setEnabled(false); return; } if (event.type != V4L2_EVENT_FRAME_SYNC) { LOG(V4L2, Error) << "Spurious event (" << event.type << "), disabling event notifier"; fdEventNotifier_->setEnabled(false); return; } frameStart.emit(event.u.frame_sync.frame_sequence); } static const std::map<uint32_t, ColorSpace> v4l2ToColorSpace = { { V4L2_COLORSPACE_RAW, ColorSpace::Raw }, { V4L2_COLORSPACE_SRGB, { ColorSpace::Primaries::Rec709, ColorSpace::TransferFunction::Srgb, ColorSpace::YcbcrEncoding::Rec601, ColorSpace::Range::Limited } }, { V4L2_COLORSPACE_JPEG, ColorSpace::Sycc }, { V4L2_COLORSPACE_SMPTE170M, ColorSpace::Smpte170m }, { V4L2_COLORSPACE_REC709, ColorSpace::Rec709 }, { V4L2_COLORSPACE_BT2020, ColorSpace::Rec2020 }, }; static const std::map<uint32_t, ColorSpace::TransferFunction> v4l2ToTransferFunction = { { V4L2_XFER_FUNC_NONE, ColorSpace::TransferFunction::Linear }, { V4L2_XFER_FUNC_SRGB, ColorSpace::TransferFunction::Srgb }, { V4L2_XFER_FUNC_709, ColorSpace::TransferFunction::Rec709 }, }; static const std::map<uint32_t, ColorSpace::YcbcrEncoding> v4l2ToYcbcrEncoding = { { V4L2_YCBCR_ENC_601, ColorSpace::YcbcrEncoding::Rec601 }, { V4L2_YCBCR_ENC_709, ColorSpace::YcbcrEncoding::Rec709 }, { V4L2_YCBCR_ENC_BT2020, ColorSpace::YcbcrEncoding::Rec2020 }, }; static const std::map<uint32_t, ColorSpace::Range> v4l2ToRange = { { V4L2_QUANTIZATION_FULL_RANGE, ColorSpace::Range::Full }, { V4L2_QUANTIZATION_LIM_RANGE, ColorSpace::Range::Limited }, }; static const std::vector<std::pair<ColorSpace, v4l2_colorspace>> colorSpaceToV4l2 = { { ColorSpace::Raw, V4L2_COLORSPACE_RAW }, { ColorSpace::Sycc, V4L2_COLORSPACE_JPEG }, { ColorSpace::Smpte170m, V4L2_COLORSPACE_SMPTE170M }, { ColorSpace::Rec709, V4L2_COLORSPACE_REC709 }, { ColorSpace::Rec2020, V4L2_COLORSPACE_BT2020 }, }; static const std::map<ColorSpace::Primaries, v4l2_colorspace> primariesToV4l2 = { { ColorSpace::Primaries::Raw, V4L2_COLORSPACE_RAW }, { ColorSpace::Primaries::Smpte170m, V4L2_COLORSPACE_SMPTE170M }, { ColorSpace::Primaries::Rec709, V4L2_COLORSPACE_REC709 }, { ColorSpace::Primaries::Rec2020, V4L2_COLORSPACE_BT2020 }, }; static const std::map<ColorSpace::TransferFunction, v4l2_xfer_func> transferFunctionToV4l2 = { { ColorSpace::TransferFunction::Linear, V4L2_XFER_FUNC_NONE }, { ColorSpace::TransferFunction::Srgb, V4L2_XFER_FUNC_SRGB }, { ColorSpace::TransferFunction::Rec709, V4L2_XFER_FUNC_709 }, }; static const std::map<ColorSpace::YcbcrEncoding, v4l2_ycbcr_encoding> ycbcrEncodingToV4l2 = { /* V4L2 has no "none" encoding. */ { ColorSpace::YcbcrEncoding::None, V4L2_YCBCR_ENC_DEFAULT }, { ColorSpace::YcbcrEncoding::Rec601, V4L2_YCBCR_ENC_601 }, { ColorSpace::YcbcrEncoding::Rec709, V4L2_YCBCR_ENC_709 }, { ColorSpace::YcbcrEncoding::Rec2020, V4L2_YCBCR_ENC_BT2020 }, }; static const std::map<ColorSpace::Range, v4l2_quantization> rangeToV4l2 = { { ColorSpace::Range::Full, V4L2_QUANTIZATION_FULL_RANGE }, { ColorSpace::Range::Limited, V4L2_QUANTIZATION_LIM_RANGE }, }; /** * \brief Convert the color space fields in a V4L2 format to a ColorSpace * \param[in] v4l2Format A V4L2 format containing color space information * \param[in] colourEncoding Type of colour encoding * * The colorspace, ycbcr_enc, xfer_func and quantization fields within a * V4L2 format structure are converted to a corresponding ColorSpace. 
* * If any V4L2 fields are not recognised then we return an "unset" * color space. * * \return The ColorSpace corresponding to the input V4L2 format * \retval std::nullopt One or more V4L2 color space fields were not recognised */ template<typename T> std::optional<ColorSpace> V4L2Device::toColorSpace(const T &v4l2Format, PixelFormatInfo::ColourEncoding colourEncoding) { auto itColor = v4l2ToColorSpace.find(v4l2Format.colorspace); if (itColor == v4l2ToColorSpace.end()) return std::nullopt; /* This sets all the color space fields to the correct "default" values. */ ColorSpace colorSpace = itColor->second; if (v4l2Format.xfer_func != V4L2_XFER_FUNC_DEFAULT) { auto itTransfer = v4l2ToTransferFunction.find(v4l2Format.xfer_func); if (itTransfer == v4l2ToTransferFunction.end()) return std::nullopt; colorSpace.transferFunction = itTransfer->second; } if (v4l2Format.ycbcr_enc != V4L2_YCBCR_ENC_DEFAULT) { auto itYcbcrEncoding = v4l2ToYcbcrEncoding.find(v4l2Format.ycbcr_enc); if (itYcbcrEncoding == v4l2ToYcbcrEncoding.end()) return std::nullopt; colorSpace.ycbcrEncoding = itYcbcrEncoding->second; /* * V4L2 has no "none" encoding, override the value returned by * the kernel for non-YUV formats as YCbCr encoding isn't * applicable in that case. */ if (colourEncoding != PixelFormatInfo::ColourEncodingYUV) colorSpace.ycbcrEncoding = ColorSpace::YcbcrEncoding::None; } if (v4l2Format.quantization != V4L2_QUANTIZATION_DEFAULT) { auto itRange = v4l2ToRange.find(v4l2Format.quantization); if (itRange == v4l2ToRange.end()) return std::nullopt; colorSpace.range = itRange->second; /* * "Limited" quantization range is only meant for YUV formats. * Override the range to "Full" for all other formats. */ if (colourEncoding != PixelFormatInfo::ColourEncodingYUV) colorSpace.range = ColorSpace::Range::Full; } return colorSpace; } template std::optional<ColorSpace> V4L2Device::toColorSpace(const struct v4l2_pix_format &, PixelFormatInfo::ColourEncoding); template std::optional<ColorSpace> V4L2Device::toColorSpace(const struct v4l2_pix_format_mplane &, PixelFormatInfo::ColourEncoding); template std::optional<ColorSpace> V4L2Device::toColorSpace(const struct v4l2_mbus_framefmt &, PixelFormatInfo::ColourEncoding); /** * \brief Fill in the color space fields of a V4L2 format from a ColorSpace * \param[in] colorSpace The ColorSpace to be converted * \param[out] v4l2Format A V4L2 format containing color space information * * The colorspace, ycbcr_enc, xfer_func and quantization fields within a * V4L2 format structure are filled in from a corresponding ColorSpace. * * An error is returned if any of the V4L2 fields do not support the * value given in the ColorSpace. Such fields are set to the V4L2 * "default" values, but all other fields are still filled in where * possible. * * If the color space is completely unset, "default" V4L2 values are used * everywhere, so a driver would then choose its preferred color space. 
* * \return 0 on success or a negative error code otherwise * \retval -EINVAL The ColorSpace does not have a representation using V4L2 enums */ template<typename T> int V4L2Device::fromColorSpace(const std::optional<ColorSpace> &colorSpace, T &v4l2Format) { v4l2Format.colorspace = V4L2_COLORSPACE_DEFAULT; v4l2Format.xfer_func = V4L2_XFER_FUNC_DEFAULT; v4l2Format.ycbcr_enc = V4L2_YCBCR_ENC_DEFAULT; v4l2Format.quantization = V4L2_QUANTIZATION_DEFAULT; if (!colorSpace) return 0; auto itColor = std::find_if(colorSpaceToV4l2.begin(), colorSpaceToV4l2.end(), [&colorSpace](const auto &item) { return colorSpace == item.first; }); if (itColor != colorSpaceToV4l2.end()) { v4l2Format.colorspace = itColor->second; /* Leaving all the other fields as "default" should be fine. */ return 0; } /* * If the colorSpace doesn't precisely match a standard color space, * then we must choose a V4L2 colorspace with matching primaries. */ int ret = 0; auto itPrimaries = primariesToV4l2.find(colorSpace->primaries); if (itPrimaries != primariesToV4l2.end()) { v4l2Format.colorspace = itPrimaries->second; } else { libcamera::LOG(V4L2, Warning) << "Unrecognised primaries in " << ColorSpace::toString(colorSpace); ret = -EINVAL; } auto itTransfer = transferFunctionToV4l2.find(colorSpace->transferFunction); if (itTransfer != transferFunctionToV4l2.end()) { v4l2Format.xfer_func = itTransfer->second; } else { libcamera::LOG(V4L2, Warning) << "Unrecognised transfer function in " << ColorSpace::toString(colorSpace); ret = -EINVAL; } auto itYcbcrEncoding = ycbcrEncodingToV4l2.find(colorSpace->ycbcrEncoding); if (itYcbcrEncoding != ycbcrEncodingToV4l2.end()) { v4l2Format.ycbcr_enc = itYcbcrEncoding->second; } else { libcamera::LOG(V4L2, Warning) << "Unrecognised YCbCr encoding in " << ColorSpace::toString(colorSpace); ret = -EINVAL; } auto itRange = rangeToV4l2.find(colorSpace->range); if (itRange != rangeToV4l2.end()) { v4l2Format.quantization = itRange->second; } else { libcamera::LOG(V4L2, Warning) << "Unrecognised quantization in " << ColorSpace::toString(colorSpace); ret = -EINVAL; } return ret; } template int V4L2Device::fromColorSpace(const std::optional<ColorSpace> &, struct v4l2_pix_format &); template int V4L2Device::fromColorSpace(const std::optional<ColorSpace> &, struct v4l2_pix_format_mplane &); template int V4L2Device::fromColorSpace(const std::optional<ColorSpace> &, struct v4l2_mbus_framefmt &); } /* namespace libcamera */
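/*
 * Illustrative usage sketch, not part of the upstream file: driving the
 * control read/write API documented above. V4L2Device cannot be instantiated
 * directly, so the sketch operates on a pointer to an instance of a derived
 * class (e.g. a V4L2Subdevice) obtained elsewhere; the control IDs are
 * standard V4L2 ones and the function name is a placeholder.
 */
#include <errno.h>

#include <linux/v4l2-controls.h>

#include <libcamera/controls.h>

#include "libcamera/internal/v4l2_device.h"

using namespace libcamera;

int applyExposure(V4L2Device *dev, int32_t exposure, int32_t gain)
{
	/* An empty list indicates that reading the controls failed. */
	ControlList ctrls = dev->getControls({ V4L2_CID_EXPOSURE,
					       V4L2_CID_ANALOGUE_GAIN });
	if (ctrls.empty())
		return -EINVAL;

	/* Update the values and write them back in a single ioctl. */
	ctrls.set(V4L2_CID_EXPOSURE, ControlValue(exposure));
	ctrls.set(V4L2_CID_ANALOGUE_GAIN, ControlValue(gain));

	/* setControls() stores the values actually applied back in ctrls. */
	return dev->setControls(&ctrls);
}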
0
repos/libcamera/src
repos/libcamera/src/libcamera/ipc_unixsocket.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2019, Google Inc. * * IPC mechanism based on Unix sockets */ #include "libcamera/internal/ipc_unixsocket.h" #include <array> #include <poll.h> #include <string.h> #include <sys/socket.h> #include <unistd.h> #include <libcamera/base/event_notifier.h> #include <libcamera/base/log.h> /** * \file ipc_unixsocket.h * \brief IPC mechanism based on Unix sockets */ namespace libcamera { LOG_DEFINE_CATEGORY(IPCUnixSocket) /** * \struct IPCUnixSocket::Payload * \brief Container for an IPC payload * * Holds an array of bytes and an array of file descriptors that can be * transported across an IPC boundary. */ /** * \var IPCUnixSocket::Payload::data * \brief Array of bytes to cross IPC boundary */ /** * \var IPCUnixSocket::Payload::fds * \brief Array of file descriptors to cross IPC boundary */ /** * \class IPCUnixSocket * \brief IPC mechanism based on Unix sockets * * The Unix socket IPC allows bidirectional communication between two processes * through unnamed Unix sockets. It implements datagram-based communication, * transporting entire payloads with guaranteed ordering. * * The IPC design is asynchronous: a message is queued to a receiver which gets * notified that a message is ready to be consumed by the \ref readyRead * signal. The sender of the message gets no notification when a message is * delivered nor processed. If such interactions are needed, a protocol specific * to the user's use case should be implemented on top of the IPC objects. * * Establishment of an IPC channel is asymmetrical. The side that initiates * communication first instantiates a local side socket and creates the channel * with create(). The function returns a file descriptor for the remote side of * the channel, which is passed to the remote process through an out-of-band * communication method. The remote side then instantiates a socket, and binds * it to the other side by passing the file descriptor to bind(). At that point * the channel is operational and communication is bidirectional and symmetrical. * * \context This class is \threadbound. */ IPCUnixSocket::IPCUnixSocket() : headerReceived_(false), notifier_(nullptr) { } IPCUnixSocket::~IPCUnixSocket() { close(); } /** * \brief Create a new IPC channel * * This function creates a new IPC channel. The socket instance is bound to the * local side of the channel, and the function returns a file descriptor bound * to the remote side. The caller is responsible for passing the file descriptor * to the remote process, where it can be used with IPCUnixSocket::bind() to * bind the remote side socket. * * \return A file descriptor. It is valid on success or invalid otherwise. */ UniqueFD IPCUnixSocket::create() { int sockets[2]; int ret; ret = socketpair(AF_UNIX, SOCK_DGRAM | SOCK_NONBLOCK, 0, sockets); if (ret) { ret = -errno; LOG(IPCUnixSocket, Error) << "Failed to create socket pair: " << strerror(-ret); return {}; } std::array<UniqueFD, 2> socketFds{ UniqueFD(sockets[0]), UniqueFD(sockets[1]), }; if (bind(std::move(socketFds[0])) < 0) return {}; return std::move(socketFds[1]); } /** * \brief Bind to an existing IPC channel * \param[in] fd File descriptor * * This function binds the socket instance to an existing IPC channel identified * by the file descriptor \a fd. The file descriptor is obtained from the * IPCUnixSocket::create() function.
* * \return 0 on success or a negative error code otherwise */ int IPCUnixSocket::bind(UniqueFD fd) { if (isBound()) return -EINVAL; fd_ = std::move(fd); notifier_ = new EventNotifier(fd_.get(), EventNotifier::Read); notifier_->activated.connect(this, &IPCUnixSocket::dataNotifier); return 0; } /** * \brief Close the IPC channel * * No communication is possible after close() has been called. */ void IPCUnixSocket::close() { if (!isBound()) return; delete notifier_; notifier_ = nullptr; fd_.reset(); headerReceived_ = false; } /** * \brief Check if the IPC channel is bound * \return True if the IPC channel is bound, false otherwise */ bool IPCUnixSocket::isBound() const { return fd_.isValid(); } /** * \brief Send a message payload * \param[in] payload Message payload to send * * This function queues the message payload for transmission to the other end of * the IPC channel. It returns immediately, before the message is delivered to * the remote side. * * \return 0 on success or a negative error code otherwise */ int IPCUnixSocket::send(const Payload &payload) { int ret; if (!isBound()) return -ENOTCONN; Header hdr = {}; hdr.data = payload.data.size(); hdr.fds = payload.fds.size(); if (!hdr.data && !hdr.fds) return -EINVAL; ret = ::send(fd_.get(), &hdr, sizeof(hdr), 0); if (ret < 0) { ret = -errno; LOG(IPCUnixSocket, Error) << "Failed to send: " << strerror(-ret); return ret; } return sendData(payload.data.data(), hdr.data, payload.fds.data(), hdr.fds); } /** * \brief Receive a message payload * \param[out] payload Payload where to write the received message * * This function receives the message payload from the IPC channel and writes it * to the \a payload. If no message payload is available, it returns * immediately with -EAGAIN. The \ref readyRead signal shall be used to receive * notification of message availability. * * \todo Add state machine to make sure we don't block forever and that * a header is always followed by a payload. 
* * \return 0 on success or a negative error code otherwise * \retval -EAGAIN No message payload is available * \retval -ENOTCONN The socket is not connected (neither create() nor bind() * has been called) */ int IPCUnixSocket::receive(Payload *payload) { if (!isBound()) return -ENOTCONN; if (!headerReceived_) return -EAGAIN; payload->data.resize(header_.data); payload->fds.resize(header_.fds); int ret = recvData(payload->data.data(), header_.data, payload->fds.data(), header_.fds); if (ret < 0) return ret; headerReceived_ = false; notifier_->setEnabled(true); return 0; } /** * \var IPCUnixSocket::readyRead * \brief A Signal emitted when a message is ready to be read */ int IPCUnixSocket::sendData(const void *buffer, size_t length, const int32_t *fds, unsigned int num) { struct iovec iov[1]; iov[0].iov_base = const_cast<void *>(buffer); iov[0].iov_len = length; char buf[CMSG_SPACE(num * sizeof(uint32_t))]; memset(buf, 0, sizeof(buf)); struct cmsghdr *cmsg = (struct cmsghdr *)buf; cmsg->cmsg_len = CMSG_LEN(num * sizeof(uint32_t)); cmsg->cmsg_level = SOL_SOCKET; cmsg->cmsg_type = SCM_RIGHTS; struct msghdr msg; msg.msg_name = nullptr; msg.msg_namelen = 0; msg.msg_iov = iov; msg.msg_iovlen = 1; msg.msg_control = cmsg; msg.msg_controllen = cmsg->cmsg_len; msg.msg_flags = 0; if (fds) memcpy(CMSG_DATA(cmsg), fds, num * sizeof(uint32_t)); if (sendmsg(fd_.get(), &msg, 0) < 0) { int ret = -errno; LOG(IPCUnixSocket, Error) << "Failed to sendmsg: " << strerror(-ret); return ret; } return 0; } int IPCUnixSocket::recvData(void *buffer, size_t length, int32_t *fds, unsigned int num) { struct iovec iov[1]; iov[0].iov_base = buffer; iov[0].iov_len = length; char buf[CMSG_SPACE(num * sizeof(uint32_t))]; memset(buf, 0, sizeof(buf)); struct cmsghdr *cmsg = (struct cmsghdr *)buf; cmsg->cmsg_len = CMSG_LEN(num * sizeof(uint32_t)); cmsg->cmsg_level = SOL_SOCKET; cmsg->cmsg_type = SCM_RIGHTS; struct msghdr msg; msg.msg_name = nullptr; msg.msg_namelen = 0; msg.msg_iov = iov; msg.msg_iovlen = 1; msg.msg_control = cmsg; msg.msg_controllen = cmsg->cmsg_len; msg.msg_flags = 0; if (recvmsg(fd_.get(), &msg, 0) < 0) { int ret = -errno; if (ret != -EAGAIN) LOG(IPCUnixSocket, Error) << "Failed to recvmsg: " << strerror(-ret); return ret; } if (fds) memcpy(fds, CMSG_DATA(cmsg), num * sizeof(uint32_t)); return 0; } void IPCUnixSocket::dataNotifier() { int ret; if (!headerReceived_) { /* Receive the header. */ ret = ::recv(fd_.get(), &header_, sizeof(header_), 0); if (ret < 0) { ret = -errno; LOG(IPCUnixSocket, Error) << "Failed to receive header: " << strerror(-ret); return; } headerReceived_ = true; } /* * If the payload has arrived, disable the notifier and emit the * readyRead signal. The notifier will be reenabled by the receive() * function. */ struct pollfd fds = { fd_.get(), POLLIN, 0 }; ret = poll(&fds, 1, 0); if (ret < 0) return; if (!(fds.revents & POLLIN)) return; notifier_->setEnabled(false); readyRead.emit(); } } /* namespace libcamera */
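/*
 * Illustrative usage sketch, not part of the upstream file: establishing a
 * channel and queueing a payload. Both ends live in the same process here
 * purely to show the create()/bind()/send() sequence; in real use the file
 * descriptor returned by create() is passed to a remote process, and
 * receive() is called from a slot connected to the readyRead signal of a
 * running event loop.
 */
#include <errno.h>
#include <utility>

#include <libcamera/base/unique_fd.h>

#include "libcamera/internal/ipc_unixsocket.h"

using namespace libcamera;

int setupAndPing()
{
	IPCUnixSocket local, remote;

	/* create() binds 'local' and returns the fd for the other side. */
	UniqueFD fd = local.create();
	if (!fd.isValid())
		return -ENODEV;

	int ret = remote.bind(std::move(fd));
	if (ret)
		return ret;

	/* Queue a 4-byte datagram; delivery is signalled by readyRead. */
	IPCUnixSocket::Payload payload;
	payload.data = { 'p', 'i', 'n', 'g' };

	return local.send(payload);
}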
0
repos/libcamera/src
repos/libcamera/src/libcamera/v4l2_pixelformat.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2019, Google Inc. * Copyright (C) 2020, Raspberry Pi Ltd * * V4L2 Pixel Format */ #include "libcamera/internal/v4l2_pixelformat.h" #include <ctype.h> #include <map> #include <string.h> #include <libcamera/base/log.h> #include <libcamera/formats.h> #include <libcamera/pixel_format.h> #include "libcamera/internal/formats.h" /** * \file v4l2_pixelformat.h * \brief V4L2 Pixel Format */ namespace libcamera { LOG_DECLARE_CATEGORY(V4L2) /** * \class V4L2PixelFormat * \brief V4L2 pixel format FourCC wrapper * * The V4L2PixelFormat class describes the pixel format of a V4L2 buffer. It * wraps the V4L2 numerical FourCC, and shall be used in all APIs that deal with * V4L2 pixel formats. Its purpose is to prevent unintentional confusion of * V4L2 and DRM FourCCs in code by catching implicit conversion attempts at * compile time. * * To achieve this goal, construction of a V4L2PixelFormat from an integer value * is explicit. To retrieve the integer value of a V4L2PixelFormat, both the * explicit value() and implicit uint32_t conversion operators may be used. */ namespace { const std::map<V4L2PixelFormat, V4L2PixelFormat::Info> vpf2pf{ /* RGB formats. */ { V4L2PixelFormat(V4L2_PIX_FMT_RGB565), { formats::RGB565, "16-bit RGB 5-6-5" } }, { V4L2PixelFormat(V4L2_PIX_FMT_RGB565X), { formats::RGB565_BE, "16-bit RGB 5-6-5 BE" } }, { V4L2PixelFormat(V4L2_PIX_FMT_RGB24), { formats::BGR888, "24-bit RGB 8-8-8" } }, { V4L2PixelFormat(V4L2_PIX_FMT_BGR24), { formats::RGB888, "24-bit BGR 8-8-8" } }, { V4L2PixelFormat(V4L2_PIX_FMT_XBGR32), { formats::XRGB8888, "32-bit BGRX 8-8-8-8" } }, { V4L2PixelFormat(V4L2_PIX_FMT_XRGB32), { formats::BGRX8888, "32-bit XRGB 8-8-8-8" } }, { V4L2PixelFormat(V4L2_PIX_FMT_RGBX32), { formats::XBGR8888, "32-bit RGBX 8-8-8-8" } }, { V4L2PixelFormat(V4L2_PIX_FMT_BGRX32), { formats::RGBX8888, "32-bit XBGR 8-8-8-8" } }, { V4L2PixelFormat(V4L2_PIX_FMT_RGBA32), { formats::ABGR8888, "32-bit RGBA 8-8-8-8" } }, { V4L2PixelFormat(V4L2_PIX_FMT_ABGR32), { formats::ARGB8888, "32-bit BGRA 8-8-8-8" } }, { V4L2PixelFormat(V4L2_PIX_FMT_ARGB32), { formats::BGRA8888, "32-bit ARGB 8-8-8-8" } }, { V4L2PixelFormat(V4L2_PIX_FMT_BGRA32), { formats::RGBA8888, "32-bit ABGR 8-8-8-8" } }, { V4L2PixelFormat(V4L2_PIX_FMT_RGB48), { formats::BGR161616, "48-bit RGB 16-16-16" } }, { V4L2PixelFormat(V4L2_PIX_FMT_BGR48), { formats::RGB161616, "48-bit BGR 16-16-16" } }, /* YUV packed formats. */ { V4L2PixelFormat(V4L2_PIX_FMT_YUYV), { formats::YUYV, "YUYV 4:2:2" } }, { V4L2PixelFormat(V4L2_PIX_FMT_YVYU), { formats::YVYU, "YVYU 4:2:2" } }, { V4L2PixelFormat(V4L2_PIX_FMT_UYVY), { formats::UYVY, "UYVY 4:2:2" } }, { V4L2PixelFormat(V4L2_PIX_FMT_VYUY), { formats::VYUY, "VYUY 4:2:2" } }, { V4L2PixelFormat(V4L2_PIX_FMT_YUVA32), { formats::AVUY8888, "32-bit YUVA 8-8-8-8" } }, { V4L2PixelFormat(V4L2_PIX_FMT_YUVX32), { formats::XVUY8888, "32-bit YUVX 8-8-8-8" } }, /* YUV planar formats. 
*/ { V4L2PixelFormat(V4L2_PIX_FMT_NV16), { formats::NV16, "Y/CbCr 4:2:2" } }, { V4L2PixelFormat(V4L2_PIX_FMT_NV16M), { formats::NV16, "Y/CbCr 4:2:2 (N-C)" } }, { V4L2PixelFormat(V4L2_PIX_FMT_NV61), { formats::NV61, "Y/CrCb 4:2:2" } }, { V4L2PixelFormat(V4L2_PIX_FMT_NV61M), { formats::NV61, "Y/CrCb 4:2:2 (N-C)" } }, { V4L2PixelFormat(V4L2_PIX_FMT_NV12), { formats::NV12, "Y/CbCr 4:2:0" } }, { V4L2PixelFormat(V4L2_PIX_FMT_NV12M), { formats::NV12, "Y/CbCr 4:2:0 (N-C)" } }, { V4L2PixelFormat(V4L2_PIX_FMT_NV21), { formats::NV21, "Y/CrCb 4:2:0" } }, { V4L2PixelFormat(V4L2_PIX_FMT_NV21M), { formats::NV21, "Y/CrCb 4:2:0 (N-C)" } }, { V4L2PixelFormat(V4L2_PIX_FMT_NV24), { formats::NV24, "Y/CbCr 4:4:4" } }, { V4L2PixelFormat(V4L2_PIX_FMT_NV42), { formats::NV42, "Y/CrCb 4:4:4" } }, { V4L2PixelFormat(V4L2_PIX_FMT_YUV420), { formats::YUV420, "Planar YUV 4:2:0" } }, { V4L2PixelFormat(V4L2_PIX_FMT_YUV420M), { formats::YUV420, "Planar YUV 4:2:0 (N-C)" } }, { V4L2PixelFormat(V4L2_PIX_FMT_YVU420), { formats::YVU420, "Planar YVU 4:2:0" } }, { V4L2PixelFormat(V4L2_PIX_FMT_YVU420M), { formats::YVU420, "Planar YVU 4:2:0 (N-C)" } }, { V4L2PixelFormat(V4L2_PIX_FMT_YUV422P), { formats::YUV422, "Planar YUV 4:2:2" } }, { V4L2PixelFormat(V4L2_PIX_FMT_YUV422M), { formats::YUV422, "Planar YUV 4:2:2 (N-C)" } }, { V4L2PixelFormat(V4L2_PIX_FMT_YVU422M), { formats::YVU422, "Planar YVU 4:2:2 (N-C)" } }, { V4L2PixelFormat(V4L2_PIX_FMT_YUV444M), { formats::YUV444, "Planar YUV 4:4:4 (N-C)" } }, { V4L2PixelFormat(V4L2_PIX_FMT_YVU444M), { formats::YVU444, "Planar YVU 4:4:4 (N-C)" } }, /* Greyscale formats. */ { V4L2PixelFormat(V4L2_PIX_FMT_GREY), { formats::R8, "8-bit Greyscale" } }, { V4L2PixelFormat(V4L2_PIX_FMT_Y10), { formats::R10, "10-bit Greyscale" } }, { V4L2PixelFormat(V4L2_PIX_FMT_Y10P), { formats::R10_CSI2P, "10-bit Greyscale Packed" } }, { V4L2PixelFormat(V4L2_PIX_FMT_Y12), { formats::R12, "12-bit Greyscale" } }, { V4L2PixelFormat(V4L2_PIX_FMT_Y16), { formats::R16, "16-bit Greyscale" } }, /* Bayer formats. 
*/ { V4L2PixelFormat(V4L2_PIX_FMT_SBGGR8), { formats::SBGGR8, "8-bit Bayer BGBG/GRGR" } }, { V4L2PixelFormat(V4L2_PIX_FMT_SGBRG8), { formats::SGBRG8, "8-bit Bayer GBGB/RGRG" } }, { V4L2PixelFormat(V4L2_PIX_FMT_SGRBG8), { formats::SGRBG8, "8-bit Bayer GRGR/BGBG" } }, { V4L2PixelFormat(V4L2_PIX_FMT_SRGGB8), { formats::SRGGB8, "8-bit Bayer RGRG/GBGB" } }, { V4L2PixelFormat(V4L2_PIX_FMT_SBGGR10), { formats::SBGGR10, "10-bit Bayer BGBG/GRGR" } }, { V4L2PixelFormat(V4L2_PIX_FMT_SGBRG10), { formats::SGBRG10, "10-bit Bayer GBGB/RGRG" } }, { V4L2PixelFormat(V4L2_PIX_FMT_SGRBG10), { formats::SGRBG10, "10-bit Bayer GRGR/BGBG" } }, { V4L2PixelFormat(V4L2_PIX_FMT_SRGGB10), { formats::SRGGB10, "10-bit Bayer RGRG/GBGB" } }, { V4L2PixelFormat(V4L2_PIX_FMT_SBGGR10P), { formats::SBGGR10_CSI2P, "10-bit Bayer BGBG/GRGR Packed" } }, { V4L2PixelFormat(V4L2_PIX_FMT_SGBRG10P), { formats::SGBRG10_CSI2P, "10-bit Bayer GBGB/RGRG Packed" } }, { V4L2PixelFormat(V4L2_PIX_FMT_SGRBG10P), { formats::SGRBG10_CSI2P, "10-bit Bayer GRGR/BGBG Packed" } }, { V4L2PixelFormat(V4L2_PIX_FMT_SRGGB10P), { formats::SRGGB10_CSI2P, "10-bit Bayer RGRG/GBGB Packed" } }, { V4L2PixelFormat(V4L2_PIX_FMT_SBGGR12), { formats::SBGGR12, "12-bit Bayer BGBG/GRGR" } }, { V4L2PixelFormat(V4L2_PIX_FMT_SGBRG12), { formats::SGBRG12, "12-bit Bayer GBGB/RGRG" } }, { V4L2PixelFormat(V4L2_PIX_FMT_SGRBG12), { formats::SGRBG12, "12-bit Bayer GRGR/BGBG" } }, { V4L2PixelFormat(V4L2_PIX_FMT_SRGGB12), { formats::SRGGB12, "12-bit Bayer RGRG/GBGB" } }, { V4L2PixelFormat(V4L2_PIX_FMT_SBGGR12P), { formats::SBGGR12_CSI2P, "12-bit Bayer BGBG/GRGR Packed" } }, { V4L2PixelFormat(V4L2_PIX_FMT_SGBRG12P), { formats::SGBRG12_CSI2P, "12-bit Bayer GBGB/RGRG Packed" } }, { V4L2PixelFormat(V4L2_PIX_FMT_SGRBG12P), { formats::SGRBG12_CSI2P, "12-bit Bayer GRGR/BGBG Packed" } }, { V4L2PixelFormat(V4L2_PIX_FMT_SRGGB12P), { formats::SRGGB12_CSI2P, "12-bit Bayer RGRG/GBGB Packed" } }, { V4L2PixelFormat(V4L2_PIX_FMT_SBGGR14), { formats::SBGGR14, "14-bit Bayer BGBG/GRGR" } }, { V4L2PixelFormat(V4L2_PIX_FMT_SGBRG14), { formats::SGBRG14, "14-bit Bayer GBGB/RGRG" } }, { V4L2PixelFormat(V4L2_PIX_FMT_SGRBG14), { formats::SGRBG14, "14-bit Bayer GRGR/BGBG" } }, { V4L2PixelFormat(V4L2_PIX_FMT_SRGGB14), { formats::SRGGB14, "14-bit Bayer RGRG/GBGB" } }, { V4L2PixelFormat(V4L2_PIX_FMT_SBGGR14P), { formats::SBGGR14_CSI2P, "14-bit Bayer BGBG/GRGR Packed" } }, { V4L2PixelFormat(V4L2_PIX_FMT_SGBRG14P), { formats::SGBRG14_CSI2P, "14-bit Bayer GBGB/RGRG Packed" } }, { V4L2PixelFormat(V4L2_PIX_FMT_SGRBG14P), { formats::SGRBG14_CSI2P, "14-bit Bayer GRGR/BGBG Packed" } }, { V4L2PixelFormat(V4L2_PIX_FMT_SRGGB14P), { formats::SRGGB14_CSI2P, "14-bit Bayer RGRG/GBGB Packed" } }, { V4L2PixelFormat(V4L2_PIX_FMT_SBGGR16), { formats::SBGGR16, "16-bit Bayer BGBG/GRGR" } }, { V4L2PixelFormat(V4L2_PIX_FMT_SGBRG16), { formats::SGBRG16, "16-bit Bayer GBGB/RGRG" } }, { V4L2PixelFormat(V4L2_PIX_FMT_SGRBG16), { formats::SGRBG16, "16-bit Bayer GRGR/BGBG" } }, { V4L2PixelFormat(V4L2_PIX_FMT_SRGGB16), { formats::SRGGB16, "16-bit Bayer RGRG/GBGB" } }, { V4L2PixelFormat(V4L2_PIX_FMT_PISP_COMP1_BGGR), { formats::BGGR_PISP_COMP1, "16-bit Bayer BGBG/GRGR PiSP Compress Mode 1" } }, { V4L2PixelFormat(V4L2_PIX_FMT_PISP_COMP1_GBRG), { formats::GBRG_PISP_COMP1, "16-bit Bayer GBGB/RGRG PiSP Compress Mode 1" } }, { V4L2PixelFormat(V4L2_PIX_FMT_PISP_COMP1_GRBG), { formats::GRBG_PISP_COMP1, "16-bit Bayer GRGR/BGBG PiSP Compress Mode 1" } }, { V4L2PixelFormat(V4L2_PIX_FMT_PISP_COMP1_RGGB), { formats::RGGB_PISP_COMP1, "16-bit Bayer 
RGRG/GBGB PiSP Compress Mode 1" } }, { V4L2PixelFormat(V4L2_PIX_FMT_PISP_COMP1_MONO), { formats::MONO_PISP_COMP1, "16-bit Mono PiSP Compress Mode 1" } }, /* Compressed formats. */ { V4L2PixelFormat(V4L2_PIX_FMT_MJPEG), { formats::MJPEG, "Motion-JPEG" } }, { V4L2PixelFormat(V4L2_PIX_FMT_JPEG), { formats::MJPEG, "JPEG JFIF" } }, }; } /* namespace */ /** * \struct V4L2PixelFormat::Info * \brief Information about a V4L2 pixel format * * \var V4L2PixelFormat::Info::format * \brief The corresponding libcamera PixelFormat * * \sa PixelFormat * * \var V4L2PixelFormat::Info::description * \brief The human-readable description of the V4L2 pixel format */ /** * \fn V4L2PixelFormat::V4L2PixelFormat() * \brief Construct a V4L2PixelFormat with an invalid format * * V4L2PixelFormat instances constructed with the default constructor are * invalid, calling the isValid() function returns false. */ /** * \fn V4L2PixelFormat::V4L2PixelFormat(uint32_t fourcc) * \brief Construct a V4L2PixelFormat from a FourCC value * \param[in] fourcc The pixel format FourCC numerical value */ /** * \fn bool V4L2PixelFormat::isValid() const * \brief Check if the pixel format is valid * * V4L2PixelFormat instances constructed with the default constructor are * invalid. Instances constructed with a FourCC defined in the V4L2 API are * valid. The behaviour is undefined otherwise. * * \return True if the pixel format is valid, false otherwise */ /** * \fn uint32_t V4L2PixelFormat::fourcc() const * \brief Retrieve the pixel format FourCC numerical value * \return The pixel format FourCC numerical value */ /** * \fn V4L2PixelFormat::operator uint32_t() const * \brief Convert to the pixel format FourCC numerical value * \return The pixel format FourCC numerical value */ /** * \brief Assemble and return a string describing the pixel format * \return A string describing the pixel format */ std::string V4L2PixelFormat::toString() const { if (fourcc_ == 0) return "<INVALID>"; char ss[8] = { static_cast<char>(fourcc_ & 0x7f), static_cast<char>((fourcc_ >> 8) & 0x7f), static_cast<char>((fourcc_ >> 16) & 0x7f), static_cast<char>((fourcc_ >> 24) & 0x7f) }; for (unsigned int i = 0; i < 4; i++) { if (!isprint(ss[i])) ss[i] = '.'; } if (fourcc_ & (1 << 31)) strcat(ss, "-BE"); return ss; } /** * \brief Retrieve the V4L2 description for the format * * The description matches the value used by the kernel, as would be reported * by the VIDIOC_ENUM_FMT ioctl. * * \return The V4L2 description corresponding to the V4L2 format, or a * placeholder description if not found */ const char *V4L2PixelFormat::description() const { const auto iter = vpf2pf.find(*this); if (iter == vpf2pf.end()) { LOG(V4L2, Warning) << "Unsupported V4L2 pixel format " << toString(); return "Unsupported format"; } return iter->second.description; } /** * \brief Convert the V4L2 pixel format to the corresponding PixelFormat * \param[in] warn When true, log a warning message if the V4L2 pixel format * isn't known * * Users of this function might try to convert a V4L2PixelFormat to a * PixelFormat just to check if the format is supported or not. In that case, * they can suppress the warning message by setting the \a warn argument to * false to not pollute the log with unnecessary messages. 
* * \return The PixelFormat corresponding to the V4L2 pixel format */ PixelFormat V4L2PixelFormat::toPixelFormat(bool warn) const { const auto iter = vpf2pf.find(*this); if (iter == vpf2pf.end()) { if (warn) LOG(V4L2, Warning) << "Unsupported V4L2 pixel format " << toString(); return PixelFormat(); } return iter->second.format; } /** * \brief Retrieve the list of V4L2PixelFormat associated with \a pixelFormat * \param[in] pixelFormat The PixelFormat to convert * * Multiple V4L2 formats may exist for one PixelFormat as V4L2 defines separate * 4CCs for contiguous and non-contiguous versions of the same image format. * * \return The list of V4L2PixelFormat corresponding to \a pixelFormat */ const std::vector<V4L2PixelFormat> & V4L2PixelFormat::fromPixelFormat(const PixelFormat &pixelFormat) { static const std::vector<V4L2PixelFormat> empty; const PixelFormatInfo &info = PixelFormatInfo::info(pixelFormat); if (!info.isValid()) return empty; return info.v4l2Formats; } /** * \brief Insert a text representation of a V4L2PixelFormat into an output * stream * \param[in] out The output stream * \param[in] f The V4L2PixelFormat * \return The output stream \a out */ std::ostream &operator<<(std::ostream &out, const V4L2PixelFormat &f) { out << f.toString(); return out; } } /* namespace libcamera */
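As a usage illustration of the conversion helpers documented above, here is a minimal sketch, not part of the original file, that round-trips between a V4L2 FourCC and a libcamera PixelFormat. It assumes a build tree where the internal v4l2_pixelformat.h header is reachable, and reduces error handling to a validity check.

#include <iostream>

#include <libcamera/pixel_format.h>

#include "libcamera/internal/v4l2_pixelformat.h"

using namespace libcamera;

int main()
{
	/* Wrap a raw FourCC and map it to the corresponding PixelFormat. */
	V4L2PixelFormat v4l2Format(V4L2_PIX_FMT_SBGGR10P);
	PixelFormat format = v4l2Format.toPixelFormat();
	if (!format.isValid())
		return 1;

	/* One PixelFormat may map back to several V4L2 FourCCs. */
	for (const V4L2PixelFormat &fmt : V4L2PixelFormat::fromPixelFormat(format))
		std::cout << fmt << " (" << fmt.description() << ")" << std::endl;

	return 0;
}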
0
repos/libcamera/src
repos/libcamera/src/libcamera/framebuffer_allocator.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2019, Google Inc. * * FrameBuffer allocator */ #include <libcamera/framebuffer_allocator.h> #include <errno.h> #include <libcamera/base/log.h> #include <libcamera/camera.h> #include <libcamera/framebuffer.h> #include <libcamera/stream.h> #include "libcamera/internal/pipeline_handler.h" /** * \file framebuffer_allocator.h * \brief FrameBuffer allocator */ namespace libcamera { LOG_DEFINE_CATEGORY(Allocator) /** * \class FrameBufferAllocator * \brief FrameBuffer allocator for applications * * The libcamera API is designed to consume buffers provided by applications as * FrameBuffer instances. This makes libcamera a user of buffers exported by * other devices (such as displays or video encoders), or allocated from an * external allocator (such as ION on Android platforms). In some situations, * applications do not have any means to allocate or get hold of suitable * buffers, for instance when no other device is involved, on Linux platforms * that lack a centralized allocator. The FrameBufferAllocator class provides a * buffer allocator that can be used in these situations. * * Applications create a framebuffer allocator for a Camera, and use it to * allocate buffers for streams of a CameraConfiguration with allocate(). They * control which streams to allocate buffers for, and can thus use external * buffers for a subset of the streams if desired. * * Buffers are deleted for a stream with free(), and destroying the allocator * automatically deletes all allocated buffers. Applications own the buffers * allocated by the FrameBufferAllocator and are responsible for ensuring the * buffers are not deleted while they are in use (part of a Request that has * been queued and hasn't completed yet). * * Usage of the FrameBufferAllocator is optional, if all buffers for a camera * are provided externally applications shall not use this class. */ /** * \brief Construct a FrameBufferAllocator serving a camera * \param[in] camera The camera */ FrameBufferAllocator::FrameBufferAllocator(std::shared_ptr<Camera> camera) : camera_(std::move(camera)) { } FrameBufferAllocator::~FrameBufferAllocator() = default; /** * \brief Allocate buffers for a configured stream * \param[in] stream The stream to allocate buffers for * * Allocate buffers suitable for capturing frames from the \a stream. The Camera * shall have been previously configured with Camera::configure() and shall be * stopped, and the stream shall be part of the active camera configuration. * * Upon successful allocation, the allocated buffers can be retrieved with the * buffers() function. 
 * * \return The number of allocated buffers on success or a negative error code * otherwise * \retval -EACCES The camera is not in a state where buffers can be allocated * \retval -EINVAL The \a stream does not belong to the camera or the stream is * not part of the active camera configuration * \retval -EBUSY Buffers are already allocated for the \a stream */ int FrameBufferAllocator::allocate(Stream *stream) { const auto &[it, inserted] = buffers_.try_emplace(stream); if (!inserted) { LOG(Allocator, Error) << "Buffers already allocated for stream"; return -EBUSY; } int ret = camera_->exportFrameBuffers(stream, &it->second); if (ret == -EINVAL) LOG(Allocator, Error) << "Stream is not part of " << camera_->id() << " active configuration"; if (ret < 0) buffers_.erase(it); return ret; } /** * \brief Free buffers previously allocated for a \a stream * \param[in] stream The stream * * Free buffers allocated with allocate(). * * This invalidates the buffers returned by buffers(). * * \return 0 on success or a negative error code otherwise * \retval -EACCES The camera is not in a state where buffers can be freed * \retval -EINVAL The allocator does not handle the \a stream */ int FrameBufferAllocator::free(Stream *stream) { auto iter = buffers_.find(stream); if (iter == buffers_.end()) return -EINVAL; buffers_.erase(iter); return 0; } /** * \fn FrameBufferAllocator::allocated() * \brief Check if the allocator has allocated buffers for any stream * \return True if the allocator has allocated buffers for one or more * streams, false otherwise */ /** * \brief Retrieve the buffers allocated for a \a stream * \param[in] stream The stream to retrieve buffers for * * This function shall only be called after successfully allocating buffers for * \a stream with allocate(). The returned buffers are valid until free() is * called for the same stream or the FrameBufferAllocator instance is destroyed. * * \return The buffers allocated for the \a stream */ const std::vector<std::unique_ptr<FrameBuffer>> & FrameBufferAllocator::buffers(Stream *stream) const { static const std::vector<std::unique_ptr<FrameBuffer>> empty; auto iter = buffers_.find(stream); if (iter == buffers_.end()) return empty; return iter->second; } } /* namespace libcamera */
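The allocation flow described above amounts to a few calls. Here is a minimal sketch, not part of the original file, where `camera` is assumed to be an acquired and configured Camera and `stream` part of its active configuration; the helper name is illustrative only.

#include <memory>
#include <vector>

#include <libcamera/camera.h>
#include <libcamera/framebuffer.h>
#include <libcamera/framebuffer_allocator.h>
#include <libcamera/stream.h>

using namespace libcamera;

int allocateForStream(std::shared_ptr<Camera> camera, Stream *stream)
{
	FrameBufferAllocator allocator(camera);

	/* On success, allocate() returns the number of buffers allocated. */
	int ret = allocator.allocate(stream);
	if (ret < 0)
		return ret;

	/* Buffers stay valid until free() or allocator destruction. */
	const std::vector<std::unique_ptr<FrameBuffer>> &buffers =
		allocator.buffers(stream);
	(void)buffers; /* Attach these to Requests before queueing. */

	return allocator.free(stream);
}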
0
repos/libcamera/src
repos/libcamera/src/libcamera/geometry.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2019, Google Inc. * * Geometry-related structures */ #include <libcamera/geometry.h> #include <sstream> #include <stdint.h> #include <libcamera/base/log.h> /** * \file geometry.h * \brief Data structures related to geometric objects */ namespace libcamera { /** * \class Point * \brief Describe a point in two-dimensional space * * The Point structure defines a point in two-dimensional space with integer * precision. The coordinates of a Point may be negative as well as positive. */ /** * \fn Point::Point() * \brief Construct a Point with x and y set to 0 */ /** * \fn Point::Point(int xpos, int ypos) * \brief Construct a Point at given \a xpos and \a ypos values * \param[in] xpos The x-coordinate * \param[in] ypos The y-coordinate */ /** * \var Point::x * \brief The x-coordinate of the Point */ /** * \var Point::y * \brief The y-coordinate of the Point */ /** * \brief Assemble and return a string describing the point * \return A string describing the point */ const std::string Point::toString() const { std::stringstream ss; ss << *this; return ss.str(); } /** * \fn Point Point::operator-() const * \brief Negate a Point by negating both its x and y coordinates * \return The negated point */ /** * \brief Compare points for equality * \return True if the two points are equal, false otherwise */ bool operator==(const Point &lhs, const Point &rhs) { return lhs.x == rhs.x && lhs.y == rhs.y; } /** * \fn bool operator!=(const Point &lhs, const Point &rhs) * \brief Compare points for inequality * \return True if the two points are not equal, false otherwise */ /** * \brief Insert a text representation of a Point into an output stream * \param[in] out The output stream * \param[in] p The point * \return The output stream \a out */ std::ostream &operator<<(std::ostream &out, const Point &p) { out << "(" << p.x << ", " << p.y << ")"; return out; } /** * \class Size * \brief Describe a two-dimensional size * * The Size class defines a two-dimensional size with integer precision. */ /** * \fn Size::Size() * \brief Construct a Size with width and height set to 0 */ /** * \fn Size::Size(unsigned int width, unsigned int height) * \brief Construct a Size with given \a width and \a height * \param[in] width The Size width * \param[in] height The Size height */ /** * \var Size::width * \brief The Size width */ /** * \var Size::height * \brief The Size height */ /** * \fn bool Size::isNull() const * \brief Check if the size is null * \return True if both the width and height are 0, or false otherwise */ /** * \brief Assemble and return a string describing the size * \return A string describing the size */ const std::string Size::toString() const { std::stringstream ss; ss << *this; return ss.str(); } /** * \fn Size::alignDownTo(unsigned int hAlignment, unsigned int vAlignment) * \brief Align the size down horizontally and vertically in place * \param[in] hAlignment Horizontal alignment * \param[in] vAlignment Vertical alignment * * This functions rounds the width and height down to the nearest multiple of * \a hAlignment and \a vAlignment respectively. * * \return A reference to this object */ /** * \fn Size::alignUpTo(unsigned int hAlignment, unsigned int vAlignment) * \brief Align the size up horizontally and vertically in place * \param[in] hAlignment Horizontal alignment * \param[in] vAlignment Vertical alignment * * This functions rounds the width and height up to the nearest multiple of * \a hAlignment and \a vAlignment respectively. 
* * \return A reference to this object */ /** * \fn Size::boundTo(const Size &bound) * \brief Bound the size to \a bound in place * \param[in] bound The maximum size * * This function sets the width and height to the minimum of this size and the * \a bound size. * * \return A reference to this object */ /** * \fn Size::expandTo(const Size &expand) * \brief Expand the size to \a expand * \param[in] expand The minimum size * * This function sets the width and height to the maximum of this size and the * \a expand size. * * \return A reference to this object */ /** * \fn Size::growBy(const Size &margins) * \brief Grow the size by \a margins in place * \param[in] margins The margins to add to the size * * This function adds the width and height of the \a margin size to this size. * * \return A reference to this object */ /** * \fn Size::shrinkBy(const Size &margins) * \brief Shrink the size by \a margins in place * \param[in] margins The margins to subtract to the size * * This function subtracts the width and height of the \a margin size from this * size. If the width or height of the size are smaller than those of \a * margins, the result is clamped to 0. * * \return A reference to this object */ /** * \fn Size::alignedDownTo(unsigned int hAlignment, unsigned int vAlignment) * \brief Align the size down horizontally and vertically * \param[in] hAlignment Horizontal alignment * \param[in] vAlignment Vertical alignment * \return A Size whose width and height are equal to the width and height of * this size rounded down to the nearest multiple of \a hAlignment and * \a vAlignment respectively */ /** * \fn Size::alignedUpTo(unsigned int hAlignment, unsigned int vAlignment) * \brief Align the size up horizontally and vertically * \param[in] hAlignment Horizontal alignment * \param[in] vAlignment Vertical alignment * \return A Size whose width and height are equal to the width and height of * this size rounded up to the nearest multiple of \a hAlignment and * \a vAlignment respectively */ /** * \fn Size::boundedTo(const Size &bound) * \brief Bound the size to \a bound * \param[in] bound The maximum size * \return A Size whose width and height are the minimum of the width and * height of this size and the \a bound size */ /** * \fn Size::expandedTo(const Size &expand) * \brief Expand the size to \a expand * \param[in] expand The minimum size * \return A Size whose width and height are the maximum of the width and * height of this size and the \a expand size */ /** * \fn Size::grownBy(const Size &margins) * \brief Grow the size by \a margins * \param[in] margins The margins to add to the size * \return A Size whose width and height are the sum of the width and height of * this size and the \a margins size */ /** * \fn Size::shrunkBy(const Size &margins) * \brief Shrink the size by \a margins * \param[in] margins The margins to subtract to the size * * If the width or height of the size are smaller than those of \a margins, the * resulting size has its width or height clamped to 0. * * \return A Size whose width and height are the difference of the width and * height of this size and the \a margins size, clamped to 0 */ /** * \brief Bound the size down to match the aspect ratio given by \a ratio * \param[in] ratio The size whose aspect ratio must be matched * * The behaviour of this function is undefined if either the width or the * height of the \a ratio is zero. 
* * \return A Size whose width and height are equal to the width and height * of this Size aligned down to the aspect ratio of \a ratio */ Size Size::boundedToAspectRatio(const Size &ratio) const { ASSERT(ratio.width && ratio.height); uint64_t ratio1 = static_cast<uint64_t>(width) * static_cast<uint64_t>(ratio.height); uint64_t ratio2 = static_cast<uint64_t>(ratio.width) * static_cast<uint64_t>(height); if (ratio1 > ratio2) return { static_cast<unsigned int>(ratio2 / ratio.height), height }; else return { width, static_cast<unsigned int>(ratio1 / ratio.width) }; } /** * \brief Expand the size to match the aspect ratio given by \a ratio * \param[in] ratio The size whose aspect ratio must be matched * * The behaviour of this function is undefined if either the width or the * height of the \a ratio is zero. * * \return A Size whose width and height are equal to the width and height * of this Size expanded up to the aspect ratio of \a ratio */ Size Size::expandedToAspectRatio(const Size &ratio) const { ASSERT(ratio.width && ratio.height); uint64_t ratio1 = static_cast<uint64_t>(width) * static_cast<uint64_t>(ratio.height); uint64_t ratio2 = static_cast<uint64_t>(ratio.width) * static_cast<uint64_t>(height); if (ratio1 < ratio2) return { static_cast<unsigned int>(ratio2 / ratio.height), height }; else return { width, static_cast<unsigned int>(ratio1 / ratio.width) }; } /** * \brief Center a rectangle of this size at a given Point * \param[in] center The center point the Rectangle is to have * * A Rectangle of this object's size is positioned so that its center * is at the given Point. * * \return A Rectangle of this size, centered at the given Point. */ Rectangle Size::centeredTo(const Point &center) const { int x = center.x - width / 2; int y = center.y - height / 2; return { x, y, width, height }; } /** * \brief Scale size up by the given factor * \param[in] factor The factor * \return The scaled Size */ Size Size::operator*(float factor) const { return Size(width * factor, height * factor); } /** * \brief Scale size down by the given factor * \param[in] factor The factor * \return The scaled Size */ Size Size::operator/(float factor) const { return Size(width / factor, height / factor); } /** * \brief Scale this size up by the given factor in place * \param[in] factor The factor * \return A reference to this object */ Size &Size::operator*=(float factor) { width *= factor; height *= factor; return *this; } /** * \brief Scale this size down by the given factor in place * \param[in] factor The factor * \return A reference to this object */ Size &Size::operator/=(float factor) { width /= factor; height /= factor; return *this; } /** * \brief Compare sizes for equality * \return True if the two sizes are equal, false otherwise */ bool operator==(const Size &lhs, const Size &rhs) { return lhs.width == rhs.width && lhs.height == rhs.height; } /** * \brief Compare sizes for smaller than order * * Sizes are compared on three criteria, in the following order. * * - A size with smaller width and smaller height is smaller. * - A size with smaller area is smaller. * - A size with smaller width is smaller. 
 * * \return True if \a lhs is smaller than \a rhs, false otherwise */ bool operator<(const Size &lhs, const Size &rhs) { if (lhs.width < rhs.width && lhs.height < rhs.height) return true; else if (lhs.width >= rhs.width && lhs.height >= rhs.height) return false; uint64_t larea = static_cast<uint64_t>(lhs.width) * static_cast<uint64_t>(lhs.height); uint64_t rarea = static_cast<uint64_t>(rhs.width) * static_cast<uint64_t>(rhs.height); if (larea < rarea) return true; else if (larea > rarea) return false; return lhs.width < rhs.width; } /** * \fn bool operator!=(const Size &lhs, const Size &rhs) * \brief Compare sizes for inequality * \return True if the two sizes are not equal, false otherwise */ /** * \fn bool operator<=(const Size &lhs, const Size &rhs) * \brief Compare sizes for smaller than or equal to order * \return True if \a lhs is smaller than or equal to \a rhs, false otherwise * \sa bool operator<(const Size &lhs, const Size &rhs) */ /** * \fn bool operator>(const Size &lhs, const Size &rhs) * \brief Compare sizes for greater than order * \return True if \a lhs is greater than \a rhs, false otherwise * \sa bool operator<(const Size &lhs, const Size &rhs) */ /** * \fn bool operator>=(const Size &lhs, const Size &rhs) * \brief Compare sizes for greater than or equal to order * \return True if \a lhs is greater than or equal to \a rhs, false otherwise * \sa bool operator<(const Size &lhs, const Size &rhs) */ /** * \brief Insert a text representation of a Size into an output stream * \param[in] out The output stream * \param[in] s The size * \return The output stream \a out */ std::ostream &operator<<(std::ostream &out, const Size &s) { out << s.width << "x" << s.height; return out; } /** * \class SizeRange * \brief Describe a range of sizes * * A SizeRange describes a range of sizes included in the [min, max] interval * for both the width and the height. If the minimum and maximum sizes are * identical it represents a single size. * * Size ranges may further limit the valid sizes through steps in the horizontal * and vertical direction. The step values represent the increase in pixels * between two valid width or height values, starting from the minimum. Valid * sizes within the range are thus expressed as * * width = min.width + hStep * x * height = min.height + vStep * y * * Where * * width <= max.width * height <= max.height * * Note that the step values are not equivalent to alignments, as the minimum * width or height may not be a multiple of the corresponding step. * * The step values may be zero when the range describes only minimum and * maximum sizes without implying that all, or any, intermediate size is valid. * SizeRange instances that describe a single size have both step values set to 1. 
 */ /** * \fn SizeRange::SizeRange() * \brief Construct a size range initialized to 0 */ /** * \fn SizeRange::SizeRange(const Size &size) * \brief Construct a size range representing a single size * \param[in] size The size */ /** * \fn SizeRange::SizeRange(const Size &minSize, const Size &maxSize) * \brief Construct a size range with specified min and max, and steps of 1 * \param[in] minSize The minimum size * \param[in] maxSize The maximum size */ /** * \fn SizeRange::SizeRange(const Size &minSize, const Size &maxSize, * unsigned int hstep, unsigned int vstep) * \brief Construct a size range with specified min, max and step * \param[in] minSize The minimum size * \param[in] maxSize The maximum size * \param[in] hstep The horizontal step * \param[in] vstep The vertical step */ /** * \var SizeRange::min * \brief The minimum size */ /** * \var SizeRange::max * \brief The maximum size */ /** * \var SizeRange::hStep * \brief The horizontal step */ /** * \var SizeRange::vStep * \brief The vertical step */ /** * \brief Test if a size is contained in the range * \param[in] size Size to check * \return True if \a size is contained in the range */ bool SizeRange::contains(const Size &size) const { if (size.width < min.width || size.width > max.width || size.height < min.height || size.height > max.height || (hStep && (size.width - min.width) % hStep) || (vStep && (size.height - min.height) % vStep)) return false; return true; } /** * \brief Assemble and return a string describing the size range * \return A string describing the SizeRange */ std::string SizeRange::toString() const { std::stringstream ss; ss << *this; return ss.str(); } /** * \brief Compare size ranges for equality * \return True if the two size ranges are equal, false otherwise */ bool operator==(const SizeRange &lhs, const SizeRange &rhs) { return lhs.min == rhs.min && lhs.max == rhs.max; } /** * \fn bool operator!=(const SizeRange &lhs, const SizeRange &rhs) * \brief Compare size ranges for inequality * \return True if the two size ranges are not equal, false otherwise */ /** * \brief Insert a text representation of a SizeRange into an output stream * \param[in] out The output stream * \param[in] sr The size range * \return The output stream \a out */ std::ostream &operator<<(std::ostream &out, const SizeRange &sr) { out << "(" << sr.min << ")-(" << sr.max << ")/(+" << sr.hStep << ",+" << sr.vStep << ")"; return out; } /** * \class Rectangle * \brief Describe a rectangle's position and dimensions * * Rectangles are used to identify an area of an image. They are specified by * the coordinates of their top-left corner and their horizontal and vertical * size. * * The measurement unit of the rectangle coordinates and size, as well as the * reference point that the Rectangle::x and Rectangle::y displacements refer * to, are defined by the context where the rectangle is used. 
*/ /** * \fn Rectangle::Rectangle() * \brief Construct a Rectangle with all coordinates set to 0 */ /** * \fn Rectangle::Rectangle(int x, int y, const Size &size) * \brief Construct a Rectangle with the given position and size * \param[in] x The horizontal coordinate of the top-left corner * \param[in] y The vertical coordinate of the top-left corner * \param[in] size The size */ /** * \fn Rectangle::Rectangle(int x, int y, unsigned int width, unsigned int height) * \brief Construct a Rectangle with the given position and size * \param[in] x The horizontal coordinate of the top-left corner * \param[in] y The vertical coordinate of the top-left corner * \param[in] width The width * \param[in] height The height */ /** * \fn Rectangle::Rectangle(const Size &size) * \brief Construct a Rectangle of \a size with its top left corner located * at (0,0) * \param[in] size The desired Rectangle size */ /** * \var Rectangle::x * \brief The horizontal coordinate of the rectangle's top-left corner */ /** * \var Rectangle::y * \brief The vertical coordinate of the rectangle's top-left corner */ /** * \var Rectangle::width * \brief The distance between the left and right sides */ /** * \var Rectangle::height * \brief The distance between the top and bottom sides */ /** * \fn bool Rectangle::isNull() const * \brief Check if the rectangle is null * \return True if both the width and height are 0, or false otherwise */ /** * \brief Assemble and return a string describing the rectangle * \return A string describing the Rectangle */ const std::string Rectangle::toString() const { std::stringstream ss; ss << *this; return ss.str(); } /** * \brief Retrieve the center point of this rectangle * \return The center Point */ Point Rectangle::center() const { return { x + static_cast<int>(width / 2), y + static_cast<int>(height / 2) }; } /** * \fn Size Rectangle::size() const * \brief Retrieve the size of this rectangle * \return The Rectangle size */ /** * \fn Point Rectangle::topLeft() const * \brief Retrieve the coordinates of the top left corner of this Rectangle * \return The Rectangle's top left corner */ /** * \brief Apply a non-uniform rational scaling in place to this Rectangle * \param[in] numerator The numerators of the x and y scaling factors * \param[in] denominator The denominators of the x and y scaling factors * * A non-uniform scaling is applied in place such the resulting x * coordinates are multiplied by numerator.width / denominator.width, * and similarly for the y coordinates (using height in place of width). * * \return A reference to this object */ Rectangle &Rectangle::scaleBy(const Size &numerator, const Size &denominator) { x = static_cast<int64_t>(x) * numerator.width / denominator.width; y = static_cast<int64_t>(y) * numerator.height / denominator.height; width = static_cast<uint64_t>(width) * numerator.width / denominator.width; height = static_cast<uint64_t>(height) * numerator.height / denominator.height; return *this; } /** * \brief Translate this Rectangle in place by the given Point * \param[in] point The amount to translate the Rectangle by * * The Rectangle is translated in the x-direction by the point's x coordinate * and in the y-direction by the point's y coordinate. 
* * \return A reference to this object */ Rectangle &Rectangle::translateBy(const Point &point) { x += point.x; y += point.y; return *this; } /** * \brief Calculate the intersection of this Rectangle with another * \param[in] bound The Rectangle that is intersected with this Rectangle * * This function calculates the standard intersection of two rectangles. If the * rectangles do not overlap in either the x or y direction, then the size * of that dimension in the result (its width or height) is set to zero. Even * when one dimension is set to zero, note that the other dimension may still * have a positive value if there was some overlap. * * \return A Rectangle that is the intersection of the input rectangles */ Rectangle Rectangle::boundedTo(const Rectangle &bound) const { int topLeftX = std::max(x, bound.x); int topLeftY = std::max(y, bound.y); int bottomRightX = std::min<int>(x + width, bound.x + bound.width); int bottomRightY = std::min<int>(y + height, bound.y + bound.height); unsigned int newWidth = std::max(bottomRightX - topLeftX, 0); unsigned int newHeight = std::max(bottomRightY - topLeftY, 0); return { topLeftX, topLeftY, newWidth, newHeight }; } /** * \brief Enclose a Rectangle so as not to exceed another Rectangle * \param[in] boundary The limit that the returned Rectangle will not exceed * * The Rectangle is modified so that it does not exceed the given \a boundary. * This process involves translating the Rectangle if any of its edges * lie beyond \a boundary, so that those edges then lie along the boundary * instead. * * If either width or height are larger than \a boundary, then the returned * Rectangle is clipped to be no larger. But other than this, the * Rectangle is not clipped or reduced in size, merely translated. * * Note that this is not a conventional Rectangle intersection function * which is provided by boundedTo(). * * \return A Rectangle that does not extend beyond a boundary Rectangle */ Rectangle Rectangle::enclosedIn(const Rectangle &boundary) const { /* We can't be bigger than the boundary rectangle. */ Rectangle result = boundedTo(Rectangle{ x, y, boundary.size() }); result.x = std::clamp<int>(result.x, boundary.x, boundary.x + boundary.width - result.width); result.y = std::clamp<int>(result.y, boundary.y, boundary.y + boundary.height - result.height); return result; } /** * \brief Apply a non-uniform rational scaling to this Rectangle * \param[in] numerator The numerators of the x and y scaling factors * \param[in] denominator The denominators of the x and y scaling factors * * A non-uniform scaling is applied such the resulting x * coordinates are multiplied by numerator.width / denominator.width, * and similarly for the y coordinates (using height in place of width). * * \return The non-uniformly scaled Rectangle */ Rectangle Rectangle::scaledBy(const Size &numerator, const Size &denominator) const { int scaledX = static_cast<int64_t>(x) * numerator.width / denominator.width; int scaledY = static_cast<int64_t>(y) * numerator.height / denominator.height; unsigned int scaledWidth = static_cast<uint64_t>(width) * numerator.width / denominator.width; unsigned int scaledHeight = static_cast<uint64_t>(height) * numerator.height / denominator.height; return { scaledX, scaledY, scaledWidth, scaledHeight }; } /** * \brief Translate a Rectangle by the given amounts * \param[in] point The amount to translate the Rectangle by * * The Rectangle is translated in the x-direction by the point's x coordinate * and in the y-direction by the point's y coordinate. 
* * \return The translated Rectangle */ Rectangle Rectangle::translatedBy(const Point &point) const { return { x + point.x, y + point.y, width, height }; } /** * \brief Compare rectangles for equality * \return True if the two rectangles are equal, false otherwise */ bool operator==(const Rectangle &lhs, const Rectangle &rhs) { return lhs.x == rhs.x && lhs.y == rhs.y && lhs.width == rhs.width && lhs.height == rhs.height; } /** * \fn bool operator!=(const Rectangle &lhs, const Rectangle &rhs) * \brief Compare rectangles for inequality * \return True if the two rectangles are not equal, false otherwise */ /** * \brief Insert a text representation of a Rectangle into an output stream * \param[in] out The output stream * \param[in] r The rectangle * \return The output stream \a out */ std::ostream &operator<<(std::ostream &out, const Rectangle &r) { out << "(" << r.x << ", " << r.y << ")/" << r.width << "x" << r.height; return out; } } /* namespace libcamera */
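The Size and Rectangle helpers above compose naturally. As a minimal sketch, not part of the original file and with illustrative sensor dimensions, the following computes a centered 16:9 crop within a full frame:

#include <iostream>

#include <libcamera/geometry.h>

using namespace libcamera;

int main()
{
	/* Full frame and the desired output aspect ratio. */
	Rectangle frame(Size(4056, 3040));
	Size ratio(16, 9);

	/* Largest 16:9 size fitting the frame, aligned to even dimensions. */
	Size cropSize = frame.size().boundedToAspectRatio(ratio).alignedDownTo(2, 2);

	/* Centre the crop in the frame and keep it within the frame bounds. */
	Rectangle crop = cropSize.centeredTo(frame.center()).enclosedIn(frame);

	std::cout << "crop: " << crop << std::endl;

	return 0;
}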
0
repos/libcamera/src
repos/libcamera/src/libcamera/control_ids.cpp.in
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2019, Google Inc. * * control_ids.cpp : Control ID list * * This file is auto-generated. Do not edit. */ #include <libcamera/control_ids.h> #include <libcamera/controls.h> /** * \file control_ids.h * \brief Camera control identifiers */ namespace libcamera { /** * \brief Namespace for libcamera controls */ namespace controls { ${controls_doc} ${vendor_controls_doc} #ifndef __DOXYGEN__ /* * Keep the controls definitions hidden from doxygen as it incorrectly parses * them as functions. */ ${controls_def} ${vendor_controls_def} #endif /** * \brief List of all supported libcamera controls * * Unless otherwise stated, all controls are bi-directional, i.e. they can be * set through Request::controls() and returned out through Request::metadata(). */ extern const ControlIdMap controls { ${controls_map} }; } /* namespace controls */ } /* namespace libcamera */
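As a usage illustration for the controls generated from this template, the following sketch, not part of the original file, sets a control on a Request and reads the value reported back in the request metadata. controls::Brightness is used as an example of a generated control; which controls a given camera actually supports must be checked at runtime.

#include <iostream>

#include <libcamera/control_ids.h>
#include <libcamera/controls.h>
#include <libcamera/request.h>

using namespace libcamera;

void applyBrightness(Request *request)
{
	/* Queue a control value with the request... */
	request->controls().set(controls::Brightness, 0.5f);
}

void inspectBrightness(Request *request)
{
	/* ...and read the value reported back after completion. */
	const auto brightness = request->metadata().get(controls::Brightness);
	if (brightness)
		std::cout << "Brightness: " << *brightness << std::endl;
}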
0
repos/libcamera/src
repos/libcamera/src/libcamera/ipa_module.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2019, Google Inc. * * Image Processing Algorithm module */ #include "libcamera/internal/ipa_module.h" #include <algorithm> #include <array> #include <ctype.h> #include <dlfcn.h> #include <elf.h> #include <errno.h> #include <fcntl.h> #include <link.h> #include <string.h> #include <sys/stat.h> #include <sys/types.h> #include <unistd.h> #include <libcamera/base/file.h> #include <libcamera/base/log.h> #include <libcamera/base/span.h> #include <libcamera/base/utils.h> #include "libcamera/internal/pipeline_handler.h" /** * \file ipa_module.h * \brief Image Processing Algorithm module */ /** * \file ipa_module_info.h * \brief Image Processing Algorithm module information */ namespace libcamera { LOG_DEFINE_CATEGORY(IPAModule) namespace { template<typename T> typename std::remove_extent_t<T> *elfPointer(Span<const uint8_t> elf, off_t offset, size_t objSize) { size_t size = offset + objSize; if (size > elf.size() || size < objSize) return nullptr; return reinterpret_cast<typename std::remove_extent_t<T> *> (reinterpret_cast<const char *>(elf.data()) + offset); } template<typename T> typename std::remove_extent_t<T> *elfPointer(Span<const uint8_t> elf, off_t offset) { return elfPointer<T>(elf, offset, sizeof(T)); } int elfVerifyIdent(Span<const uint8_t> elf) { const char *e_ident = elfPointer<const char[EI_NIDENT]>(elf, 0); if (!e_ident) return -ENOEXEC; if (e_ident[EI_MAG0] != ELFMAG0 || e_ident[EI_MAG1] != ELFMAG1 || e_ident[EI_MAG2] != ELFMAG2 || e_ident[EI_MAG3] != ELFMAG3 || e_ident[EI_VERSION] != EV_CURRENT) return -ENOEXEC; int bitClass = sizeof(unsigned long) == 4 ? ELFCLASS32 : ELFCLASS64; if (e_ident[EI_CLASS] != bitClass) return -ENOEXEC; int a = 1; unsigned char endianness = *reinterpret_cast<char *>(&a) == 1 ? ELFDATA2LSB : ELFDATA2MSB; if (e_ident[EI_DATA] != endianness) return -ENOEXEC; return 0; } const ElfW(Shdr) *elfSection(Span<const uint8_t> elf, const ElfW(Ehdr) *eHdr, ElfW(Half) idx) { if (idx >= eHdr->e_shnum) return nullptr; off_t offset = eHdr->e_shoff + idx * static_cast<uint32_t>(eHdr->e_shentsize); return elfPointer<const ElfW(Shdr)>(elf, offset); } /** * \brief Retrieve address and size of a symbol from an mmap'ed ELF file * \param[in] elf Address and size of mmap'ed ELF file * \param[in] symbol Symbol name * * \return The memory region storing the symbol on success, or an empty span * otherwise */ Span<const uint8_t> elfLoadSymbol(Span<const uint8_t> elf, const char *symbol) { const ElfW(Ehdr) *eHdr = elfPointer<const ElfW(Ehdr)>(elf, 0); if (!eHdr) return {}; const ElfW(Shdr) *sHdr = elfSection(elf, eHdr, eHdr->e_shstrndx); if (!sHdr) return {}; off_t shnameoff = sHdr->sh_offset; /* Locate .dynsym section header. */ const ElfW(Shdr) *dynsym = nullptr; for (unsigned int i = 0; i < eHdr->e_shnum; i++) { sHdr = elfSection(elf, eHdr, i); if (!sHdr) return {}; off_t offset = shnameoff + sHdr->sh_name; const char *name = elfPointer<const char[8]>(elf, offset); if (!name) return {}; if (sHdr->sh_type == SHT_DYNSYM && !strcmp(name, ".dynsym")) { dynsym = sHdr; break; } } if (dynsym == nullptr) { LOG(IPAModule, Error) << "ELF has no .dynsym section"; return {}; } sHdr = elfSection(elf, eHdr, dynsym->sh_link); if (!sHdr) return {}; off_t dynsym_nameoff = sHdr->sh_offset; /* Locate symbol in the .dynsym section. 
*/ const ElfW(Sym) *targetSymbol = nullptr; unsigned int dynsym_num = dynsym->sh_size / dynsym->sh_entsize; for (unsigned int i = 0; i < dynsym_num; i++) { off_t offset = dynsym->sh_offset + dynsym->sh_entsize * i; const ElfW(Sym) *sym = elfPointer<const ElfW(Sym)>(elf, offset); if (!sym) return {}; offset = dynsym_nameoff + sym->st_name; const char *name = elfPointer<const char>(elf, offset, strlen(symbol) + 1); if (!name) return {}; if (!strcmp(name, symbol) && sym->st_info & STB_GLOBAL) { targetSymbol = sym; break; } } if (targetSymbol == nullptr) { LOG(IPAModule, Error) << "Symbol " << symbol << " not found"; return {}; } /* Locate and return data of symbol. */ sHdr = elfSection(elf, eHdr, targetSymbol->st_shndx); if (!sHdr) return {}; off_t offset = sHdr->sh_offset + (targetSymbol->st_value - sHdr->sh_addr); const uint8_t *data = elfPointer<const uint8_t>(elf, offset, targetSymbol->st_size); if (!data) return {}; return { data, targetSymbol->st_size }; } } /* namespace */ /** * \def IPA_MODULE_API_VERSION * \brief The IPA module API version * * This version number specifies the version for the layout of * struct IPAModuleInfo. The IPA module shall use this macro to * set its moduleAPIVersion field. * * \sa IPAModuleInfo::moduleAPIVersion */ /** * \struct IPAModuleInfo * \brief Information of an IPA module * * This structure contains the information of an IPA module. It is loaded, * read, and validated before anything else is loaded from the shared object. * * \var IPAModuleInfo::moduleAPIVersion * \brief The IPA module API version that the IPA module implements * * This version number specifies the version for the layout of * struct IPAModuleInfo. The IPA module shall report here the version that * it was built for, using the macro IPA_MODULE_API_VERSION. * * \var IPAModuleInfo::pipelineVersion * \brief The pipeline handler version that the IPA module is for * * \var IPAModuleInfo::pipelineName * \brief The name of the pipeline handler that the IPA module is for * * This name is used to match a pipeline handler with the module. * * \var IPAModuleInfo::name * \brief The name of the IPA module * * The name may be used to build file system paths to IPA-specific resources. * It shall only contain printable characters, and may not contain '*', '?' or * '\'. For IPA modules included in libcamera, it shall match the directory of * the IPA module in the source tree. * * \todo Allow user to choose to isolate open source IPAs */ /** * \var ipaModuleInfo * \brief Information of an IPA module * * An IPA module must export a struct IPAModuleInfo of this name. */ /** * \class IPAModule * \brief Wrapper around IPA module shared object */ /** * \brief Construct an IPAModule instance * \param[in] libPath path to IPA module shared object * * Loads the IPAModuleInfo from the IPA module shared object at libPath. * The IPA module shared object file must be of the same endianness and * bitness as libcamera. * * The caller shall call the isValid() function after constructing an * IPAModule instance to verify the validity of the IPAModule. 
*/ IPAModule::IPAModule(const std::string &libPath) : libPath_(libPath), valid_(false), loaded_(false), dlHandle_(nullptr), ipaCreate_(nullptr) { if (loadIPAModuleInfo() < 0) return; valid_ = true; } IPAModule::~IPAModule() { if (dlHandle_) dlclose(dlHandle_); } int IPAModule::loadIPAModuleInfo() { File file{ libPath_ }; if (!file.open(File::OpenModeFlag::ReadOnly)) { LOG(IPAModule, Error) << "Failed to open IPA library: " << strerror(-file.error()); return file.error(); } Span<const uint8_t> data = file.map(); int ret = elfVerifyIdent(data); if (ret) { LOG(IPAModule, Error) << "IPA module is not an ELF file"; return ret; } Span<const uint8_t> info = elfLoadSymbol(data, "ipaModuleInfo"); if (info.size() < sizeof(info_)) { LOG(IPAModule, Error) << "IPA module has no valid info"; return -EINVAL; } memcpy(&info_, info.data(), sizeof(info_)); if (info_.moduleAPIVersion != IPA_MODULE_API_VERSION) { LOG(IPAModule, Error) << "IPA module API version mismatch"; return -EINVAL; } /* * Validate the IPA module name. * * \todo Consider module naming restrictions to avoid escaping from a * base directory. Forbidding ".." may be enough, but this may be best * implemented in a different layer. */ std::string ipaName = info_.name; auto iter = std::find_if_not(ipaName.begin(), ipaName.end(), [](unsigned char c) -> bool { return isprint(c) && c != '?' && c != '*' && c != '\\'; }); if (iter != ipaName.end()) { LOG(IPAModule, Error) << "Invalid IPA module name '" << ipaName << "'"; return -EINVAL; } /* Load the signature. Failures are not fatal. */ File sign{ libPath_ + ".sign" }; if (!sign.open(File::OpenModeFlag::ReadOnly)) { LOG(IPAModule, Debug) << "IPA module " << libPath_ << " is not signed"; return 0; } data = sign.map(0, -1, File::MapFlag::Private); signature_.resize(data.size()); memcpy(signature_.data(), data.data(), data.size()); LOG(IPAModule, Debug) << "IPA module " << libPath_ << " is signed"; return 0; } /** * \brief Check if the IPAModule instance is valid * * An IPAModule instance is valid if the IPA module shared object exists and * the IPA module information it contains was successfully retrieved and * validated. * * \return True if the IPAModule is valid, false otherwise */ bool IPAModule::isValid() const { return valid_; } /** * \brief Retrieve the IPA module information * * The content of the IPA module information is loaded from the module, * and is valid only if the module is valid (as returned by isValid()). * Calling this function on an invalid module is an error. * * \return the IPA module information */ const struct IPAModuleInfo &IPAModule::info() const { return info_; } /** * \brief Retrieve the IPA module signature * * The IPA module signature is stored alongside the IPA module in a file with a * '.sign' suffix, and is loaded when the IPAModule instance is created. This * function returns the signature without verifying it. If the signature is * missing, the returned vector will be empty. * * \return The IPA module signature */ const std::vector<uint8_t> IPAModule::signature() const { return signature_; } /** * \brief Retrieve the IPA module path * * The IPA module path is the file name and path of the IPA module shared * object from which the IPA module was created. * * \return The IPA module path */ const std::string &IPAModule::path() const { return libPath_; } /** * \brief Load the IPA implementation factory from the shared object * * The IPA module shared object implements an IPAInterface object to be used * by pipeline handlers. 
This function loads the factory function from the * shared object. Later, createInterface() can be called to instantiate the * IPAInterface. * * This function only needs to be called successfully once, after which * createInterface() can be called as many times as IPAInterface instances are * needed. * * Calling this function on an invalid module (as returned by isValid()) is * an error. * * \return True if load was successful, or already loaded, and false otherwise */ bool IPAModule::load() { if (!valid_) return false; if (loaded_) return true; dlHandle_ = dlopen(libPath_.c_str(), RTLD_LAZY); if (!dlHandle_) { LOG(IPAModule, Error) << "Failed to open IPA module shared object: " << dlerror(); return false; } void *symbol = dlsym(dlHandle_, "ipaCreate"); if (!symbol) { LOG(IPAModule, Error) << "Failed to load ipaCreate() from IPA module shared object: " << dlerror(); dlclose(dlHandle_); dlHandle_ = nullptr; return false; } ipaCreate_ = reinterpret_cast<IPAIntfFactory>(symbol); loaded_ = true; return true; } /** * \brief Instantiate an IPA interface * * After loading the IPA module with load(), this function creates an instance * of the IPA module interface. * * Calling this function on a module that has not yet been loaded, or an * invalid module (as returned by load() and isValid(), respectively) is * an error. * * \return The IPA interface on success, or nullptr on error */ IPAInterface *IPAModule::createInterface() { if (!valid_ || !loaded_) return nullptr; return ipaCreate_(); } /** * \brief Verify if the IPA module matches a given pipeline handler * \param[in] pipe Pipeline handler to match with * \param[in] minVersion Minimum acceptable version of IPA module * \param[in] maxVersion Maximum acceptable version of IPA module * * This function checks if this IPA module matches the \a pipe pipeline handler, * and the input version range. * * \return True if the pipeline handler matches the IPA module, or false otherwise */ bool IPAModule::match(PipelineHandler *pipe, uint32_t minVersion, uint32_t maxVersion) const { return info_.pipelineVersion >= minVersion && info_.pipelineVersion <= maxVersion && !strcmp(info_.pipelineName, pipe->name()); } std::string IPAModule::logPrefix() const { return utils::basename(libPath_.c_str()); } } /* namespace libcamera */
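The lifetime described above (construct from a path, validate, then load the factory) can be sketched as follows. This is not part of the original file, the function name is hypothetical, and the module path comes from the caller:

#include <iostream>
#include <string>

#include "libcamera/internal/ipa_module.h"

using namespace libcamera;

int inspectModule(const std::string &path)
{
	/* Construction reads and validates the embedded IPAModuleInfo. */
	IPAModule module(path);
	if (!module.isValid())
		return -1;

	const IPAModuleInfo &info = module.info();
	std::cout << "IPA module '" << info.name << "' for pipeline '"
		  << info.pipelineName << "'" << std::endl;

	/* Resolve ipaCreate() before instantiating interfaces. */
	if (!module.load())
		return -1;

	return 0;
}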
0
repos/libcamera/src
repos/libcamera/src/libcamera/yaml_parser.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2022, Google Inc. * * libcamera YAML parsing helper */ #include "libcamera/internal/yaml_parser.h" #include <cstdlib> #include <errno.h> #include <functional> #include <limits> #include <libcamera/base/file.h> #include <libcamera/base/log.h> #include <libcamera/base/utils.h> #include <yaml.h> /** * \file libcamera/internal/yaml_parser.h * \brief A YAML parser helper */ namespace libcamera { LOG_DEFINE_CATEGORY(YamlParser) namespace { /* Empty static YamlObject as a safe result for invalid operations */ static const YamlObject empty; } /* namespace */ /** * \class YamlObject * \brief A class representing the tree structure of the YAML content * * The YamlObject class represents the tree structure of YAML content. A * YamlObject can be a dictionary or a list of YamlObjects, or a value if it is * a tree leaf. */ YamlObject::YamlObject() : type_(Type::Value) { } YamlObject::~YamlObject() = default; /** * \fn YamlObject::isValue() * \brief Return whether the YamlObject is a value * * \return True if the YamlObject is a value, false otherwise */ /** * \fn YamlObject::isList() * \brief Return whether the YamlObject is a list * * \return True if the YamlObject is a list, false otherwise */ /** * \fn YamlObject::isDictionary() * \brief Return whether the YamlObject is a dictionary * * \return True if the YamlObject is a dictionary, false otherwise */ /** * \fn YamlObject::size() * \brief Retrieve the number of elements in a dictionary or list YamlObject * * This function retrieves the size of the YamlObject, defined as the number of * child elements it contains. Only YamlObject instances of Dictionary or List * types have a size; calling this function on other types of instances is * invalid and results in undefined behaviour. * * \return The size of the YamlObject */ std::size_t YamlObject::size() const { switch (type_) { case Type::Dictionary: case Type::List: return list_.size(); default: return 0; } } /** * \fn template<typename T> YamlObject::get<T>() const * \brief Parse the YamlObject as a \a T value * * This function parses the value of the YamlObject as a \a T object, and * returns the value. If parsing fails (usually because the YamlObject doesn't * store a \a T value), std::nullopt is returned. * * \return The YamlObject value, or std::nullopt if parsing failed */ /** * \fn template<typename T, typename U> YamlObject::get<T>(U &&defaultValue) const * \brief Parse the YamlObject as a \a T value * \param[in] defaultValue The default value when failing to parse * * This function parses the value of the YamlObject as a \a T object, and * returns the value. If parsing fails (usually because the YamlObject doesn't * store a \a T value), the \a defaultValue is returned. 
* * \return The YamlObject value, or \a defaultValue if parsing failed */ #ifndef __DOXYGEN__ template<> std::optional<bool> YamlObject::Getter<bool>::get(const YamlObject &obj) const { if (obj.type_ != Type::Value) return std::nullopt; if (obj.value_ == "true") return true; else if (obj.value_ == "false") return false; return std::nullopt; } namespace { bool parseSignedInteger(const std::string &str, long min, long max, long *result) { if (str == "") return false; char *end; errno = 0; long value = std::strtol(str.c_str(), &end, 10); if ('\0' != *end || errno == ERANGE || value < min || value > max) return false; *result = value; return true; } bool parseUnsignedInteger(const std::string &str, unsigned long max, unsigned long *result) { if (str == "") return false; /* * strtoul() accepts strings representing a negative number, in which * case it negates the converted value. We don't want to silently accept * negative values and return a large positive number, so check for a * minus sign (after optional whitespace) and return an error. */ std::size_t found = str.find_first_not_of(" \t"); if (found != std::string::npos && str[found] == '-') return false; char *end; errno = 0; unsigned long value = std::strtoul(str.c_str(), &end, 10); if ('\0' != *end || errno == ERANGE || value > max) return false; *result = value; return true; } } /* namespace */ template<> std::optional<int8_t> YamlObject::Getter<int8_t>::get(const YamlObject &obj) const { if (obj.type_ != Type::Value) return std::nullopt; long value; if (!parseSignedInteger(obj.value_, std::numeric_limits<int8_t>::min(), std::numeric_limits<int8_t>::max(), &value)) return std::nullopt; return value; } template<> std::optional<uint8_t> YamlObject::Getter<uint8_t>::get(const YamlObject &obj) const { if (obj.type_ != Type::Value) return std::nullopt; unsigned long value; if (!parseUnsignedInteger(obj.value_, std::numeric_limits<uint8_t>::max(), &value)) return std::nullopt; return value; } template<> std::optional<int16_t> YamlObject::Getter<int16_t>::get(const YamlObject &obj) const { if (obj.type_ != Type::Value) return std::nullopt; long value; if (!parseSignedInteger(obj.value_, std::numeric_limits<int16_t>::min(), std::numeric_limits<int16_t>::max(), &value)) return std::nullopt; return value; } template<> std::optional<uint16_t> YamlObject::Getter<uint16_t>::get(const YamlObject &obj) const { if (obj.type_ != Type::Value) return std::nullopt; unsigned long value; if (!parseUnsignedInteger(obj.value_, std::numeric_limits<uint16_t>::max(), &value)) return std::nullopt; return value; } template<> std::optional<int32_t> YamlObject::Getter<int32_t>::get(const YamlObject &obj) const { if (obj.type_ != Type::Value) return std::nullopt; long value; if (!parseSignedInteger(obj.value_, std::numeric_limits<int32_t>::min(), std::numeric_limits<int32_t>::max(), &value)) return std::nullopt; return value; } template<> std::optional<uint32_t> YamlObject::Getter<uint32_t>::get(const YamlObject &obj) const { if (obj.type_ != Type::Value) return std::nullopt; unsigned long value; if (!parseUnsignedInteger(obj.value_, std::numeric_limits<uint32_t>::max(), &value)) return std::nullopt; return value; } template<> std::optional<float> YamlObject::Getter<float>::get(const YamlObject &obj) const { return obj.get<double>(); } template<> std::optional<double> YamlObject::Getter<double>::get(const YamlObject &obj) const { if (obj.type_ != Type::Value) return std::nullopt; if (obj.value_ == "") return std::nullopt; char *end; errno = 0; double value = 
utils::strtod(obj.value_.c_str(), &end); if ('\0' != *end || errno == ERANGE) return std::nullopt; return value; } template<> std::optional<std::string> YamlObject::Getter<std::string>::get(const YamlObject &obj) const { if (obj.type_ != Type::Value) return std::nullopt; return obj.value_; } template<> std::optional<Size> YamlObject::Getter<Size>::get(const YamlObject &obj) const { if (obj.type_ != Type::List) return std::nullopt; if (obj.list_.size() != 2) return std::nullopt; auto width = obj.list_[0].value->get<uint32_t>(); if (!width) return std::nullopt; auto height = obj.list_[1].value->get<uint32_t>(); if (!height) return std::nullopt; return Size(*width, *height); } #endif /* __DOXYGEN__ */ /** * \fn template<typename T> YamlObject::getList<T>() const * \brief Parse the YamlObject as a list of \a T * * This function parses the value of the YamlObject as a list of \a T objects, * and returns the value as a \a std::vector<T>. If parsing fails, std::nullopt * is returned. * * \return The YamlObject value as a std::vector<T>, or std::nullopt if parsing * failed */ #ifndef __DOXYGEN__ template<typename T, std::enable_if_t< std::is_same_v<bool, T> || std::is_same_v<float, T> || std::is_same_v<double, T> || std::is_same_v<int8_t, T> || std::is_same_v<uint8_t, T> || std::is_same_v<int16_t, T> || std::is_same_v<uint16_t, T> || std::is_same_v<int32_t, T> || std::is_same_v<uint32_t, T> || std::is_same_v<std::string, T> || std::is_same_v<Size, T>> *> std::optional<std::vector<T>> YamlObject::getList() const { if (type_ != Type::List) return std::nullopt; std::vector<T> values; values.reserve(list_.size()); for (const YamlObject &entry : asList()) { const auto value = entry.get<T>(); if (!value) return std::nullopt; values.emplace_back(*value); } return values; } template std::optional<std::vector<bool>> YamlObject::getList<bool>() const; template std::optional<std::vector<float>> YamlObject::getList<float>() const; template std::optional<std::vector<double>> YamlObject::getList<double>() const; template std::optional<std::vector<int8_t>> YamlObject::getList<int8_t>() const; template std::optional<std::vector<uint8_t>> YamlObject::getList<uint8_t>() const; template std::optional<std::vector<int16_t>> YamlObject::getList<int16_t>() const; template std::optional<std::vector<uint16_t>> YamlObject::getList<uint16_t>() const; template std::optional<std::vector<int32_t>> YamlObject::getList<int32_t>() const; template std::optional<std::vector<uint32_t>> YamlObject::getList<uint32_t>() const; template std::optional<std::vector<std::string>> YamlObject::getList<std::string>() const; template std::optional<std::vector<Size>> YamlObject::getList<Size>() const; #endif /* __DOXYGEN__ */ /** * \fn YamlObject::asDict() const * \brief Wrap a dictionary YamlObject in an adapter that exposes iterators * * The YamlObject class doesn't directly implement iterators, as the iterator * type depends on whether the object is a Dictionary or List. This function * wraps a YamlObject of Dictionary type into an adapter that exposes * iterators, as well as begin() and end() functions, allowing usage of * range-based for loops with YamlObject. As YAML mappings are not ordered, the * iteration order is not specified. * * The iterator's value_type is a * <em>std::pair<const std::string &, const \ref YamlObject &></em>. * * If the YamlObject is not of Dictionary type, the returned adapter operates * as an empty container. 
 * * \return An adapter of unspecified type compatible with range-based for loops */ /** * \fn YamlObject::asList() const * \brief Wrap a list YamlObject in an adapter that exposes iterators * * The YamlObject class doesn't directly implement iterators, as the iterator * type depends on whether the object is a Dictionary or List. This function * wraps a YamlObject of List type into an adapter that exposes iterators, as * well as begin() and end() functions, allowing usage of range-based for loops * with YamlObject. As YAML lists are ordered, the iteration order is identical * to the list order in the YAML data. * * The iterator's value_type is a <em>const YamlObject &</em>. * * If the YamlObject is not of List type, the returned adapter operates as an * empty container. * * \return An adapter of unspecified type compatible with range-based for loops */ /** * \fn YamlObject::operator[](std::size_t index) const * \brief Retrieve the element from list YamlObject by index * * This function retrieves an element of the YamlObject. Only YamlObject * instances of List type associate elements with an index; calling this * function on other types of instances is invalid and results in undefined * behaviour. * * \return The YamlObject as an element of the list */ const YamlObject &YamlObject::operator[](std::size_t index) const { if (type_ != Type::List || index >= size()) return empty; return *list_[index].value; } /** * \fn YamlObject::contains() * \brief Check if an element of a dictionary exists * * This function checks if the YamlObject contains an element. Only YamlObject * instances of Dictionary type associate elements with names; calling this * function on other types of instances is invalid and results in undefined * behaviour. * * \return True if an element exists, false otherwise */ bool YamlObject::contains(const std::string &key) const { if (dictionary_.find(std::ref(key)) == dictionary_.end()) return false; return true; } /** * \fn YamlObject::operator[](const std::string &key) const * \brief Retrieve a member by name from the dictionary * * This function retrieves a member of a YamlObject by name. Only YamlObject * instances of Dictionary type associate elements with names; calling this * function on other types of instances is invalid and results in undefined * behaviour. * * \return The YamlObject corresponding to the \a key member */ const YamlObject &YamlObject::operator[](const std::string &key) const { if (type_ != Type::Dictionary) return empty; auto iter = dictionary_.find(key); if (iter == dictionary_.end()) return empty; return *iter->second; } #ifndef __DOXYGEN__ class YamlParserContext { public: YamlParserContext(); ~YamlParserContext(); int init(File &file); int parseContent(YamlObject &yamlObject); private: struct EventDeleter { void operator()(yaml_event_t *event) const { yaml_event_delete(event); delete event; } }; using EventPtr = std::unique_ptr<yaml_event_t, EventDeleter>; static int yamlRead(void *data, unsigned char *buffer, size_t size, size_t *sizeRead); EventPtr nextEvent(); void readValue(std::string &value, EventPtr event); int parseDictionaryOrList(YamlObject::Type type, const std::function<int(EventPtr event)> &parseItem); int parseNextYamlObject(YamlObject &yamlObject, EventPtr event); bool parserValid_; yaml_parser_t parser_; }; /** * \class YamlParserContext * \brief Class for YamlParser parsing and context data * * The YamlParserContext class stores the internal yaml_parser_t and provides * helper functions to do event-based parsing for YAML files. 
 */ YamlParserContext::YamlParserContext() : parserValid_(false) { } /** * \brief Destructor of YamlParserContext */ YamlParserContext::~YamlParserContext() { if (parserValid_) { yaml_parser_delete(&parser_); parserValid_ = false; } } /** * \fn YamlParserContext::init() * \brief Initialize a parser with an opened file for parsing * \param[in] file The YAML file to parse * * Prior to parsing the YAML content, the YamlParserContext must be initialized * with a file to create an internal parser. The file needs to stay valid until * parsing completes. * * \return 0 on success or a negative error code otherwise * \retval -EINVAL The parser has failed to initialize */ int YamlParserContext::init(File &file) { /* yaml_parser_initialize returns 1 when it succeeds */ if (!yaml_parser_initialize(&parser_)) { LOG(YamlParser, Error) << "Failed to initialize YAML parser"; return -EINVAL; } parserValid_ = true; yaml_parser_set_input(&parser_, &YamlParserContext::yamlRead, &file); return 0; } int YamlParserContext::yamlRead(void *data, unsigned char *buffer, size_t size, size_t *sizeRead) { File *file = static_cast<File *>(data); Span<unsigned char> buf{ buffer, size }; ssize_t ret = file->read(buf); if (ret < 0) return 0; *sizeRead = ret; return 1; } /** * \fn YamlParserContext::nextEvent() * \brief Get the next event * * Get the next event in the current YAML event stream, and return nullptr when * there is no more event. * * \return The next event on success or nullptr otherwise */ YamlParserContext::EventPtr YamlParserContext::nextEvent() { EventPtr event(new yaml_event_t); /* yaml_parser_parse returns 1 when it succeeds */ if (!yaml_parser_parse(&parser_, event.get())) return nullptr; return event; } /** * \fn YamlParserContext::parseContent() * \brief Parse the content of a YAML document * \param[out] yamlObject The YamlObject to populate * * Check YAML start and end events of a YAML document, and parse the root object * of the YAML document into a YamlObject. * * \return 0 on success or a negative error code otherwise * \retval -EINVAL The parser has failed to validate end of a YAML file */ int YamlParserContext::parseContent(YamlObject &yamlObject) { /* Check start of the YAML file. */ EventPtr event = nextEvent(); if (!event || event->type != YAML_STREAM_START_EVENT) return -EINVAL; event = nextEvent(); if (!event || event->type != YAML_DOCUMENT_START_EVENT) return -EINVAL; /* Parse the root object. */ event = nextEvent(); if (parseNextYamlObject(yamlObject, std::move(event))) return -EINVAL; /* Check end of the YAML file. */ event = nextEvent(); if (!event || event->type != YAML_DOCUMENT_END_EVENT) return -EINVAL; event = nextEvent(); if (!event || event->type != YAML_STREAM_END_EVENT) return -EINVAL; return 0; } /** * \fn YamlParserContext::readValue() * \brief Parse an event scalar and fill its content into a string * \param[out] value The string to fill with the scalar value * \param[in] event The scalar event to read * * A helper function to parse a scalar event as a string. The caller needs to * guarantee the event is of scalar type. 
*/ void YamlParserContext::readValue(std::string &value, EventPtr event) { value.assign(reinterpret_cast<char *>(event->data.scalar.value), event->data.scalar.length); } /** * \fn YamlParserContext::parseDictionaryOrList() * \brief A helper function to abstract the common part of parsing a dictionary or list * * \param[in] type YamlObject::Type::Dictionary to parse a dictionary, or * YamlObject::Type::List to parse a list * \param[in] parseItem The callback to handle an item * * A helper function to abstract parsing an item from a dictionary or a list. * The two cases differ in the YAML event stream as follows: * * 1. The start and end event types are different * 2. There is a leading scalar string as key in the items of a dictionary * * The caller should handle the leading key string in its callback parseItem * when parsing a dictionary. * * \return 0 on success or a negative error code otherwise * \retval -EINVAL The YAML file is malformed, or the list or dictionary * exceeds the parser's size limit */ int YamlParserContext::parseDictionaryOrList(YamlObject::Type type, const std::function<int(EventPtr event)> &parseItem) { yaml_event_type_t endEventType = YAML_SEQUENCE_END_EVENT; if (type == YamlObject::Type::Dictionary) endEventType = YAML_MAPPING_END_EVENT; /* * Add a safety counter to make sure we don't loop indefinitely in case * the YAML file is malformed. */ for (unsigned int sentinel = 2000; sentinel; sentinel--) { auto evt = nextEvent(); if (!evt) return -EINVAL; if (evt->type == endEventType) return 0; int ret = parseItem(std::move(evt)); if (ret) return ret; } LOG(YamlParser, Error) << "The YAML file contains a List or Dictionary" " whose size exceeds the parser's limit (2000)"; return -EINVAL; } /** * \fn YamlParserContext::parseNextYamlObject() * \brief Parse the next YAML event and read it as a YamlObject * \param[out] yamlObject The YamlObject to populate * \param[in] event The leading event of the object * * Parse the next YAML object as a value, a list or a dictionary. * * \return 0 on success or a negative error code otherwise * \retval -EINVAL Failed to parse the YAML file.
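* * As an illustration, the hypothetical YAML fragment below produces the event * sequence shown on the right; this function dispatches on the leading event * and recurses through parseDictionaryOrList() for the nested items: * * \code{.unparsed} * key: -> MAPPING-START, SCALAR("key") * - 1 -> SEQUENCE-START, SCALAR("1") * - 2 -> SCALAR("2"), SEQUENCE-END, MAPPING-END * \endcode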
*/ int YamlParserContext::parseNextYamlObject(YamlObject &yamlObject, EventPtr event) { if (!event) return -EINVAL; switch (event->type) { case YAML_SCALAR_EVENT: yamlObject.type_ = YamlObject::Type::Value; readValue(yamlObject.value_, std::move(event)); return 0; case YAML_SEQUENCE_START_EVENT: { yamlObject.type_ = YamlObject::Type::List; auto &list = yamlObject.list_; auto handler = [this, &list](EventPtr evt) { list.emplace_back(std::string{}, std::make_unique<YamlObject>()); return parseNextYamlObject(*list.back().value, std::move(evt)); }; return parseDictionaryOrList(YamlObject::Type::List, handler); } case YAML_MAPPING_START_EVENT: { yamlObject.type_ = YamlObject::Type::Dictionary; auto &list = yamlObject.list_; auto handler = [this, &list](EventPtr evtKey) { /* Parse key */ if (evtKey->type != YAML_SCALAR_EVENT) { LOG(YamlParser, Error) << "Expected a key at line: " << evtKey->start_mark.line << " column: " << evtKey->start_mark.column; return -EINVAL; } std::string key; readValue(key, std::move(evtKey)); /* Parse value */ EventPtr evtValue = nextEvent(); if (!evtValue) return -EINVAL; auto &elem = list.emplace_back(std::move(key), std::make_unique<YamlObject>()); return parseNextYamlObject(*elem.value, std::move(evtValue)); }; int ret = parseDictionaryOrList(YamlObject::Type::Dictionary, handler); if (ret) return ret; auto &dictionary = yamlObject.dictionary_; for (const auto &elem : list) dictionary.emplace(elem.key, elem.value.get()); return 0; } default: LOG(YamlParser, Error) << "Invalid YAML file"; return -EINVAL; } } #endif /* __DOXYGEN__ */ /** * \class YamlParser * \brief A helper class for parsing a YAML file * * The YamlParser class provides an easy interface to parse the contents of a * YAML file into a tree of YamlObject instances. * * Example usage: * * \code{.unparsed} * * name: * "John" * numbers: * - 1 * - 2 * * \endcode * * The following code illustrates how to parse the above YAML file: * * \code{.cpp} * * std::unique_ptr<YamlObject> root = YamlParser::parse(fh); * if (!root) * return; * * if (!root->isDictionary()) * return; * * const YamlObject &name = (*root)["name"]; * std::cout << name.get<std::string>("") << std::endl; * * const YamlObject &numbers = (*root)["numbers"]; * if (!numbers.isList()) * return; * * for (std::size_t i = 0; i < numbers.size(); i++) * std::cout << numbers[i].get<int32_t>(0) << std::endl; * * \endcode * * The YamlParser::parse() function takes an open File instance, parses its * contents, and returns a pointer to a YamlObject corresponding to the root * node of the YAML document. * * The parser preserves the order of items in the YAML file, for both lists and * dictionaries. */ /** * \brief Parse a YAML file as a YamlObject * \param[in] file The YAML file to parse * * The YamlParser::parse() function takes a file, parses its contents, and * returns a pointer to a YamlObject corresponding to the root node of the YAML * document. * * \return Pointer to the resulting YamlObject on success or nullptr otherwise */ std::unique_ptr<YamlObject> YamlParser::parse(File &file) { YamlParserContext context; if (context.init(file)) return nullptr; std::unique_ptr<YamlObject> root(new YamlObject()); if (context.parseContent(*root)) { LOG(YamlParser, Error) << "Failed to parse YAML content from " << file.fileName(); return nullptr; } return root; } } /* namespace libcamera */
0
repos/libcamera/src
repos/libcamera/src/libcamera/ipa_manager.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2019, Google Inc. * * Image Processing Algorithm module manager */ #include "libcamera/internal/ipa_manager.h" #include <algorithm> #include <dirent.h> #include <string.h> #include <sys/types.h> #include <libcamera/base/file.h> #include <libcamera/base/log.h> #include <libcamera/base/utils.h> #include "libcamera/internal/ipa_module.h" #include "libcamera/internal/ipa_proxy.h" #include "libcamera/internal/pipeline_handler.h" /** * \file ipa_manager.h * \brief Image Processing Algorithm module manager */ namespace libcamera { LOG_DEFINE_CATEGORY(IPAManager) /** * \class IPAManager * \brief Manager for IPA modules * * The IPA module manager discovers IPA modules from disk, queries and loads * them, and creates IPA contexts. It supports isolation of the modules in a * separate process with IPC communication and offers a unified IPAInterface * view of the IPA contexts to pipeline handlers regardless of whether the * modules are isolated or loaded in the same process. * * Module isolation is based on the module licence. Open-source modules are * loaded without isolation, while closed-source modules are forcefully isolated. * The isolation mechanism ensures that no code from a closed-source module is * ever run in the libcamera process. * * To create an IPA context, pipeline handlers call the IPAManager::createIPA() * function. For a directly loaded module, the manager calls the module's * ipaCreate() function directly and wraps the returned context in an * IPAContextWrapper that exposes an IPAInterface. * * ~~~~ * +---------------+ * | Pipeline | * | Handler | * +---------------+ * | * v * +---------------+ +---------------+ * | IPA | | Open Source | * | Interface | | IPA Module | * | - - - - - - - | | - - - - - - - | * | IPA Context | ipa_context_ops | ipa_context | * | Wrapper | ----------------> | | * +---------------+ +---------------+ * ~~~~ * * For an isolated module, the manager instantiates an IPAProxy which spawns a * new process for an IPA proxy worker. The worker loads the IPA module and * creates the IPA context. The IPAProxy also exposes an IPAInterface. * * ~~~~ * +---------------+ +---------------+ * | Pipeline | | Closed Source | * | Handler | | IPA Module | * +---------------+ | - - - - - - - | * | | ipa_context | * v | | * +---------------+ +---------------+ * | IPA | ipa_context_ops ^ * | Interface | | * | - - - - - - - | +---------------+ * | IPA Proxy | operations | IPA Proxy | * | | ----------------> | Worker | * +---------------+ over IPC +---------------+ * ~~~~ * * The IPAInterface implemented by the IPAContextWrapper or IPAProxy is * returned to the pipeline handler, and all interactions with the IPA context * go through the same interface regardless of process isolation. * * In all cases the data passed to the IPAInterface member functions is * serialized to Plain Old Data, either for the purpose of passing it to the IPA * context plain C API, or to transmit the data to the isolated process through * IPC. */ IPAManager *IPAManager::self_ = nullptr; /** * \brief Construct an IPAManager instance * * The IPAManager class is meant to be instantiated only once, by the * CameraManager. */ IPAManager::IPAManager() { if (self_) LOG(IPAManager, Fatal) << "Multiple IPAManager objects are not allowed"; #if HAVE_IPA_PUBKEY if (!pubKey_.isValid()) LOG(IPAManager, Warning) << "Public key not valid"; #endif unsigned int ipaCount = 0; /* User-specified paths take precedence. 
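* For example, a hypothetical setting of * LIBCAMERA_IPA_MODULE_PATH=/opt/libcamera/ipa:/usr/local/lib/libcamera * searches both directories, in that order; empty entries are skipped.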
*/ const char *modulePaths = utils::secure_getenv("LIBCAMERA_IPA_MODULE_PATH"); if (modulePaths) { for (const auto &dir : utils::split(modulePaths, ":")) { if (dir.empty()) continue; ipaCount += addDir(dir.c_str()); } if (!ipaCount) LOG(IPAManager, Warning) << "No IPA found in '" << modulePaths << "'"; } /* * When libcamera is used before it is installed, load IPAs from the * same build directory as the libcamera library itself. */ std::string root = utils::libcameraBuildPath(); if (!root.empty()) { std::string ipaBuildPath = root + "src/ipa"; constexpr int maxDepth = 2; LOG(IPAManager, Info) << "libcamera is not installed. Adding '" << ipaBuildPath << "' to the IPA search path"; ipaCount += addDir(ipaBuildPath.c_str(), maxDepth); } /* Finally try to load IPAs from the installed system path. */ ipaCount += addDir(IPA_MODULE_DIR); if (!ipaCount) LOG(IPAManager, Warning) << "No IPA found in '" IPA_MODULE_DIR "'"; self_ = this; } IPAManager::~IPAManager() { for (IPAModule *module : modules_) delete module; self_ = nullptr; } /** * \brief Identify shared library objects within a directory * \param[in] libDir The directory to search for shared objects * \param[in] maxDepth The maximum depth of sub-directories to parse * \param[out] files A vector of paths to shared object library files * * Search a directory for .so files, allowing recursion down to sub-directories * no further than the depth specified by \a maxDepth. * * Discovered shared objects are added to the \a files vector. */ void IPAManager::parseDir(const char *libDir, unsigned int maxDepth, std::vector<std::string> &files) { struct dirent *ent; DIR *dir; dir = opendir(libDir); if (!dir) return; while ((ent = readdir(dir)) != nullptr) { if (ent->d_type == DT_DIR && maxDepth) { if (strcmp(ent->d_name, ".") == 0 || strcmp(ent->d_name, "..") == 0) continue; std::string subdir = std::string(libDir) + "/" + ent->d_name; /* Recursion is limited to maxDepth. */ parseDir(subdir.c_str(), maxDepth - 1, files); continue; } int offset = strlen(ent->d_name) - 3; if (offset < 0) continue; if (strcmp(&ent->d_name[offset], ".so")) continue; files.push_back(std::string(libDir) + "/" + ent->d_name); } closedir(dir); } /** * \brief Load IPA modules from a directory * \param[in] libDir The directory to search for IPA modules * \param[in] maxDepth The maximum depth of sub-directories to search * * This function tries to create an IPAModule instance for every shared object * found in \a libDir, and skips invalid IPA modules. * * Sub-directories are searched up to a depth of \a maxDepth. A \a maxDepth * value of 0 only searches the directory specified in \a libDir. * * \return Number of modules loaded by this call */ unsigned int IPAManager::addDir(const char *libDir, unsigned int maxDepth) { std::vector<std::string> files; parseDir(libDir, maxDepth, files); /* Ensure a stable ordering of modules. 
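* Loading in sorted path order keeps the contents of modules_ deterministic * across runs, so the first match returned by module() below does not depend * on the readdir() enumeration order.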
*/ std::sort(files.begin(), files.end()); unsigned int count = 0; for (const std::string &file : files) { IPAModule *ipaModule = new IPAModule(file); if (!ipaModule->isValid()) { delete ipaModule; continue; } LOG(IPAManager, Debug) << "Loaded IPA module '" << file << "'"; modules_.push_back(ipaModule); count++; } return count; } /** * \brief Retrieve an IPA module that matches a given pipeline handler * \param[in] pipe The pipeline handler * \param[in] minVersion Minimum acceptable version of IPA module * \param[in] maxVersion Maximum acceptable version of IPA module * * \return The first IPA module matching \a pipe and the given version range, * or nullptr if no module matches */ IPAModule *IPAManager::module(PipelineHandler *pipe, uint32_t minVersion, uint32_t maxVersion) { for (IPAModule *module : modules_) { if (module->match(pipe, minVersion, maxVersion)) return module; } return nullptr; } /** * \fn IPAManager::createIPA() * \brief Create an IPA proxy that matches a given pipeline handler * \param[in] pipe The pipeline handler that wants a matching IPA proxy * \param[in] minVersion Minimum acceptable version of IPA module * \param[in] maxVersion Maximum acceptable version of IPA module * * \return A newly created IPA proxy, or nullptr if no matching IPA module is * found or if the IPA proxy fails to initialize */ #if HAVE_IPA_PUBKEY /** * \fn IPAManager::pubKey() * \brief Retrieve the IPA module signing public key * * IPA module signature verification is normally handled internally by the * IPAManager class. This function is meant to be used by utilities that need to * verify signatures externally. * * \return The IPA module signing public key */ #endif bool IPAManager::isSignatureValid([[maybe_unused]] IPAModule *ipa) const { #if HAVE_IPA_PUBKEY char *force = utils::secure_getenv("LIBCAMERA_IPA_FORCE_ISOLATION"); if (force && force[0] != '\0') { LOG(IPAManager, Debug) << "Isolation of IPA module " << ipa->path() << " forced through environment variable"; return false; } File file{ ipa->path() }; if (!file.open(File::OpenModeFlag::ReadOnly)) return false; Span<uint8_t> data = file.map(); if (data.empty()) return false; bool valid = pubKey_.verify(data, ipa->signature()); LOG(IPAManager, Debug) << "IPA module " << ipa->path() << " signature is " << (valid ? "valid" : "not valid"); return valid; #else return false; #endif } } /* namespace libcamera */
0
repos/libcamera/src
repos/libcamera/src/libcamera/camera.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2018, Google Inc. * * Camera device */ #include <libcamera/camera.h> #include <array> #include <atomic> #include <iomanip> #include <libcamera/base/log.h> #include <libcamera/base/thread.h> #include <libcamera/color_space.h> #include <libcamera/framebuffer_allocator.h> #include <libcamera/request.h> #include <libcamera/stream.h> #include "libcamera/internal/camera.h" #include "libcamera/internal/camera_controls.h" #include "libcamera/internal/formats.h" #include "libcamera/internal/pipeline_handler.h" #include "libcamera/internal/request.h" /** * \file libcamera/camera.h * \brief Camera device handling * * \page camera-model Camera Model * * libcamera acts as a middleware between applications and camera hardware. It * provides a solution to an unsolvable problem: reconciling applications, * which need to run on different systems without dealing with device-specific * details, and camera hardware, which exhibits a wide variety of features, * limitations and architecture variations. In order to do so, it creates an * abstract camera model that hides the camera hardware from applications. The * model is designed to strike the right balance between genericity, to please * generic applications, and flexibility, to expose even the most specific * hardware features to the most demanding applications. * * In libcamera, a Camera is defined as a device that can capture frames * continuously from a camera sensor and store them in memory. If supported by * the device and desired by the application, the camera may store each * captured frame in multiple copies, possibly in different formats and sizes. * Each of these memory outputs of the camera is called a Stream. * * A camera contains a single image source, and separate camera instances * relate to different image sources. For instance, a phone containing front * and back image sensors will be modelled with two cameras, one for each * sensor. When multiple streams can be produced from the same image source, * all those streams are guaranteed to be part of the same camera. * * While not sharing image sources, separate cameras can share other system * resources, such as ISPs. For this reason camera instances may not be fully * independent, in which case usage restrictions may apply. For instance, a * phone with a front and a back camera may not allow usage of the two cameras * simultaneously. * * The camera model defines an implicit pipeline, whose input is the camera * sensor, and whose outputs are the streams. Along the pipeline, the frames * produced by the camera sensor are transformed by the camera into a format * suitable for applications, with image processing that improves the quality * of the captured frames. The camera exposes a set of controls that * applications may use to manually control the processing steps. This * high-level camera model is the minimum baseline that all cameras must * conform to. * * \section camera-pipeline-model Pipeline Model * * Camera hardware differs in the supported image processing operations and the * order in which they are applied. The libcamera pipelines abstract the * hardware differences and expose a logical view of the processing operations * with a fixed order. This offers low-level control of those operations to * applications, while keeping application code generic. * * Starting from the camera sensor, a pipeline applies the following * operations, in that order. 
* * - Pixel exposure * - Analog to digital conversion and readout * - Black level subtraction * - Defective pixel correction * - Lens shading correction * - Spatial noise filtering * - Per-channel gains (white balance) * - Demosaicing (color filter array interpolation) * - Color correction matrix (typically RGB to RGB) * - Gamma correction * - Color space transformation (typically RGB to YUV) * - Cropping * - Scaling * * Not all cameras implement all operations, and they are not necessarily * implemented in the above order at the hardware level. The libcamera pipeline * handlers translate the pipeline model to the real hardware configuration. * * \subsection camera-sensor-model Camera Sensor Model * * By default, libcamera configures the camera sensor automatically based on the * configuration of the streams. Applications may instead specify a manual * configuration for the camera sensor. This allows precise control of the frame * geometry and frame rate delivered by the sensor. * * More details about the camera sensor model implemented by libcamera are * available in the libcamera camera-sensor-model documentation page. * * \subsection digital-zoom Digital Zoom * * Digital zoom is implemented as a combination of the cropping and scaling * stages of the pipeline. Cropping is controlled explicitly through the * controls::ScalerCrop control, while scaling is controlled implicitly based * on the crop rectangle and the output stream size. The crop rectangle is * expressed relatively to the full pixel array size and indicates how the field * of view is affected by the pipeline. */ namespace libcamera { LOG_DECLARE_CATEGORY(Camera) /** * \class SensorConfiguration * \brief Camera sensor configuration * * The SensorConfiguration class collects parameters to control the operations * of the camera sensor, according to the abstract camera sensor model * implemented by libcamera. * * \todo Applications shall fully populate all fields of the * CameraConfiguration::sensorConfig class members before validating the * CameraConfiguration. If the SensorConfiguration is not fully populated, or if * any of its parameters cannot be applied to the sensor in use, the * CameraConfiguration validation process will fail and return * CameraConfiguration::Status::Invalid. * * Applications that populate the SensorConfiguration class members are * expected to be highly-specialized applications that know what sensor * they are operating with and what parameters are valid for the sensor in use. * * A detailed description of the abstract camera sensor model implemented by * libcamera and the description of its configuration parameters is available * in the libcamera documentation camera-sensor-model file. */ /** * \var SensorConfiguration::bitDepth * \brief The sensor image format bit depth * * The number of bits (resolution) used to represent a pixel sample. */ /** * \var SensorConfiguration::analogCrop * \brief The analog crop rectangle * * The selected portion of the active pixel array used to produce the image * frame. */ /** * \var SensorConfiguration::binning * \brief Sensor binning configuration * * Refer to the camera-sensor-model documentation for an accurate description * of the binning operations. Disabled by default. */ /** * \var SensorConfiguration::binX * \brief Horizontal binning factor * * The horizontal binning factor. Default to 1. */ /** * \var SensorConfiguration::binY * \brief Vertical binning factor * * The vertical binning factor. Default to 1. 
*/ /** * \var SensorConfiguration::skipping * \brief The sensor skipping configuration * * Refer to the camera-sensor-model documentation for an accurate description * of the skipping operations. * * If no skipping is performed, all the structure fields should be * set to 1. Disabled by default. */ /** * \var SensorConfiguration::xOddInc * \brief Horizontal increment for odd rows. Default to 1. */ /** * \var SensorConfiguration::xEvenInc * \brief Horizontal increment for even rows. Default to 1. */ /** * \var SensorConfiguration::yOddInc * \brief Vertical increment for odd columns. Default to 1. */ /** * \var SensorConfiguration::yEvenInc * \brief Vertical increment for even columns. Default to 1. */ /** * \var SensorConfiguration::outputSize * \brief The frame output (visible) size * * The size of the data frame as received by the host processor. */ /** * \brief Check if the sensor configuration is valid * * A sensor configuration is valid if it's fully populated. * * \todo For now allow applications to populate the bitDepth and the outputSize * only as skipping and binnings factors are initialized to 1 and the analog * crop is ignored. * * \return True if the sensor configuration is valid, false otherwise */ bool SensorConfiguration::isValid() const { if (bitDepth && binning.binX && binning.binY && skipping.xOddInc && skipping.yOddInc && skipping.xEvenInc && skipping.yEvenInc && !outputSize.isNull()) return true; return false; } /** * \class CameraConfiguration * \brief Hold configuration for streams of the camera * The CameraConfiguration holds an ordered list of stream configurations. It * supports iterators and operates as a vector of StreamConfiguration instances. * The stream configurations are inserted by addConfiguration(), and the * at() function or operator[] return a reference to the StreamConfiguration * based on its insertion index. Accessing a stream configuration with an * invalid index results in undefined behaviour. * * CameraConfiguration instances are retrieved from the camera with * Camera::generateConfiguration(). Applications may then inspect the * configuration, modify it, and possibly add new stream configuration entries * with addConfiguration(). Once the camera configuration satisfies the * application, it shall be validated by a call to validate(). The validation * implements "try" semantics: it adjusts invalid configurations to the closest * achievable parameters instead of rejecting them completely. Applications * then decide whether to accept the modified configuration, or try again with * a different set of parameters. Once the configuration is valid, it is passed * to Camera::configure(). 
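* * A minimal sketch of this workflow, assuming camera is a shared pointer to * an acquired Camera instance (error handling omitted): * * \code{.cpp} * std::unique_ptr<CameraConfiguration> config = * camera->generateConfiguration({ StreamRole::Viewfinder }); * config->at(0).size = { 1280, 720 }; * if (config->validate() == CameraConfiguration::Invalid) * return; * camera->configure(config.get()); * \endcode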
*/ /** * \enum CameraConfiguration::Status * \brief Validity of a camera configuration * \var CameraConfiguration::Valid * The configuration is fully valid * \var CameraConfiguration::Adjusted * The configuration has been adjusted to a valid configuration * \var CameraConfiguration::Invalid * The configuration is invalid and can't be adjusted automatically */ /** * \typedef CameraConfiguration::iterator * \brief Iterator for the stream configurations in the camera configuration */ /** * \typedef CameraConfiguration::const_iterator * \brief Const iterator for the stream configurations in the camera * configuration */ /** * \brief Create an empty camera configuration */ CameraConfiguration::CameraConfiguration() : orientation(Orientation::Rotate0), config_({}) { } CameraConfiguration::~CameraConfiguration() { } /** * \brief Add a stream configuration to the camera configuration * \param[in] cfg The stream configuration */ void CameraConfiguration::addConfiguration(const StreamConfiguration &cfg) { config_.push_back(cfg); } /** * \fn CameraConfiguration::validate() * \brief Validate and possibly adjust the camera configuration * * This function adjusts the camera configuration to the closest valid * configuration and returns the validation status. * * \todo Define exactly when to return each status code. Should stream * parameters set to 0 by the caller be adjusted without returning Adjusted? * This would potentially be useful for applications but would get in the way * in Camera::configure(). Do we need an extra status code to signal this? * * \todo Handle validation of buffers count when refactoring the buffers API. * * \return A CameraConfiguration::Status value that describes the validation * status. * \retval CameraConfiguration::Invalid The configuration is invalid and can't * be adjusted. This may only occur in extreme cases such as when the * configuration is empty. * \retval CameraConfiguration::Adjusted The configuration has been adjusted * and is now valid. Parameters may have changed for any stream, and stream * configurations may have been removed. The caller shall check the * configuration carefully. * \retval CameraConfiguration::Valid The configuration was already valid and * hasn't been adjusted. */ /** * \brief Retrieve a reference to a stream configuration * \param[in] index Numerical index * * The \a index represents the zero-based insertion order of stream * configuration into the camera configuration with addConfiguration(). Calling * this function with an invalid index results in undefined behaviour. * * \return The stream configuration */ StreamConfiguration &CameraConfiguration::at(unsigned int index) { return config_[index]; } /** * \brief Retrieve a const reference to a stream configuration * \param[in] index Numerical index * * The \a index represents the zero-based insertion order of stream * configuration into the camera configuration with addConfiguration(). Calling * this function with an invalid index results in undefined behaviour. * * \return The stream configuration */ const StreamConfiguration &CameraConfiguration::at(unsigned int index) const { return config_[index]; } /** * \fn StreamConfiguration &CameraConfiguration::operator[](unsigned int) * \brief Retrieve a reference to a stream configuration * \param[in] index Numerical index * * The \a index represents the zero-based insertion order of stream * configuration into the camera configuration with addConfiguration(). Calling * this function with an invalid index results in undefined behaviour.
* * \return The stream configuration */ /** * \fn const StreamConfiguration &CameraConfiguration::operator[](unsigned int) const * \brief Retrieve a const reference to a stream configuration * \param[in] index Numerical index * * The \a index represents the zero-based insertion order of stream * configuration into the camera configuration with addConfiguration(). Calling * this function with an invalid index results in undefined behaviour. * * \return The stream configuration */ /** * \brief Retrieve an iterator to the first stream configuration in the * sequence * \return An iterator to the first stream configuration */ CameraConfiguration::iterator CameraConfiguration::begin() { return config_.begin(); } /** * \brief Retrieve a const iterator to the first element of the stream * configurations * \return A const iterator to the first stream configuration */ CameraConfiguration::const_iterator CameraConfiguration::begin() const { return config_.begin(); } /** * \brief Retrieve an iterator pointing to the past-the-end stream * configuration in the sequence * \return An iterator to the element following the last stream configuration */ CameraConfiguration::iterator CameraConfiguration::end() { return config_.end(); } /** * \brief Retrieve a const iterator pointing to the past-the-end stream * configuration in the sequence * \return A const iterator to the element following the last stream * configuration */ CameraConfiguration::const_iterator CameraConfiguration::end() const { return config_.end(); } /** * \brief Check if the camera configuration is empty * \return True if the configuration is empty */ bool CameraConfiguration::empty() const { return config_.empty(); } /** * \brief Retrieve the number of stream configurations * \return Number of stream configurations */ std::size_t CameraConfiguration::size() const { return config_.size(); } /** * \enum CameraConfiguration::ColorSpaceFlag * \brief Specify the behaviour of validateColorSpaces * \var CameraConfiguration::ColorSpaceFlag::None * \brief No extra validation of color spaces is required * \var CameraConfiguration::ColorSpaceFlag::StreamsShareColorSpace * \brief Non-raw output streams must share the same color space */ /** * \typedef CameraConfiguration::ColorSpaceFlags * \brief A bitwise combination of ColorSpaceFlag values */ /** * \brief Check the color spaces requested for each stream * \param[in] flags Flags to control the behaviour of this function * * This function performs certain consistency checks on the color spaces of * the streams and may adjust them so that: * * - Any raw streams have the Raw color space * - If the StreamsShareColorSpace flag is set, all output streams are forced * to share the same color space (this may be a constraint on some platforms). * * It is optional for a pipeline handler to use this function. * * \return A CameraConfiguration::Status value that describes the validation * status. * \retval CameraConfiguration::Adjusted The configuration has been adjusted * and is now valid. The color space of some or all of the streams may have * been changed. The caller shall check the color spaces carefully. * \retval CameraConfiguration::Valid The configuration was already valid and * hasn't been adjusted. */ CameraConfiguration::Status CameraConfiguration::validateColorSpaces(ColorSpaceFlags flags) { Status status = Valid; /* * Set all raw streams to the Raw color space, and make a note of the * largest non-raw stream with a defined color space (if there is one). 
*/ std::optional<ColorSpace> colorSpace; Size size; for (StreamConfiguration &cfg : config_) { if (!cfg.colorSpace) continue; if (cfg.colorSpace->adjust(cfg.pixelFormat)) status = Adjusted; if (cfg.colorSpace != ColorSpace::Raw && cfg.size > size) { colorSpace = cfg.colorSpace; size = cfg.size; } } if (!colorSpace || !(flags & ColorSpaceFlag::StreamsShareColorSpace)) return status; /* Make all output color spaces the same, if requested. */ for (auto &cfg : config_) { if (cfg.colorSpace != ColorSpace::Raw && cfg.colorSpace != colorSpace) { cfg.colorSpace = colorSpace; status = Adjusted; } } return status; } /** * \var CameraConfiguration::sensorConfig * \brief The camera sensor configuration * * The sensorConfig member allows manual control of the configuration of the * camera sensor. By default, if sensorConfig is not set, the camera will * configure the sensor automatically based on the configuration of the streams. * Applications can override this by manually specifying the full sensor * configuration. * * Refer to the camera-sensor-model documentation and to the SensorConfiguration * class documentation for details about the sensor configuration process. * * The camera sensor configuration applies to all streams produced by a camera * from the same image source. */ /** * \var CameraConfiguration::orientation * \brief The desired orientation of the images produced by the camera * * The orientation field is a user-specified 2D plane transformation that * specifies how the application wants the camera images to be rotated in * the memory buffers. * * If the orientation requested by the application cannot be obtained, the * camera will not rotate or flip the images, and the validate() function will * Adjust this value to the native image orientation produced by the camera. * * By default the orientation field is set to Orientation::Rotate0. */ /** * \var CameraConfiguration::config_ * \brief The vector of stream configurations */ /** * \class Camera::Private * \brief Base class for camera private data * * The Camera::Private class stores all private data associated with a camera. * In addition to hiding core Camera data from the public API, it is expected to * be subclassed by pipeline handlers to store pipeline-specific data. * * Pipeline handlers can obtain the Camera::Private instance associated with a * camera by calling Camera::_d(). */ /** * \brief Construct a Camera::Private instance * \param[in] pipe The pipeline handler responsible for the camera device */ Camera::Private::Private(PipelineHandler *pipe) : requestSequence_(0), pipe_(pipe->shared_from_this()), disconnected_(false), state_(CameraAvailable) { } Camera::Private::~Private() { if (state_.load(std::memory_order_acquire) != Private::CameraAvailable) LOG(Camera, Error) << "Removing camera while still in use"; } /** * \fn Camera::Private::pipe() * \brief Retrieve the pipeline handler related to this camera * \return The pipeline handler that created this camera */ /** * \fn Camera::Private::validator() * \brief Retrieve the control validator related to this camera * \return The control validator associated with this camera */ /** * \var Camera::Private::queuedRequests_ * \brief The list of queued and not yet completed requests * * This list tracks requests queued in order to ensure completion of all * requests when the pipeline handler is stopped. 
* * \sa PipelineHandler::queueRequest(), PipelineHandler::stop(), * PipelineHandler::completeRequest() */ /** * \var Camera::Private::controlInfo_ * \brief The set of controls supported by the camera * * The control information shall be initialised by the pipeline handler when * creating the camera. * * \todo This member was initially meant to stay constant after the camera is * created. Several pipeline handlers are already updating it when the camera * is configured. Update the documentation accordingly, and possibly the API as * well, when implementing official support for control info updates. */ /** * \var Camera::Private::properties_ * \brief The list of properties supported by the camera * * The list of camera properties shall be initialised by the pipeline handler * when creating the camera, and shall not be modified afterwards. */ /** * \var Camera::Private::requestSequence_ * \brief The queuing sequence number of the request * * When requests are queued, they are given a per-camera sequence number to * facilitate debugging of internal request usage. * * The requestSequence_ tracks the number of requests queued to a camera * over a single capture session. */ static const char *const camera_state_names[] = { "Available", "Acquired", "Configured", "Stopping", "Running", }; bool Camera::Private::isAcquired() const { return state_.load(std::memory_order_acquire) != CameraAvailable; } bool Camera::Private::isRunning() const { return state_.load(std::memory_order_acquire) == CameraRunning; } int Camera::Private::isAccessAllowed(State state, bool allowDisconnected, const char *from) const { if (!allowDisconnected && disconnected_) return -ENODEV; State currentState = state_.load(std::memory_order_acquire); if (currentState == state) return 0; ASSERT(static_cast<unsigned int>(state) < std::size(camera_state_names)); LOG(Camera, Error) << "Camera in " << camera_state_names[currentState] << " state trying " << from << "() requiring state " << camera_state_names[state]; return -EACCES; } int Camera::Private::isAccessAllowed(State low, State high, bool allowDisconnected, const char *from) const { if (!allowDisconnected && disconnected_) return -ENODEV; State currentState = state_.load(std::memory_order_acquire); if (currentState >= low && currentState <= high) return 0; ASSERT(static_cast<unsigned int>(low) < std::size(camera_state_names) && static_cast<unsigned int>(high) < std::size(camera_state_names)); LOG(Camera, Error) << "Camera in " << camera_state_names[currentState] << " state trying " << from << "() requiring state between " << camera_state_names[low] << " and " << camera_state_names[high]; return -EACCES; } void Camera::Private::disconnect() { /* * If the camera was running when the hardware was removed force the * state to Configured state to allow applications to free resources * and call release() before deleting the camera. */ if (state_.load(std::memory_order_acquire) == Private::CameraRunning) state_.store(Private::CameraConfigured, std::memory_order_release); disconnected_ = true; } void Camera::Private::setState(State state) { state_.store(state, std::memory_order_release); } /** * \class Camera * \brief Camera device * * \todo Add documentation for camera start timings. What exactly does the * camera expect the pipeline handler to do when start() is called? * * The Camera class models a camera capable of producing one or more image * streams from a single image source. It provides the main interface to * configuring and controlling the device, and capturing image streams. 
It is * the central object exposed by libcamera. * * To support the central nature of Camera objects, libcamera manages the * lifetime of camera instances with std::shared_ptr<>. Instances shall be * created with the create() function which returns a shared pointer. The * Camera constructors and destructor are private, to prevent instances from * being constructed and destroyed manually. * * \section camera_operation Operating the Camera * * An application needs to perform a sequence of operations on a camera before * it is ready to process requests. The camera needs to be acquired and * configured to prepare the camera for capture. Once started the camera can * process requests until it is stopped. When an application is done with a * camera, the camera needs to be released. * * An application may start and stop a camera multiple times as long as it is * not released. The camera may also be reconfigured. * * Functions that affect the camera state as defined below are generally not * synchronized with each other by the Camera class. The caller is responsible * for ensuring their synchronization if necessary. * * \subsection Camera States * * To help manage the sequence of operations needed to control the camera a set * of states are defined. Each state describes which operations may be performed * on the camera. Performing an operation not allowed in the camera state * results in undefined behaviour. Operations not listed at all in the state * diagram are allowed in all states. * * \dot * digraph camera_state_machine { * node [shape = doublecircle ]; Available; * node [shape = circle ]; Acquired; * node [shape = circle ]; Configured; * node [shape = circle ]; Stopping; * node [shape = circle ]; Running; * * Available -> Available [label = "release()"]; * Available -> Acquired [label = "acquire()"]; * * Acquired -> Available [label = "release()"]; * Acquired -> Configured [label = "configure()"]; * * Configured -> Available [label = "release()"]; * Configured -> Configured [label = "configure(), createRequest()"]; * Configured -> Running [label = "start()"]; * * Running -> Stopping [label = "stop()"]; * Stopping -> Configured; * Running -> Running [label = "createRequest(), queueRequest()"]; * } * \enddot * * \subsubsection Available * The base state of a camera, an application can inspect the properties of the * camera to determine if it wishes to use it. If an application wishes to use * a camera it should acquire() it to proceed to the Acquired state. * * \subsubsection Acquired * In the acquired state an application has exclusive access to the camera and * may modify the camera's parameters to configure it and proceed to the * Configured state. * * \subsubsection Configured * The camera is configured and ready to be started. The application may * release() the camera and to get back to the Available state or start() * it to progress to the Running state. * * \subsubsection Stopping * The camera has been asked to stop. Pending requests are being completed or * cancelled, and no new requests are permitted to be queued. The camera will * transition to the Configured state when all queued requests have been * returned to the application. * * \subsubsection Running * The camera is running and ready to process requests queued by the * application. The camera remains in this state until it is stopped and moved * to the Configured state. 
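* * The states above map to the following minimal usage sketch, where the * shared camera pointer is assumed to have been obtained from the * CameraManager (error handling and buffer allocation omitted): * * \code{.cpp} * camera->acquire(); * * std::unique_ptr<CameraConfiguration> config = * camera->generateConfiguration({ StreamRole::Viewfinder }); * config->validate(); * camera->configure(config.get()); * * camera->start(); * std::unique_ptr<Request> request = camera->createRequest(); * // Add buffers to the request, then queue it. * camera->queueRequest(request.get()); * * camera->stop(); * camera->release(); * \endcode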
*/ /** * \brief Create a camera instance * \param[in] d Camera private data * \param[in] id The ID of the camera device * \param[in] streams Array of streams the camera provides * * The caller is responsible for guaranteeing a stable and unique camera ID * matching the constraints described by Camera::id(). Parameters that are * allocated dynamically at system startup, such as bus numbers that may be * enumerated differently, are therefore not suitable to use in the ID. * * Pipeline handlers that use a CameraSensor may use the CameraSensor::id() to * generate an ID that satisfies the criteria of a stable and unique camera ID. * * \return A shared pointer to the newly created camera object */ std::shared_ptr<Camera> Camera::create(std::unique_ptr<Private> d, const std::string &id, const std::set<Stream *> &streams) { ASSERT(d); struct Deleter : std::default_delete<Camera> { void operator()(Camera *camera) { if (Thread::current() == camera->thread()) delete camera; else camera->deleteLater(); } }; Camera *camera = new Camera(std::move(d), id, streams); return std::shared_ptr<Camera>(camera, Deleter()); } /** * \brief Retrieve the ID of the camera * * The camera ID is a free-form string that identifies a camera in the system. * IDs are guaranteed to be unique and stable: the same camera, when connected * to the system in the same way (e.g. in the same USB port), will have the same * ID across both unplug/replug and system reboots. * * Applications may store the camera ID and use it later to acquire the same * camera. They shall treat the ID as an opaque identifier, without interpreting * its value. * * Camera IDs may change when the system hardware or firmware is modified, for * instance when replacing a PCI USB controller or moving it to another PCI * slot, or updating the ACPI tables or Device Tree. * * \context This function is \threadsafe. * * \return ID of the camera device */ const std::string &Camera::id() const { return _d()->id_; } /** * \var Camera::bufferCompleted * \brief Signal emitted when a buffer for a request queued to the camera has * completed */ /** * \var Camera::requestCompleted * \brief Signal emitted when a request queued to the camera has completed */ /** * \var Camera::disconnected * \brief Signal emitted when the camera is disconnected from the system * * This signal is emitted when libcamera detects that the camera has been * removed from the system. For hot-pluggable devices this is usually caused by * physical device disconnection. The media device is passed as a parameter. * * As soon as this signal is emitted the camera instance will refuse all new * application API calls by returning errors immediately. */ Camera::Camera(std::unique_ptr<Private> d, const std::string &id, const std::set<Stream *> &streams) : Extensible(std::move(d)) { _d()->id_ = id; _d()->streams_ = streams; _d()->validator_ = std::make_unique<CameraControlValidator>(this); } Camera::~Camera() { } /** * \brief Notify camera disconnection * * This function is used to notify the camera instance that the underlying * hardware has been unplugged. In response to the disconnection the camera * instance notifies the application by emitting the #disconnected signal, and * ensures that all new calls to the application-facing Camera API return an * error immediately. * * \todo Deal with pending requests if the camera is disconnected in a * running state. 
*/ void Camera::disconnect() { LOG(Camera, Debug) << "Disconnecting camera " << id(); _d()->disconnect(); disconnected.emit(); } int Camera::exportFrameBuffers(Stream *stream, std::vector<std::unique_ptr<FrameBuffer>> *buffers) { Private *const d = _d(); int ret = d->isAccessAllowed(Private::CameraConfigured); if (ret < 0) return ret; if (streams().find(stream) == streams().end()) return -EINVAL; if (d->activeStreams_.find(stream) == d->activeStreams_.end()) return -EINVAL; return d->pipe_->invokeMethod(&PipelineHandler::exportFrameBuffers, ConnectionTypeBlocking, this, stream, buffers); } /** * \brief Acquire the camera device for exclusive access * * After opening the device with open(), exclusive access must be obtained * before performing operations that change the device state. This function is * not blocking, if the device has already been acquired (by the same or another * process) the -EBUSY error code is returned. * * Acquiring a camera may limit usage of any other camera(s) provided by the * same pipeline handler to the same instance of libcamera. The limit is in * effect until all cameras from the pipeline handler are released. Other * instances of libcamera can still list and examine the cameras but will fail * if they attempt to acquire() any of them. * * Once exclusive access isn't needed anymore, the device should be released * with a call to the release() function. * * \context This function is \threadsafe. It may only be called when the camera * is in the Available state as defined in \ref camera_operation. * * \return 0 on success or a negative error code otherwise * \retval -ENODEV The camera has been disconnected from the system * \retval -EBUSY The camera is not free and can't be acquired by the caller */ int Camera::acquire() { Private *const d = _d(); /* * No manual locking is required as PipelineHandler::lock() is * thread-safe. */ int ret = d->isAccessAllowed(Private::CameraAvailable); if (ret < 0) return ret == -EACCES ? -EBUSY : ret; if (!d->pipe_->acquire()) { LOG(Camera, Info) << "Pipeline handler in use by another process"; return -EBUSY; } d->setState(Private::CameraAcquired); return 0; } /** * \brief Release exclusive access to the camera device * * Releasing the camera device allows other users to acquire exclusive access * with the acquire() function. * * \context This function may only be called when the camera is in the * Available or Configured state as defined in \ref camera_operation, and shall * be synchronized by the caller with other functions that affect the camera * state. * * \return 0 on success or a negative error code otherwise * \retval -EBUSY The camera is running and can't be released */ int Camera::release() { Private *const d = _d(); int ret = d->isAccessAllowed(Private::CameraAvailable, Private::CameraConfigured, true); if (ret < 0) return ret == -EACCES ? -EBUSY : ret; if (d->isAcquired()) d->pipe_->release(this); d->setState(Private::CameraAvailable); return 0; } /** * \brief Retrieve the list of controls supported by the camera * * The list of controls supported by the camera and their associated * constraints remain constant through the lifetime of the Camera object. * * \context This function is \threadsafe. * * \return A ControlInfoMap listing the controls supported by the camera */ const ControlInfoMap &Camera::controls() const { return _d()->controlInfo_; } /** * \brief Retrieve the list of properties of the camera * * Camera properties are static information that describe the capabilities of * the camera. 
They remain constant through the lifetime of the Camera object. * * \return A ControlList of properties supported by the camera */ const ControlList &Camera::properties() const { return _d()->properties_; } /** * \brief Retrieve all the camera's stream information * * Retrieve all of the camera's static stream information. The static * information describes among other things how many streams the camera * supports and the capabilities of each stream. * * \context This function is \threadsafe. * * \return An array of all the camera's streams */ const std::set<Stream *> &Camera::streams() const { return _d()->streams_; } /** * \brief Generate a default camera configuration according to stream roles * \param[in] roles A list of stream roles * * Generate a camera configuration for a set of desired stream roles. The caller * specifies a list of stream roles and the camera returns a configuration * containing suitable streams and their suggested default configurations. An * empty list of roles is valid, and will generate an empty configuration that * can be filled by the caller. * * \context This function is \threadsafe. * * \return A CameraConfiguration if the requested roles can be satisfied, or a * null pointer otherwise. */ std::unique_ptr<CameraConfiguration> Camera::generateConfiguration(Span<const StreamRole> roles) { Private *const d = _d(); int ret = d->isAccessAllowed(Private::CameraAvailable, Private::CameraRunning); if (ret < 0) return nullptr; if (roles.size() > streams().size()) return nullptr; std::unique_ptr<CameraConfiguration> config = d->pipe_->generateConfiguration(this, roles); if (!config) { LOG(Camera, Debug) << "Pipeline handler failed to generate configuration"; return nullptr; } std::ostringstream msg("streams configuration:", std::ios_base::ate); if (config->empty()) msg << " empty"; for (unsigned int index = 0; index < config->size(); ++index) msg << " (" << index << ") " << config->at(index).toString(); LOG(Camera, Debug) << msg.str(); return config; } /** * \fn std::unique_ptr<CameraConfiguration> \ * Camera::generateConfiguration(std::initializer_list<StreamRole> roles) * \overload */ /** * \brief Configure the camera prior to capture * \param[in] config The camera configurations to setup * * Prior to starting capture, the camera must be configured to select a * group of streams to be involved in the capture and their configuration. * The caller specifies which streams are to be involved and their configuration * by populating \a config. * * The configuration is created by generateConfiguration(), and adjusted by the * caller with CameraConfiguration::validate(). This function only accepts fully * valid configurations and returns an error if \a config is not valid. * * Exclusive access to the camera shall be ensured by a call to acquire() prior * to calling this function, otherwise an -EACCES error will be returned. * * \context This function may only be called when the camera is in the Acquired * or Configured state as defined in \ref camera_operation, and shall be * synchronized by the caller with other functions that affect the camera * state. * * Upon return the StreamConfiguration entries in \a config are associated with * Stream instances which can be retrieved with StreamConfiguration::stream(). 
* * \return 0 on success or a negative error code otherwise * \retval -ENODEV The camera has been disconnected from the system * \retval -EACCES The camera is not in a state where it can be configured * \retval -EINVAL The configuration is not valid */ int Camera::configure(CameraConfiguration *config) { Private *const d = _d(); int ret = d->isAccessAllowed(Private::CameraAcquired, Private::CameraConfigured); if (ret < 0) return ret; for (auto it : *config) it.setStream(nullptr); if (config->validate() != CameraConfiguration::Valid) { LOG(Camera, Error) << "Can't configure camera with invalid configuration"; return -EINVAL; } std::ostringstream msg("configuring streams:", std::ios_base::ate); for (unsigned int index = 0; index < config->size(); ++index) { StreamConfiguration &cfg = config->at(index); msg << " (" << index << ") " << cfg.toString(); } LOG(Camera, Info) << msg.str(); ret = d->pipe_->invokeMethod(&PipelineHandler::configure, ConnectionTypeBlocking, this, config); if (ret) return ret; d->activeStreams_.clear(); for (const StreamConfiguration &cfg : *config) { Stream *stream = cfg.stream(); if (!stream) { LOG(Camera, Fatal) << "Pipeline handler failed to update stream configuration"; d->activeStreams_.clear(); return -EINVAL; } stream->configuration_ = cfg; d->activeStreams_.insert(stream); } d->setState(Private::CameraConfigured); return 0; } /** * \brief Create a request object for the camera * \param[in] cookie Opaque cookie for application use * * This function creates an empty request for the application to fill with * buffers and parameters, and queue for capture. * * The \a cookie is stored in the request and is accessible through the * Request::cookie() function at any time. It is typically used by applications * to map the request to an external resource in the request completion * handler, and is completely opaque to libcamera. * * The ownership of the returned request is passed to the caller, which is * responsible for deleting it. The request may be deleted in the completion * handler, or reused after resetting its state with Request::reuse(). * * \context This function is \threadsafe. It may only be called when the camera * is in the Configured or Running state as defined in \ref camera_operation. * * \return A pointer to the newly created request, or nullptr on error */ std::unique_ptr<Request> Camera::createRequest(uint64_t cookie) { Private *const d = _d(); int ret = d->isAccessAllowed(Private::CameraConfigured, Private::CameraRunning); if (ret < 0) return nullptr; std::unique_ptr<Request> request = std::make_unique<Request>(this, cookie); /* Associate the request with the pipeline handler. */ d->pipe_->registerRequest(request.get()); return request; } /** * \brief Queue a request to the camera * \param[in] request The request to queue to the camera * * This function queues a \a request to the camera for capture. * * After allocating the request with createRequest(), the application shall * fill it with at least one capture buffer before queuing it. Requests that * contain no buffers are invalid and are rejected without being queued. * * Once the request has been queued, the camera will notify its completion * through the \ref requestCompleted signal. * * \context This function is \threadsafe. It may only be called when the camera * is in the Running state as defined in \ref camera_operation. 
* * \return 0 on success or a negative error code otherwise * \retval -ENODEV The camera has been disconnected from the system * \retval -EACCES The camera is not running so requests can't be queued * \retval -EXDEV The request does not belong to this camera * \retval -EINVAL The request is invalid * \retval -ENOMEM No buffer memory was available to handle the request */ int Camera::queueRequest(Request *request) { Private *const d = _d(); int ret = d->isAccessAllowed(Private::CameraRunning); if (ret < 0) return ret; /* Requests can only be queued to the camera that created them. */ if (request->_d()->camera() != this) { LOG(Camera, Error) << "Request was not created by this camera"; return -EXDEV; } if (request->status() != Request::RequestPending) { LOG(Camera, Error) << request->toString() << " is not valid"; return -EINVAL; } /* * The camera state may change until the end of the function. No locking * is however needed as PipelineHandler::queueRequest() will handle * this. */ if (request->buffers().empty()) { LOG(Camera, Error) << "Request contains no buffers"; return -EINVAL; } for (auto const &it : request->buffers()) { const Stream *stream = it.first; if (d->activeStreams_.find(stream) == d->activeStreams_.end()) { LOG(Camera, Error) << "Invalid request"; return -EINVAL; } } d->pipe_->invokeMethod(&PipelineHandler::queueRequest, ConnectionTypeQueued, request); return 0; } /** * \brief Start capture from camera * \param[in] controls Controls to be applied before starting the Camera * * Start the camera capture session, optionally providing a list of controls to * apply before starting. Once the camera is started the application can queue * requests to the camera to process and return to the application until the * capture session is terminated with \a stop(). * * \context This function may only be called when the camera is in the * Configured state as defined in \ref camera_operation, and shall be * synchronized by the caller with other functions that affect the camera * state. * * \return 0 on success or a negative error code otherwise * \retval -ENODEV The camera has been disconnected from the system * \retval -EACCES The camera is not in a state where it can be started */ int Camera::start(const ControlList *controls) { Private *const d = _d(); int ret = d->isAccessAllowed(Private::CameraConfigured); if (ret < 0) return ret; LOG(Camera, Debug) << "Starting capture"; ASSERT(d->requestSequence_ == 0); ret = d->pipe_->invokeMethod(&PipelineHandler::start, ConnectionTypeBlocking, this, controls); if (ret) return ret; d->setState(Private::CameraRunning); return 0; } /** * \brief Stop capture from camera * * This function stops capturing and processing requests immediately. All * pending requests are cancelled and complete synchronously in an error state. * * \context This function may be called in any camera state as defined in \ref * camera_operation, and shall be synchronized by the caller with other * functions that affect the camera state. If called when the camera isn't * running, it is a no-op. 
* * \return 0 on success or a negative error code otherwise * \retval -ENODEV The camera has been disconnected from the system * \retval -EACCES The camera is not running so can't be stopped */ int Camera::stop() { Private *const d = _d(); /* * \todo Make calling stop() when not in 'Running' part of the state * machine rather than take this shortcut */ if (!d->isRunning()) return 0; int ret = d->isAccessAllowed(Private::CameraRunning); if (ret < 0) return ret; LOG(Camera, Debug) << "Stopping capture"; d->setState(Private::CameraStopping); d->pipe_->invokeMethod(&PipelineHandler::stop, ConnectionTypeBlocking, this); ASSERT(!d->pipe_->hasPendingRequests(this)); d->setState(Private::CameraConfigured); return 0; } /** * \brief Handle request completion and notify application * \param[in] request The request that has completed * * This function is called by the pipeline handler to notify the camera that * the request has completed. It emits the requestCompleted signal. */ void Camera::requestComplete(Request *request) { /* Disconnected cameras are still able to complete requests. */ if (_d()->isAccessAllowed(Private::CameraStopping, Private::CameraRunning, true)) LOG(Camera, Fatal) << "Trying to complete a request when stopped"; requestCompleted.emit(request); } } /* namespace libcamera */
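/*
 * A minimal usage sketch (not part of the upstream file) of the Camera API
 * documented above, walking configure(), createRequest(), queueRequest(),
 * start() and stop(). Error handling is elided, and the presence of at least
 * one camera is an assumption.
 */
#include <memory>
#include <vector>

#include <libcamera/camera.h>
#include <libcamera/camera_manager.h>
#include <libcamera/framebuffer.h>
#include <libcamera/framebuffer_allocator.h>
#include <libcamera/request.h>
#include <libcamera/stream.h>

using namespace libcamera;

static void onRequestCompleted(Request *request)
{
	/* The buffers in a completed request hold the captured frames. */
	if (request->status() == Request::RequestComplete) {
		/* ... consume frame data here ... */
	}
}

static int captureSession()
{
	CameraManager cm;
	cm.start();

	/* Assumption: at least one camera is present on the system. */
	std::shared_ptr<Camera> camera = cm.cameras()[0];
	camera->acquire();
	camera->requestCompleted.connect(&onRequestCompleted);

	/* Generate, validate and apply a viewfinder configuration. */
	std::unique_ptr<CameraConfiguration> config =
		camera->generateConfiguration({ StreamRole::Viewfinder });
	config->validate();
	camera->configure(config.get());

	Stream *stream = config->at(0).stream();
	FrameBufferAllocator allocator(camera);
	allocator.allocate(stream);

	/* One request per allocated buffer; requests must outlive completion. */
	std::vector<std::unique_ptr<Request>> requests;
	for (const std::unique_ptr<FrameBuffer> &buffer : allocator.buffers(stream)) {
		std::unique_ptr<Request> request = camera->createRequest();
		request->addBuffer(stream, buffer.get());
		requests.push_back(std::move(request));
	}

	camera->start();
	for (std::unique_ptr<Request> &request : requests)
		camera->queueRequest(request.get());

	/* ... wait for completions, then tear down in reverse order. */
	camera->stop();
	camera->release();
	cm.stop();
	return 0;
}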
0
repos/libcamera/src
repos/libcamera/src/libcamera/pub_key.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2020, Google Inc. * * Public key signature verification */ #include "libcamera/internal/pub_key.h" #if HAVE_CRYPTO #include <openssl/evp.h> #include <openssl/rsa.h> #include <openssl/sha.h> #include <openssl/x509.h> #elif HAVE_GNUTLS #include <gnutls/abstract.h> #endif /** * \file pub_key.h * \brief Public key signature verification */ namespace libcamera { /** * \class PubKey * \brief Public key wrapper for signature verification * * The PubKey class wraps a public key and implements signature verification. It * only supports RSA keys and the RSA-SHA256 signature algorithm. */ /** * \brief Construct a PubKey from key data * \param[in] key Key data encoded in DER format */ PubKey::PubKey([[maybe_unused]] Span<const uint8_t> key) : valid_(false) { #if HAVE_CRYPTO const uint8_t *data = key.data(); pubkey_ = d2i_PUBKEY(nullptr, &data, key.size()); if (!pubkey_) return; valid_ = true; #elif HAVE_GNUTLS int ret = gnutls_pubkey_init(&pubkey_); if (ret < 0) return; const gnutls_datum_t gnuTlsKey{ const_cast<unsigned char *>(key.data()), static_cast<unsigned int>(key.size()) }; ret = gnutls_pubkey_import(pubkey_, &gnuTlsKey, GNUTLS_X509_FMT_DER); if (ret < 0) return; valid_ = true; #endif } PubKey::~PubKey() { #if HAVE_CRYPTO EVP_PKEY_free(pubkey_); #elif HAVE_GNUTLS gnutls_pubkey_deinit(pubkey_); #endif } /** * \fn bool PubKey::isValid() const * \brief Check if the public key is valid * \return True if the public key is valid, false otherwise */ /** * \brief Verify signature on data * \param[in] data The signed data * \param[in] sig The signature * * Verify that the signature \a sig matches the signed \a data for the public * key. The signature algorithm is hardcoded to RSA-SHA256. * * \return True if the signature is valid, false otherwise */ bool PubKey::verify([[maybe_unused]] Span<const uint8_t> data, [[maybe_unused]] Span<const uint8_t> sig) const { if (!valid_) return false; #if HAVE_CRYPTO /* * Create and initialize a public key algorithm context for signature * verification. */ EVP_PKEY_CTX *ctx = EVP_PKEY_CTX_new(pubkey_, nullptr); if (!ctx) return false; if (EVP_PKEY_verify_init(ctx) <= 0 || EVP_PKEY_CTX_set_rsa_padding(ctx, RSA_PKCS1_PADDING) <= 0 || EVP_PKEY_CTX_set_signature_md(ctx, EVP_sha256()) <= 0) { EVP_PKEY_CTX_free(ctx); return false; } /* Calculate the SHA256 digest of the data. */ uint8_t digest[SHA256_DIGEST_LENGTH]; SHA256(data.data(), data.size(), digest); /* Decrypt the signature and verify it matches the digest. */ int ret = EVP_PKEY_verify(ctx, sig.data(), sig.size(), digest, SHA256_DIGEST_LENGTH); EVP_PKEY_CTX_free(ctx); return ret == 1; #elif HAVE_GNUTLS const gnutls_datum_t gnuTlsData{ const_cast<unsigned char *>(data.data()), static_cast<unsigned int>(data.size()) }; const gnutls_datum_t gnuTlsSig{ const_cast<unsigned char *>(sig.data()), static_cast<unsigned int>(sig.size()) }; int ret = gnutls_pubkey_verify_data2(pubkey_, GNUTLS_SIGN_RSA_SHA256, 0, &gnuTlsData, &gnuTlsSig); return ret >= 0; #else return false; #endif } } /* namespace libcamera */
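/*
 * A short sketch (not part of the upstream file) showing how the internal
 * PubKey class documented above could be used to verify an RSA-SHA256
 * signature over a blob. The DER key, blob and signature contents are
 * assumptions supplied by the caller.
 */
#include <stdint.h>
#include <vector>

#include <libcamera/base/span.h>

#include "libcamera/internal/pub_key.h"

bool verifyBlob(const std::vector<uint8_t> &derKey,
		const std::vector<uint8_t> &blob,
		const std::vector<uint8_t> &signature)
{
	/* Wrap the DER-encoded key; isValid() reports import failures. */
	libcamera::PubKey key(libcamera::Span<const uint8_t>(derKey.data(), derKey.size()));
	if (!key.isValid())
		return false;

	/* True only if `signature` matches `blob` for the wrapped key. */
	return key.verify(libcamera::Span<const uint8_t>(blob.data(), blob.size()),
			  libcamera::Span<const uint8_t>(signature.data(), signature.size()));
}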
0
repos/libcamera/src
repos/libcamera/src/libcamera/byte_stream_buffer.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2019, Google Inc. * * Byte stream buffer */ #include "libcamera/internal/byte_stream_buffer.h" #include <stdint.h> #include <string.h> #include <libcamera/base/log.h> /** * \file byte_stream_buffer.h * \brief Managed memory container for serialized data */ namespace libcamera { LOG_DEFINE_CATEGORY(Serialization) /** * \class ByteStreamBuffer * \brief Wrap a memory buffer and provide sequential data read and write * * The ByteStreamBuffer class wraps a memory buffer and exposes sequential read * and write operations with integrated boundary checks. Accesses beyond the end * of the buffer are blocked and logged, allowing error checks to take place at * the end of access operations instead of at each access. This simplifies * serialization and deserialization of data. * * A byte stream buffer is created with a base memory pointer and a size. If the * memory pointer is const, the buffer operates in read-only mode, and write * operations are denied. Otherwise the buffer operates in write-only mode, and * read operations are denied. * * Once a buffer is created, data is read or written with read() and write() * respectively. Access is strictly sequential, the buffer keeps track of the * current access location and advances it automatically. Reading or writing * the same location multiple times is thus not possible. Bytes may also be * skipped with the skip() function. * * The ByteStreamBuffer also supports carving out pieces of memory into other * ByteStreamBuffer instances. Like a read or write operation, a carveOut() * advances the internal access location, but allows the carved out memory to * be accessed at a later time. * * All accesses beyond the end of the buffer (read, write, skip or carve out) * are blocked. The first of such accesses causes a message to be logged, and * the buffer to be marked as having overflown. If the buffer has been carved * out from a parent buffer, the parent buffer is also marked as having * overflown. Any later access on an overflown buffer is blocked. The buffer * overflow status can be checked with the overflow() function. */ /** * \brief Construct a read ByteStreamBuffer from the memory area \a base * of \a size * \param[in] base The address of the memory area to wrap * \param[in] size The size of the memory area to wrap */ ByteStreamBuffer::ByteStreamBuffer(const uint8_t *base, size_t size) : parent_(nullptr), base_(base), size_(size), overflow_(false), read_(base), write_(nullptr) { } /** * \brief Construct a write ByteStreamBuffer from the memory area \a base * of \a size * \param[in] base The address of the memory area to wrap * \param[in] size The size of the memory area to wrap */ ByteStreamBuffer::ByteStreamBuffer(uint8_t *base, size_t size) : parent_(nullptr), base_(base), size_(size), overflow_(false), read_(nullptr), write_(base) { } /** * \brief Construct a ByteStreamBuffer from the contents of \a other using move * semantics * \param[in] other The other buffer * * After the move construction the \a other buffer is invalidated. Any attempt * to access its contents will be considered as an overflow. */ ByteStreamBuffer::ByteStreamBuffer(ByteStreamBuffer &&other) { *this = std::move(other); } /** * \brief Replace the contents of the buffer with those of \a other using move * semantics * \param[in] other The other buffer * * After the assignment the \a other buffer is invalidated. Any attempt to * access its contents will be considered as an overflow.
*/ ByteStreamBuffer &ByteStreamBuffer::operator=(ByteStreamBuffer &&other) { parent_ = other.parent_; base_ = other.base_; size_ = other.size_; overflow_ = other.overflow_; read_ = other.read_; write_ = other.write_; other.parent_ = nullptr; other.base_ = nullptr; other.size_ = 0; other.overflow_ = false; other.read_ = nullptr; other.write_ = nullptr; return *this; } /** * \fn ByteStreamBuffer::base() * \brief Retrieve a pointer to the start location of the managed memory buffer * \return A pointer to the managed memory buffer */ /** * \fn ByteStreamBuffer::offset() * \brief Retrieve the offset of the current access location from the base * \return The offset in bytes */ /** * \fn ByteStreamBuffer::size() * \brief Retrieve the size of the managed memory buffer * \return The size of the managed memory buffer */ /** * \fn ByteStreamBuffer::overflow() * \brief Check if the buffer has overflown * \return True if the buffer has overflown, false otherwise */ void ByteStreamBuffer::setOverflow() { if (parent_) parent_->setOverflow(); overflow_ = true; } /** * \brief Carve out an area of \a size bytes into a new ByteStreamBuffer * \param[in] size The size of the newly created memory buffer * * This function carves out an area of \a size bytes from the buffer into a new * ByteStreamBuffer, and returns the new buffer. It operates identically to a * read or write access from the point of view of the current buffer, but allows * the new buffer to be read or written at a later time after other read or * write accesses on the current buffer. * * \return A newly created ByteStreamBuffer of \a size */ ByteStreamBuffer ByteStreamBuffer::carveOut(size_t size) { if (!size_ || overflow_) return ByteStreamBuffer(static_cast<const uint8_t *>(nullptr), 0); const uint8_t *curr = read_ ? read_ : write_; if (curr + size > base_ + size_) { LOG(Serialization, Error) << "Unable to reserve " << size << " bytes"; setOverflow(); return ByteStreamBuffer(static_cast<const uint8_t *>(nullptr), 0); } if (read_) { ByteStreamBuffer b(read_, size); b.parent_ = this; read_ += size; return b; } else { ByteStreamBuffer b(write_, size); b.parent_ = this; write_ += size; return b; } } /** * \brief Skip \a size bytes from the buffer * \param[in] size The number of bytes to skip * * This function skips the next \a size bytes from the buffer. * * \return 0 on success, a negative error code otherwise * \retval -ENOSPC no more space is available in the managed memory buffer */ int ByteStreamBuffer::skip(size_t size) { if (overflow_) return -ENOSPC; const uint8_t *curr = read_ ?
read_ : write_; if (curr + size > base_ + size_) { LOG(Serialization, Error) << "Unable to skip " << size << " bytes"; setOverflow(); return -ENOSPC; } if (read_) { read_ += size; } else { memset(write_, 0, size); write_ += size; } return 0; } /** * \fn template<typename T> int ByteStreamBuffer::read(T *t) * \brief Read data from the managed memory buffer into \a t * \param[out] t Pointer to the memory containing the read data * \return 0 on success, a negative error code otherwise * \retval -EACCES attempting to read from a write buffer * \retval -ENOSPC no more space is available in the managed memory buffer */ /** * \fn template<typename T> int ByteStreamBuffer::read(const Span<T> &data) * \brief Read data from the managed memory buffer into Span \a data * \param[out] data Span representing the destination memory * \return 0 on success, a negative error code otherwise * \retval -EACCES attempting to read from a write buffer * \retval -ENOSPC no more space is available in the managed memory buffer */ /** * \fn template<typename T> const T *ByteStreamBuffer::read(size_t count) * \brief Read data from the managed memory buffer without performing a copy * \param[in] count Number of data items to read * * This function reads \a count elements of type \a T from the buffer. Unlike * the other read variants, it doesn't copy the data but returns a pointer to * the first element. If data can't be read for any reason (usually due to * reading more data than available), the function returns nullptr. * * \return A pointer to the data on success, or nullptr otherwise */ /** * \fn template<typename T> int ByteStreamBuffer::write(const T *t) * \brief Write \a t to the managed memory buffer * \param[in] t The data to write to memory * \return 0 on success, a negative error code otherwise * \retval -EACCES attempting to write to a read buffer * \retval -ENOSPC no more space is available in the managed memory buffer */ /** * \fn template<typename T> int ByteStreamBuffer::write(const Span<T> &data) * \brief Write \a data to the managed memory buffer * \param[in] data The data to write to memory * \return 0 on success, a negative error code otherwise * \retval -EACCES attempting to write to a read buffer * \retval -ENOSPC no more space is available in the managed memory buffer */ const uint8_t *ByteStreamBuffer::read(size_t size, size_t count) { if (!read_) return nullptr; if (overflow_) return nullptr; size_t bytes; if (__builtin_mul_overflow(size, count, &bytes)) { setOverflow(); return nullptr; } if (read_ + bytes > base_ + size_) { LOG(Serialization, Error) << "Unable to read " << bytes << " bytes: out of bounds"; setOverflow(); return nullptr; } const uint8_t *data = read_; read_ += bytes; return data; } int ByteStreamBuffer::read(uint8_t *data, size_t size) { if (!read_) return -EACCES; if (overflow_) return -ENOSPC; if (read_ + size > base_ + size_) { LOG(Serialization, Error) << "Unable to read " << size << " bytes: out of bounds"; setOverflow(); return -ENOSPC; } memcpy(data, read_, size); read_ += size; return 0; } int ByteStreamBuffer::write(const uint8_t *data, size_t size) { if (!write_) return -EACCES; if (overflow_) return -ENOSPC; if (write_ + size > base_ + size_) { LOG(Serialization, Error) << "Unable to write " << size << " bytes: no space left"; setOverflow(); return -ENOSPC; } memcpy(write_, data, size); write_ += size; return 0; } } /* namespace libcamera */
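/*
 * A minimal round-trip sketch (not part of the upstream file) for the
 * ByteStreamBuffer API documented above: a write buffer serializes a header
 * and a payload, and a read buffer wrapping the same storage deserializes
 * them, with a single overflow check validating each sequence.
 */
#include <stdint.h>

#include "libcamera/internal/byte_stream_buffer.h"

bool roundTrip()
{
	uint8_t storage[32] = {};

	/* Sequential writes advance the buffer's internal access location. */
	libcamera::ByteStreamBuffer writer(storage, sizeof(storage));
	uint32_t version = 2;
	uint16_t payload = 0xabcd;
	writer.write(&version);
	writer.skip(4);		/* Reserved space, zero-filled on write. */
	writer.write(&payload);
	if (writer.overflow())
		return false;

	/* A const base pointer creates a read-only buffer. */
	libcamera::ByteStreamBuffer reader(static_cast<const uint8_t *>(storage),
					   sizeof(storage));
	uint32_t rversion;
	uint16_t rpayload;
	reader.read(&rversion);
	reader.skip(4);
	reader.read(&rpayload);

	/* Errors accumulate, so one check covers the whole sequence. */
	return !reader.overflow() && rversion == version && rpayload == payload;
}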
0
repos/libcamera/src
repos/libcamera/src/libcamera/camera_controls.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2019, Google Inc. * * Camera controls */ #include "libcamera/internal/camera_controls.h" #include <libcamera/camera.h> #include <libcamera/controls.h> /** * \file camera_controls.h * \brief Controls for Camera instances */ namespace libcamera { /** * \class CameraControlValidator * \brief A control validator for Camera instances * * This ControlValidator specialisation validates that controls exist in the * Camera associated with the validator. */ /** * \brief Construct a CameraControlValidator for the \a camera * \param[in] camera The camera */ CameraControlValidator::CameraControlValidator(Camera *camera) : camera_(camera) { } /** * \brief Retrieve the name of the control validator * \return The validator name */ const std::string &CameraControlValidator::name() const { return camera_->id(); } /** * \brief Validate a control * \param[in] id The control ID * \return True if the control is valid, false otherwise */ bool CameraControlValidator::validate(unsigned int id) const { const ControlInfoMap &controls = camera_->controls(); return controls.find(id) != controls.end(); } } /* namespace libcamera */
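/*
 * A hedged sketch (not part of the upstream file) of how a
 * CameraControlValidator can back a ControlList, so that set() calls are
 * checked against the controls the camera actually exposes. The `camera`
 * shared pointer is an assumption owned by the caller.
 */
#include <memory>

#include <libcamera/camera.h>
#include <libcamera/control_ids.h>
#include <libcamera/controls.h>

#include "libcamera/internal/camera_controls.h"

void buildControls(const std::shared_ptr<libcamera::Camera> &camera)
{
	/* The validator checks control IDs against camera->controls(). */
	libcamera::CameraControlValidator validator(camera.get());
	libcamera::ControlList list(libcamera::controls::controls, &validator);

	/* Rejected (and logged) if the camera doesn't support Brightness. */
	list.set(libcamera::controls::Brightness, 0.5f);
}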
0
repos/libcamera/src
repos/libcamera/src/libcamera/fence.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2021, Google Inc. * * Synchronization fence */ #include "libcamera/fence.h" namespace libcamera { /** * \file libcamera/fence.h * \brief Definition of the Fence class */ /** * \class Fence * \brief Synchronization primitive to manage resources * * The Fence class models a synchronization primitive that can be used by * applications to explicitly synchronize resource usage, and can be shared by * multiple processes. * * Fences are most commonly used in association with frame buffers. A * FrameBuffer can be associated with a Fence so that the library can wait for * the Fence to be signalled before allowing the camera device to actually * access the memory area described by the FrameBuffer. * * \sa Request::addBuffer() * * By using a fence, applications can then synchronize between frame buffer * consumers and producers, as for example a display device and a camera, to * guarantee that new data transfers only happen once the existing frames have * been displayed. * * A Fence can be realized by different event notification primitives, the most * common of which is represented by waiting for read events to happen on a * <a href="https://www.kernel.org/doc/html/latest/driver-api/sync_file.html">kernel sync file.</a> * This is currently the only mechanism supported by libcamera, but others can * be implemented by extending or subclassing this class and implementing * opportune handling in the core library. * * \internal * * The Fence class is a thin abstraction around a UniqueFD which simply allows * accessing it as a const reference or moving its ownership to the caller. * * Using the Fence class abstracts the underlying synchronization mechanism in * use and provides an interface towards other library components that will not * change when new synchronization primitives are added as fences. * * A Fence is constructed with a UniqueFD whose ownership is moved into the Fence. * A FrameBuffer can be associated with a Fence by passing it to the * Request::addBuffer() function, which will move the Fence into the FrameBuffer * itself. Once a Request is queued to the Camera, a preparation phase * guarantees that before actually applying the Request to the hardware, all the * valid fences of the frame buffers in a Request are correctly signalled. Once * a Fence has completed, the library will release the FrameBuffer fence so that * applications won't be allowed to access it. * * An optional timeout can be started while waiting for a fence to complete. If * waiting on a Fence fails for whatever reason, the FrameBuffer's fence is not * reset and is made available to applications for them to handle it, by * releasing the Fence to correctly close the underlying UniqueFD. * * A failure in waiting for a Fence to complete will result in the Request * completing in a failed state. * * \sa Request::prepare() * \sa PipelineHandler::doQueueRequests() */ /** * \brief Create a Fence * \param[in] fd The fence file descriptor * * The file descriptor ownership is moved to the Fence. */ Fence::Fence(UniqueFD fd) : fd_(std::move(fd)) { } /** * \fn Fence::isValid() * \brief Check if a Fence is valid * * A Fence is valid if the file descriptor it wraps is valid.
* * \return True if the Fence is valid, false otherwise */ /** * \fn Fence::fd() * \brief Retrieve a constant reference to the file descriptor * \return A const reference to the fence file descriptor */ /** * \fn Fence::release() * \brief Release the ownership of the file descriptor * * Release the ownership of the wrapped file descriptor by returning it to the * caller. * * \return The wrapped UniqueFD */ } /* namespace libcamera */
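/*
 * A brief sketch (not part of the upstream file) showing how a Fence wrapping
 * a sync-file descriptor travels with a buffer into a Request, as described
 * above. `request`, `stream`, `buffer` and `fenceFd` are assumptions owned by
 * the caller.
 */
#include <errno.h>
#include <memory>
#include <utility>

#include <libcamera/base/unique_fd.h>

#include <libcamera/fence.h>
#include <libcamera/framebuffer.h>
#include <libcamera/request.h>
#include <libcamera/stream.h>

int addFencedBuffer(libcamera::Request *request, libcamera::Stream *stream,
		    libcamera::FrameBuffer *buffer, int fenceFd)
{
	/* The Fence takes ownership of the file descriptor. */
	auto fence = std::make_unique<libcamera::Fence>(libcamera::UniqueFD(fenceFd));
	if (!fence->isValid())
		return -EINVAL;

	/* The library waits on the fence before the device touches the buffer. */
	return request->addBuffer(stream, buffer, std::move(fence));
}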
0
repos/libcamera/src
repos/libcamera/src/libcamera/v4l2_subdevice.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2019, Google Inc. * * V4L2 Subdevice */ #include "libcamera/internal/v4l2_subdevice.h" #include <fcntl.h> #include <iomanip> #include <regex> #include <sstream> #include <string.h> #include <sys/ioctl.h> #include <unistd.h> #include <linux/media-bus-format.h> #include <linux/v4l2-subdev.h> #include <libcamera/geometry.h> #include <libcamera/base/log.h> #include <libcamera/base/utils.h> #include "libcamera/internal/formats.h" #include "libcamera/internal/media_device.h" #include "libcamera/internal/media_object.h" /** * \file v4l2_subdevice.h * \brief V4L2 Subdevice API */ namespace libcamera { LOG_DECLARE_CATEGORY(V4L2) /** * \class MediaBusFormatInfo * \brief Information about media bus formats * * The MediaBusFormatInfo class groups together information describing a media * bus format. It facilitates handling of media bus formats by providing data * commonly used in pipeline handlers. * * \var MediaBusFormatInfo::name * \brief The format name as a human-readable string, used as the text * representation of the format * * \var MediaBusFormatInfo::code * \brief The media bus format code described by this instance (MEDIA_BUS_FMT_*) * * \var MediaBusFormatInfo::type * \brief The media bus format type * * \var MediaBusFormatInfo::bitsPerPixel * \brief The average number of bits per pixel * * The number of bits per pixel averages the total number of bits for all * colour components over the whole image, excluding any padding bits or * padding pixels. * * For formats that transmit multiple or fractional pixels per sample, the * value will differ from the bus width. * * Formats that don't have a fixed number of bits per pixel, such as compressed * formats, or device-specific embedded data formats, report 0 in this field. * * \var MediaBusFormatInfo::colourEncoding * \brief The colour encoding type * * This field is valid for Type::Image formats only. 
*/ /** * \enum MediaBusFormatInfo::Type * \brief The format type * * \var MediaBusFormatInfo::Type::Image * \brief The format describes image data * * \var MediaBusFormatInfo::Type::Metadata * \brief The format describes generic metadata * * \var MediaBusFormatInfo::Type::EmbeddedData * \brief The format describes sensor embedded data */ namespace { const std::map<uint32_t, MediaBusFormatInfo> mediaBusFormatInfo{ /* This table is sorted to match the order in linux/media-bus-format.h */ { MEDIA_BUS_FMT_RGB444_2X8_PADHI_BE, { .name = "RGB444_2X8_PADHI_BE", .code = MEDIA_BUS_FMT_RGB444_2X8_PADHI_BE, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 16, .colourEncoding = PixelFormatInfo::ColourEncodingRGB, } }, { MEDIA_BUS_FMT_RGB444_2X8_PADHI_LE, { .name = "RGB444_2X8_PADHI_LE", .code = MEDIA_BUS_FMT_RGB444_2X8_PADHI_LE, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 16, .colourEncoding = PixelFormatInfo::ColourEncodingRGB, } }, { MEDIA_BUS_FMT_RGB555_2X8_PADHI_BE, { .name = "RGB555_2X8_PADHI_BE", .code = MEDIA_BUS_FMT_RGB555_2X8_PADHI_BE, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 16, .colourEncoding = PixelFormatInfo::ColourEncodingRGB, } }, { MEDIA_BUS_FMT_RGB555_2X8_PADHI_LE, { .name = "RGB555_2X8_PADHI_LE", .code = MEDIA_BUS_FMT_RGB555_2X8_PADHI_LE, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 16, .colourEncoding = PixelFormatInfo::ColourEncodingRGB, } }, { MEDIA_BUS_FMT_RGB565_1X16, { .name = "RGB565_1X16", .code = MEDIA_BUS_FMT_RGB565_1X16, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 16, .colourEncoding = PixelFormatInfo::ColourEncodingRGB, } }, { MEDIA_BUS_FMT_BGR565_2X8_BE, { .name = "BGR565_2X8_BE", .code = MEDIA_BUS_FMT_BGR565_2X8_BE, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 16, .colourEncoding = PixelFormatInfo::ColourEncodingRGB, } }, { MEDIA_BUS_FMT_BGR565_2X8_LE, { .name = "BGR565_2X8_LE", .code = MEDIA_BUS_FMT_BGR565_2X8_LE, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 16, .colourEncoding = PixelFormatInfo::ColourEncodingRGB, } }, { MEDIA_BUS_FMT_RGB565_2X8_BE, { .name = "RGB565_2X8_BE", .code = MEDIA_BUS_FMT_RGB565_2X8_BE, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 16, .colourEncoding = PixelFormatInfo::ColourEncodingRGB, } }, { MEDIA_BUS_FMT_RGB565_2X8_LE, { .name = "RGB565_2X8_LE", .code = MEDIA_BUS_FMT_RGB565_2X8_LE, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 16, .colourEncoding = PixelFormatInfo::ColourEncodingRGB, } }, { MEDIA_BUS_FMT_RGB666_1X18, { .name = "RGB666_1X18", .code = MEDIA_BUS_FMT_RGB666_1X18, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 18, .colourEncoding = PixelFormatInfo::ColourEncodingRGB, } }, { MEDIA_BUS_FMT_BGR888_1X24, { .name = "BGR888_1X24", .code = MEDIA_BUS_FMT_BGR888_1X24, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 24, .colourEncoding = PixelFormatInfo::ColourEncodingRGB, } }, { MEDIA_BUS_FMT_RGB888_1X24, { .name = "RGB888_1X24", .code = MEDIA_BUS_FMT_RGB888_1X24, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 24, .colourEncoding = PixelFormatInfo::ColourEncodingRGB, } }, { MEDIA_BUS_FMT_RGB888_2X12_BE, { .name = "RGB888_2X12_BE", .code = MEDIA_BUS_FMT_RGB888_2X12_BE, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 24, .colourEncoding = PixelFormatInfo::ColourEncodingRGB, } }, { MEDIA_BUS_FMT_RGB888_2X12_LE, { .name = "RGB888_2X12_LE", .code = MEDIA_BUS_FMT_RGB888_2X12_LE, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 24, .colourEncoding = PixelFormatInfo::ColourEncodingRGB, } }, 
{ MEDIA_BUS_FMT_ARGB8888_1X32, { .name = "ARGB8888_1X32", .code = MEDIA_BUS_FMT_ARGB8888_1X32, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 32, .colourEncoding = PixelFormatInfo::ColourEncodingRGB, } }, { MEDIA_BUS_FMT_Y8_1X8, { .name = "Y8_1X8", .code = MEDIA_BUS_FMT_Y8_1X8, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 8, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, } }, { MEDIA_BUS_FMT_UV8_1X8, { .name = "UV8_1X8", .code = MEDIA_BUS_FMT_UV8_1X8, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 8, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, } }, { MEDIA_BUS_FMT_UYVY8_1_5X8, { .name = "UYVY8_1_5X8", .code = MEDIA_BUS_FMT_UYVY8_1_5X8, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 12, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, } }, { MEDIA_BUS_FMT_VYUY8_1_5X8, { .name = "VYUY8_1_5X8", .code = MEDIA_BUS_FMT_VYUY8_1_5X8, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 12, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, } }, { MEDIA_BUS_FMT_YUYV8_1_5X8, { .name = "YUYV8_1_5X8", .code = MEDIA_BUS_FMT_YUYV8_1_5X8, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 12, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, } }, { MEDIA_BUS_FMT_YVYU8_1_5X8, { .name = "YVYU8_1_5X8", .code = MEDIA_BUS_FMT_YVYU8_1_5X8, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 12, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, } }, { MEDIA_BUS_FMT_UYVY8_2X8, { .name = "UYVY8_2X8", .code = MEDIA_BUS_FMT_UYVY8_2X8, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 16, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, } }, { MEDIA_BUS_FMT_VYUY8_2X8, { .name = "VYUY8_2X8", .code = MEDIA_BUS_FMT_VYUY8_2X8, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 16, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, } }, { MEDIA_BUS_FMT_YUYV8_2X8, { .name = "YUYV8_2X8", .code = MEDIA_BUS_FMT_YUYV8_2X8, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 16, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, } }, { MEDIA_BUS_FMT_YVYU8_2X8, { .name = "YVYU8_2X8", .code = MEDIA_BUS_FMT_YVYU8_2X8, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 16, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, } }, { MEDIA_BUS_FMT_Y10_1X10, { .name = "Y10_1X10", .code = MEDIA_BUS_FMT_Y10_1X10, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 10, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, } }, { MEDIA_BUS_FMT_UYVY10_2X10, { .name = "UYVY10_2X10", .code = MEDIA_BUS_FMT_UYVY10_2X10, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 20, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, } }, { MEDIA_BUS_FMT_VYUY10_2X10, { .name = "VYUY10_2X10", .code = MEDIA_BUS_FMT_VYUY10_2X10, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 20, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, } }, { MEDIA_BUS_FMT_YUYV10_2X10, { .name = "YUYV10_2X10", .code = MEDIA_BUS_FMT_YUYV10_2X10, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 20, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, } }, { MEDIA_BUS_FMT_YVYU10_2X10, { .name = "YVYU10_2X10", .code = MEDIA_BUS_FMT_YVYU10_2X10, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 20, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, } }, { MEDIA_BUS_FMT_Y12_1X12, { .name = "Y12_1X12", .code = MEDIA_BUS_FMT_Y12_1X12, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 12, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, } }, { MEDIA_BUS_FMT_Y16_1X16, { .name = "Y16_1X16", .code = 
MEDIA_BUS_FMT_Y16_1X16, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 16, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, } }, { MEDIA_BUS_FMT_UYVY8_1X16, { .name = "UYVY8_1X16", .code = MEDIA_BUS_FMT_UYVY8_1X16, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 16, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, } }, { MEDIA_BUS_FMT_VYUY8_1X16, { .name = "VYUY8_1X16", .code = MEDIA_BUS_FMT_VYUY8_1X16, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 16, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, } }, { MEDIA_BUS_FMT_YUYV8_1X16, { .name = "YUYV8_1X16", .code = MEDIA_BUS_FMT_YUYV8_1X16, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 16, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, } }, { MEDIA_BUS_FMT_YVYU8_1X16, { .name = "YVYU8_1X16", .code = MEDIA_BUS_FMT_YVYU8_1X16, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 16, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, } }, { MEDIA_BUS_FMT_YDYUYDYV8_1X16, { .name = "YDYUYDYV8_1X16", .code = MEDIA_BUS_FMT_YDYUYDYV8_1X16, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 16, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, } }, { MEDIA_BUS_FMT_UYVY10_1X20, { .name = "UYVY10_1X20", .code = MEDIA_BUS_FMT_UYVY10_1X20, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 20, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, } }, { MEDIA_BUS_FMT_VYUY10_1X20, { .name = "VYUY10_1X20", .code = MEDIA_BUS_FMT_VYUY10_1X20, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 20, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, } }, { MEDIA_BUS_FMT_YUYV10_1X20, { .name = "YUYV10_1X20", .code = MEDIA_BUS_FMT_YUYV10_1X20, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 20, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, } }, { MEDIA_BUS_FMT_YVYU10_1X20, { .name = "YVYU10_1X20", .code = MEDIA_BUS_FMT_YVYU10_1X20, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 20, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, } }, { MEDIA_BUS_FMT_YUV8_1X24, { .name = "YUV8_1X24", .code = MEDIA_BUS_FMT_YUV8_1X24, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 24, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, } }, { MEDIA_BUS_FMT_YUV10_1X30, { .name = "YUV10_1X30", .code = MEDIA_BUS_FMT_YUV10_1X30, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 30, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, } }, { MEDIA_BUS_FMT_AYUV8_1X32, { .name = "AYUV8_1X32", .code = MEDIA_BUS_FMT_AYUV8_1X32, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 32, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, } }, { MEDIA_BUS_FMT_UYVY12_2X12, { .name = "UYVY12_2X12", .code = MEDIA_BUS_FMT_UYVY12_2X12, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 24, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, } }, { MEDIA_BUS_FMT_VYUY12_2X12, { .name = "VYUY12_2X12", .code = MEDIA_BUS_FMT_VYUY12_2X12, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 24, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, } }, { MEDIA_BUS_FMT_YUYV12_2X12, { .name = "YUYV12_2X12", .code = MEDIA_BUS_FMT_YUYV12_2X12, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 24, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, } }, { MEDIA_BUS_FMT_YVYU12_2X12, { .name = "YVYU12_2X12", .code = MEDIA_BUS_FMT_YVYU12_2X12, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 24, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, } }, { MEDIA_BUS_FMT_UYVY12_1X24, { .name = "UYVY12_1X24", .code = 
MEDIA_BUS_FMT_UYVY12_1X24, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 24, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, } }, { MEDIA_BUS_FMT_VYUY12_1X24, { .name = "VYUY12_1X24", .code = MEDIA_BUS_FMT_VYUY12_1X24, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 24, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, } }, { MEDIA_BUS_FMT_YUYV12_1X24, { .name = "YUYV12_1X24", .code = MEDIA_BUS_FMT_YUYV12_1X24, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 24, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, } }, { MEDIA_BUS_FMT_YVYU12_1X24, { .name = "YVYU12_1X24", .code = MEDIA_BUS_FMT_YVYU12_1X24, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 24, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, } }, { MEDIA_BUS_FMT_SBGGR8_1X8, { .name = "SBGGR8_1X8", .code = MEDIA_BUS_FMT_SBGGR8_1X8, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 8, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, } }, { MEDIA_BUS_FMT_SGBRG8_1X8, { .name = "SGBRG8_1X8", .code = MEDIA_BUS_FMT_SGBRG8_1X8, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 8, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, } }, { MEDIA_BUS_FMT_SGRBG8_1X8, { .name = "SGRBG8_1X8", .code = MEDIA_BUS_FMT_SGRBG8_1X8, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 8, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, } }, { MEDIA_BUS_FMT_SRGGB8_1X8, { .name = "SRGGB8_1X8", .code = MEDIA_BUS_FMT_SRGGB8_1X8, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 8, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, } }, { MEDIA_BUS_FMT_SBGGR10_ALAW8_1X8, { .name = "SBGGR10_ALAW8_1X8", .code = MEDIA_BUS_FMT_SBGGR10_ALAW8_1X8, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 8, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, } }, { MEDIA_BUS_FMT_SGBRG10_ALAW8_1X8, { .name = "SGBRG10_ALAW8_1X8", .code = MEDIA_BUS_FMT_SGBRG10_ALAW8_1X8, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 8, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, } }, { MEDIA_BUS_FMT_SGRBG10_ALAW8_1X8, { .name = "SGRBG10_ALAW8_1X8", .code = MEDIA_BUS_FMT_SGRBG10_ALAW8_1X8, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 8, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, } }, { MEDIA_BUS_FMT_SRGGB10_ALAW8_1X8, { .name = "SRGGB10_ALAW8_1X8", .code = MEDIA_BUS_FMT_SRGGB10_ALAW8_1X8, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 8, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, } }, { MEDIA_BUS_FMT_SBGGR10_DPCM8_1X8, { .name = "SBGGR10_DPCM8_1X8", .code = MEDIA_BUS_FMT_SBGGR10_DPCM8_1X8, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 8, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, } }, { MEDIA_BUS_FMT_SGBRG10_DPCM8_1X8, { .name = "SGBRG10_DPCM8_1X8", .code = MEDIA_BUS_FMT_SGBRG10_DPCM8_1X8, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 8, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, } }, { MEDIA_BUS_FMT_SGRBG10_DPCM8_1X8, { .name = "SGRBG10_DPCM8_1X8", .code = MEDIA_BUS_FMT_SGRBG10_DPCM8_1X8, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 8, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, } }, { MEDIA_BUS_FMT_SRGGB10_DPCM8_1X8, { .name = "SRGGB10_DPCM8_1X8", .code = MEDIA_BUS_FMT_SRGGB10_DPCM8_1X8, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 8, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, } }, { MEDIA_BUS_FMT_SBGGR10_2X8_PADHI_BE, { .name = "SBGGR10_2X8_PADHI_BE", .code = MEDIA_BUS_FMT_SBGGR10_2X8_PADHI_BE, .type = 
MediaBusFormatInfo::Type::Image, .bitsPerPixel = 16, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, } }, { MEDIA_BUS_FMT_SBGGR10_2X8_PADHI_LE, { .name = "SBGGR10_2X8_PADHI_LE", .code = MEDIA_BUS_FMT_SBGGR10_2X8_PADHI_LE, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 16, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, } }, { MEDIA_BUS_FMT_SBGGR10_2X8_PADLO_BE, { .name = "SBGGR10_2X8_PADLO_BE", .code = MEDIA_BUS_FMT_SBGGR10_2X8_PADLO_BE, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 16, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, } }, { MEDIA_BUS_FMT_SBGGR10_2X8_PADLO_LE, { .name = "SBGGR10_2X8_PADLO_LE", .code = MEDIA_BUS_FMT_SBGGR10_2X8_PADLO_LE, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 16, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, } }, { MEDIA_BUS_FMT_SBGGR10_1X10, { .name = "SBGGR10_1X10", .code = MEDIA_BUS_FMT_SBGGR10_1X10, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 10, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, } }, { MEDIA_BUS_FMT_SGBRG10_1X10, { .name = "SGBRG10_1X10", .code = MEDIA_BUS_FMT_SGBRG10_1X10, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 10, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, } }, { MEDIA_BUS_FMT_SGRBG10_1X10, { .name = "SGRBG10_1X10", .code = MEDIA_BUS_FMT_SGRBG10_1X10, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 10, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, } }, { MEDIA_BUS_FMT_SRGGB10_1X10, { .name = "SRGGB10_1X10", .code = MEDIA_BUS_FMT_SRGGB10_1X10, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 10, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, } }, { MEDIA_BUS_FMT_SBGGR12_1X12, { .name = "SBGGR12_1X12", .code = MEDIA_BUS_FMT_SBGGR12_1X12, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 12, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, } }, { MEDIA_BUS_FMT_SGBRG12_1X12, { .name = "SGBRG12_1X12", .code = MEDIA_BUS_FMT_SGBRG12_1X12, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 12, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, } }, { MEDIA_BUS_FMT_SGRBG12_1X12, { .name = "SGRBG12_1X12", .code = MEDIA_BUS_FMT_SGRBG12_1X12, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 12, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, } }, { MEDIA_BUS_FMT_SRGGB12_1X12, { .name = "SRGGB12_1X12", .code = MEDIA_BUS_FMT_SRGGB12_1X12, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 12, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, } }, { MEDIA_BUS_FMT_SBGGR14_1X14, { .name = "SBGGR14_1X14", .code = MEDIA_BUS_FMT_SBGGR14_1X14, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 14, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, } }, { MEDIA_BUS_FMT_SGBRG14_1X14, { .name = "SGBRG14_1X14", .code = MEDIA_BUS_FMT_SGBRG14_1X14, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 14, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, } }, { MEDIA_BUS_FMT_SGRBG14_1X14, { .name = "SGRBG14_1X14", .code = MEDIA_BUS_FMT_SGRBG14_1X14, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 14, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, } }, { MEDIA_BUS_FMT_SRGGB14_1X14, { .name = "SRGGB14_1X14", .code = MEDIA_BUS_FMT_SRGGB14_1X14, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 14, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, } }, { MEDIA_BUS_FMT_SBGGR16_1X16, { .name = "SBGGR16_1X16", .code = MEDIA_BUS_FMT_SBGGR16_1X16, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 16, .colourEncoding = 
PixelFormatInfo::ColourEncodingRAW } }, { MEDIA_BUS_FMT_SGBRG16_1X16, { .name = "SGBRG16_1X16", .code = MEDIA_BUS_FMT_SGBRG16_1X16, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 16, .colourEncoding = PixelFormatInfo::ColourEncodingRAW } }, { MEDIA_BUS_FMT_SGRBG16_1X16, { .name = "SGRBG16_1X16", .code = MEDIA_BUS_FMT_SGRBG16_1X16, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 16, .colourEncoding = PixelFormatInfo::ColourEncodingRAW } }, { MEDIA_BUS_FMT_SRGGB16_1X16, { .name = "SRGGB16_1X16", .code = MEDIA_BUS_FMT_SRGGB16_1X16, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 16, .colourEncoding = PixelFormatInfo::ColourEncodingRAW } }, /* \todo Clarify colour encoding for HSV formats */ { MEDIA_BUS_FMT_AHSV8888_1X32, { .name = "AHSV8888_1X32", .code = MEDIA_BUS_FMT_AHSV8888_1X32, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 32, .colourEncoding = PixelFormatInfo::ColourEncodingRGB, } }, { MEDIA_BUS_FMT_JPEG_1X8, { .name = "JPEG_1X8", .code = MEDIA_BUS_FMT_JPEG_1X8, .type = MediaBusFormatInfo::Type::Image, .bitsPerPixel = 8, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, } }, { MEDIA_BUS_FMT_METADATA_FIXED, { .name = "METADATA_FIXED", .code = MEDIA_BUS_FMT_METADATA_FIXED, .type = MediaBusFormatInfo::Type::Metadata, .bitsPerPixel = 0, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, } }, }; } /* namespace */ /** * \fn bool MediaBusFormatInfo::isValid() const * \brief Check if the media bus format info is valid * \return True if the media bus format info is valid, false otherwise */ /** * \brief Retrieve information about a media bus format * \param[in] code The media bus format code * \return The MediaBusFormatInfo describing the \a code if known, or an invalid * MediaBusFormatInfo otherwise */ const MediaBusFormatInfo &MediaBusFormatInfo::info(uint32_t code) { static const MediaBusFormatInfo invalid{}; const auto it = mediaBusFormatInfo.find(code); if (it == mediaBusFormatInfo.end()) { LOG(V4L2, Warning) << "Unsupported media bus format " << utils::hex(code, 4); return invalid; } return it->second; } /** * \struct V4L2SubdeviceCapability * \brief struct v4l2_subdev_capability object wrapper and helpers * * The V4L2SubdeviceCapability structure manages the information returned by the * VIDIOC_SUBDEV_QUERYCAP ioctl. */ /** * \fn V4L2SubdeviceCapability::isReadOnly() * \brief Retrieve if a subdevice is registered as read-only * * A V4L2 subdevice is registered as read-only if V4L2_SUBDEV_CAP_RO_SUBDEV * is listed as part of its capabilities. * * \return True if the subdevice is registered as read-only, false otherwise */ /** * \fn V4L2SubdeviceCapability::hasStreams() * \brief Retrieve if a subdevice supports the V4L2 streams API * \return True if the subdevice supports the streams API, false otherwise */ /** * \struct V4L2SubdeviceFormat * \brief The V4L2 sub-device image format and sizes * * This structure describes the format of images when transported between * separate components connected through a physical bus, such as an image sensor * and an image receiver, or between components of the same System-on-Chip that * realize an image transformation pipeline. * * The format of images when transported on physical interconnections is known * as the "media bus format", and it is identified by a resolution and a pixel * format identification code, known as the "media bus code", not to be confused * with the fourcc code that identifies the format of images when stored in memory * (see V4L2VideoDevice::V4L2DeviceFormat).
* * Media Bus formats supported by the V4L2 APIs are described in Section * 4.15.3.4.1 of the "Part I - Video for Linux API" chapter of the "Linux Media * Infrastructure userspace API", part of the Linux kernel documentation. * * Image media bus formats are properties of the subdev pads. When images are * transported between two media pads identified by a 0-indexed number, the * image bus format configured on the two pads should match (according to the * underlying driver format matching criteria) in order to prepare for a * successful streaming operation. For a more detailed description of the image * format negotiation process when performed between V4L2 subdevices, refer to * Section 4.15.3.1 of the above mentioned Linux kernel documentation section. */ /** * \var V4L2SubdeviceFormat::code * \brief The image format bus code */ /** * \var V4L2SubdeviceFormat::size * \brief The image size in pixels */ /** * \var V4L2SubdeviceFormat::colorSpace * \brief The color space of the pixels * * The color space of the image. When setting the format this may be * unset, in which case the driver gets to use its default color space. * After being set, this value should contain the color space that * was actually used. If this value is unset, then the color space chosen * by the driver could not be represented by the ColorSpace class (and * should probably be added). * * It is up to the pipeline handler or application to check if the * resulting color space is acceptable. */ /** * \brief Assemble and return a string describing the format * \return A string describing the V4L2SubdeviceFormat */ const std::string V4L2SubdeviceFormat::toString() const { std::stringstream ss; ss << *this; return ss.str(); } /** * \brief Insert a text representation of a V4L2SubdeviceFormat into an output * stream * \param[in] out The output stream * \param[in] f The V4L2SubdeviceFormat * \return The output stream \a out */ std::ostream &operator<<(std::ostream &out, const V4L2SubdeviceFormat &f) { out << f.size << "-"; const auto it = mediaBusFormatInfo.find(f.code); if (it == mediaBusFormatInfo.end()) out << utils::hex(f.code, 4); else out << it->second.name; return out; } /** * \class V4L2Subdevice * \brief A V4L2 subdevice as exposed by the Linux kernel * * The V4L2Subdevice class provides an API to the "Sub-device interface" as * described in section 4.15 of the "Linux Media Infrastructure userspace API" * chapter of the Linux Kernel documentation. * * A V4L2Subdevice is constructed from a MediaEntity instance, using the system * path of the entity's device node. No API call other than open(), isOpen() * and close() shall be called on an unopened device instance. Upon destruction * any device left open will be closed, and any resources released. */ /** * \typedef V4L2Subdevice::Formats * \brief A map of supported media bus formats to frame sizes */ /** * \enum V4L2Subdevice::Whence * \brief Specify the type of format for getFormat() and setFormat() operations * \var V4L2Subdevice::ActiveFormat * \brief The format operation applies to ACTIVE formats * \var V4L2Subdevice::TryFormat * \brief The format operation applies to TRY formats */ /** * \class V4L2Subdevice::Stream * \brief V4L2 subdevice stream * * This class identifies a subdev stream, by bundling the pad number with the * stream number. It is used in all stream-aware functions of the V4L2Subdevice * class to identify the stream the functions operate on. 
* * \var V4L2Subdevice::Stream::pad * \brief The 0-indexed pad number * * \var V4L2Subdevice::Stream::stream * \brief The stream number */ /** * \fn V4L2Subdevice::Stream::Stream() * \brief Construct a Stream with pad and stream set to 0 */ /** * \fn V4L2Subdevice::Stream::Stream(unsigned int pad, unsigned int stream) * \brief Construct a Stream with a given \a pad and \a stream number * \param[in] pad The 0-indexed pad number * \param[in] stream The stream number */ /** * \brief Compare streams for equality * \return True if the two streams are equal, false otherwise */ bool operator==(const V4L2Subdevice::Stream &lhs, const V4L2Subdevice::Stream &rhs) { return lhs.pad == rhs.pad && lhs.stream == rhs.stream; } /** * \fn bool operator!=(const V4L2Subdevice::Stream &lhs, const V4L2Subdevice::Stream &rhs) * \brief Compare streams for inequality * \return True if the two streams are not equal, false otherwise */ /** * \brief Insert a text representation of a V4L2Subdevice::Stream into an * output stream * \param[in] out The output stream * \param[in] stream The V4L2Subdevice::Stream * \return The output stream \a out */ std::ostream &operator<<(std::ostream &out, const V4L2Subdevice::Stream &stream) { out << stream.pad << "/" << stream.stream; return out; } /** * \class V4L2Subdevice::Route * \brief V4L2 subdevice routing table entry * * This class models a route in the subdevice routing table. It is similar to * the v4l2_subdev_route structure, but uses the V4L2Subdevice::Stream class * for easier usage with the V4L2Subdevice stream-aware functions. * * \var V4L2Subdevice::Route::sink * \brief The sink stream of the route * * \var V4L2Subdevice::Route::source * \brief The source stream of the route * * \var V4L2Subdevice::Route::flags * \brief The route flags (V4L2_SUBDEV_ROUTE_FL_*) */ /** * \fn V4L2Subdevice::Route::Route() * \brief Construct a Route with default streams */ /** * \fn V4L2Subdevice::Route::Route(const Stream &sink, const Stream &source, * uint32_t flags) * \brief Construct a Route from \a sink to \a source * \param[in] sink The sink stream * \param[in] source The source stream * \param[in] flags The route flags */ /** * \brief Insert a text representation of a V4L2Subdevice::Route into an * output stream * \param[in] out The output stream * \param[in] route The V4L2Subdevice::Route * \return The output stream \a out */ std::ostream &operator<<(std::ostream &out, const V4L2Subdevice::Route &route) { out << route.sink << " -> " << route.source << " (" << utils::hex(route.flags) << ")"; return out; } /** * \typedef V4L2Subdevice::Routing * \brief V4L2 subdevice routing table * * This class stores a subdevice routing table as a vector of routes.
*/ /** * \brief Insert a text representation of a V4L2Subdevice::Routing into an * output stream * \param[in] out The output stream * \param[in] routing The V4L2Subdevice::Routing * \return The output stream \a out */ std::ostream &operator<<(std::ostream &out, const V4L2Subdevice::Routing &routing) { for (const auto &[i, route] : utils::enumerate(routing)) { out << "[" << i << "] " << route; if (i != routing.size() - 1) out << ", "; } return out; } /** * \brief Create a V4L2 subdevice from a MediaEntity using its device node * path */ V4L2Subdevice::V4L2Subdevice(const MediaEntity *entity) : V4L2Device(entity->deviceNode()), entity_(entity) { } V4L2Subdevice::~V4L2Subdevice() { close(); } /** * \brief Open a V4L2 subdevice * \return 0 on success or a negative error code otherwise */ int V4L2Subdevice::open() { int ret = V4L2Device::open(O_RDWR); if (ret) return ret; /* * Try to query the subdev capabilities. The VIDIOC_SUBDEV_QUERYCAP API * was introduced in kernel v5.8, ENOTTY errors must be ignored to * support older kernels. */ caps_ = {}; ret = ioctl(VIDIOC_SUBDEV_QUERYCAP, &caps_); if (ret < 0 && errno != ENOTTY) { ret = -errno; LOG(V4L2, Error) << "Unable to query capabilities: " << strerror(-ret); return ret; } /* If the subdev supports streams, enable the streams API. */ if (caps_.hasStreams()) { struct v4l2_subdev_client_capability clientCaps{}; clientCaps.capabilities = V4L2_SUBDEV_CLIENT_CAP_STREAMS; ret = ioctl(VIDIOC_SUBDEV_S_CLIENT_CAP, &clientCaps); if (ret < 0) { ret = -errno; LOG(V4L2, Error) << "Unable to set client capabilities: " << strerror(-ret); return ret; } } return 0; } /** * \fn V4L2Subdevice::entity() * \brief Retrieve the media entity associated with the subdevice * \return The subdevice's associated media entity. */ /** * \brief Get selection rectangle \a rect for \a target * \param[in] stream The stream the rectangle is retrieved from * \param[in] target The selection target defined by the V4L2_SEL_TGT_* flags * \param[out] rect The retrieved selection rectangle * * \todo Define a V4L2SelectionTarget enum for the selection target * * \return 0 on success or a negative error code otherwise */ int V4L2Subdevice::getSelection(const Stream &stream, unsigned int target, Rectangle *rect) { struct v4l2_subdev_selection sel = {}; sel.which = V4L2_SUBDEV_FORMAT_ACTIVE; sel.pad = stream.pad; sel.stream = stream.stream; sel.target = target; sel.flags = 0; int ret = ioctl(VIDIOC_SUBDEV_G_SELECTION, &sel); if (ret < 0) { LOG(V4L2, Error) << "Unable to get rectangle " << target << " on pad " << stream << ": " << strerror(-ret); return ret; } rect->x = sel.r.left; rect->y = sel.r.top; rect->width = sel.r.width; rect->height = sel.r.height; return 0; } /** * \fn V4L2Subdevice::getSelection(unsigned int pad, unsigned int target, * Rectangle *rect) * \brief Get selection rectangle \a rect for \a target * \param[in] pad The 0-indexed pad number the rectangle is retrieved from * \param[in] target The selection target defined by the V4L2_SEL_TGT_* flags * \param[out] rect The retrieved selection rectangle * * \return 0 on success or a negative error code otherwise */ /** * \brief Set selection rectangle \a rect for \a target * \param[in] stream The stream the rectangle is to be applied to * \param[in] target The selection target defined by the V4L2_SEL_TGT_* flags * \param[inout] rect The selection rectangle to be applied * * \todo Define a V4L2SelectionTarget enum for the selection target * * \return 0 on success or a negative error code otherwise */ int 
V4L2Subdevice::setSelection(const Stream &stream, unsigned int target, Rectangle *rect) { struct v4l2_subdev_selection sel = {}; sel.which = V4L2_SUBDEV_FORMAT_ACTIVE; sel.pad = stream.pad; sel.stream = stream.stream; sel.target = target; sel.flags = 0; sel.r.left = rect->x; sel.r.top = rect->y; sel.r.width = rect->width; sel.r.height = rect->height; int ret = ioctl(VIDIOC_SUBDEV_S_SELECTION, &sel); if (ret < 0) { LOG(V4L2, Error) << "Unable to set rectangle " << target << " on pad " << stream << ": " << strerror(-ret); return ret; } rect->x = sel.r.left; rect->y = sel.r.top; rect->width = sel.r.width; rect->height = sel.r.height; return 0; } /** * \fn V4L2Subdevice::setSelection(unsigned int pad, unsigned int target, * Rectangle *rect) * \brief Set selection rectangle \a rect for \a target * \param[in] pad The 0-indexed pad number the rectangle is to be applied to * \param[in] target The selection target defined by the V4L2_SEL_TGT_* flags * \param[inout] rect The selection rectangle to be applied * * \todo Define a V4L2SelectionTarget enum for the selection target * * \return 0 on success or a negative error code otherwise */ /** * \brief Enumerate all media bus codes and frame sizes on a \a stream * \param[in] stream The stream to enumerate formats for * * Enumerate all media bus codes and frame sizes supported by the subdevice on * a \a stream. * * \return A list of the supported device formats */ V4L2Subdevice::Formats V4L2Subdevice::formats(const Stream &stream) { Formats formats; if (stream.pad >= entity_->pads().size()) { LOG(V4L2, Error) << "Invalid pad: " << stream.pad; return {}; } for (unsigned int code : enumPadCodes(stream)) { std::vector<SizeRange> sizes = enumPadSizes(stream, code); if (sizes.empty()) return {}; const auto inserted = formats.insert({ code, sizes }); if (!inserted.second) { LOG(V4L2, Error) << "Could not add sizes for media bus code " << code << " on pad " << stream.pad; return {}; } } return formats; } /** * \fn V4L2Subdevice::formats(unsigned int pad) * \brief Enumerate all media bus codes and frame sizes on a \a pad * \param[in] pad The 0-indexed pad number to enumerate formats on * * Enumerate all media bus codes and frame sizes supported by the subdevice on * a \a pad * * \return A list of the supported device formats */ std::optional<ColorSpace> V4L2Subdevice::toColorSpace(const v4l2_mbus_framefmt &format) const { /* * Only image formats have a color space, for other formats (such as * metadata formats) the color space concept isn't applicable. V4L2 * subdev drivers return a colorspace set to V4L2_COLORSPACE_DEFAULT in * that case (as well as for image formats when the driver hasn't * bothered implementing color space support). Check the colorspace * field here and return std::nullopt directly to avoid logging a * warning. 
*/ if (format.colorspace == V4L2_COLORSPACE_DEFAULT) return std::nullopt; PixelFormatInfo::ColourEncoding colourEncoding; const MediaBusFormatInfo &info = MediaBusFormatInfo::info(format.code); if (info.isValid()) { colourEncoding = info.colourEncoding; } else { LOG(V4L2, Warning) << "Unknown subdev format " << utils::hex(format.code, 4) << ", defaulting to RGB encoding"; colourEncoding = PixelFormatInfo::ColourEncodingRGB; } return V4L2Device::toColorSpace(format, colourEncoding); } /** * \brief Retrieve the image format set on one of the V4L2 subdevice streams * \param[in] stream The stream the format is to be retrieved from * \param[out] format The image bus format * \param[in] whence The format to get, \ref V4L2Subdevice::ActiveFormat * "ActiveFormat" or \ref V4L2Subdevice::TryFormat "TryFormat" * \return 0 on success or a negative error code otherwise */ int V4L2Subdevice::getFormat(const Stream &stream, V4L2SubdeviceFormat *format, Whence whence) { struct v4l2_subdev_format subdevFmt = {}; subdevFmt.which = whence; subdevFmt.pad = stream.pad; subdevFmt.stream = stream.stream; int ret = ioctl(VIDIOC_SUBDEV_G_FMT, &subdevFmt); if (ret) { LOG(V4L2, Error) << "Unable to get format on pad " << stream << ": " << strerror(-ret); return ret; } format->size.width = subdevFmt.format.width; format->size.height = subdevFmt.format.height; format->code = subdevFmt.format.code; format->colorSpace = toColorSpace(subdevFmt.format); return 0; } /** * \fn V4L2Subdevice::getFormat(unsigned int pad, V4L2SubdeviceFormat *format, * Whence whence) * \brief Retrieve the image format set on one of the V4L2 subdevice pads * \param[in] pad The 0-indexed pad number the format is to be retrieved from * \param[out] format The image bus format * \param[in] whence The format to get, \ref V4L2Subdevice::ActiveFormat * "ActiveFormat" or \ref V4L2Subdevice::TryFormat "TryFormat" * \return 0 on success or a negative error code otherwise */ /** * \brief Set an image format on one of the V4L2 subdevice pads * \param[in] stream The stream the format is to be applied to * \param[inout] format The image bus format to apply to the stream * \param[in] whence The format to set, \ref V4L2Subdevice::ActiveFormat * "ActiveFormat" or \ref V4L2Subdevice::TryFormat "TryFormat" * * Apply the requested image format to the desired stream and return the * actually applied format parameters, as getFormat() would do. * * \return 0 on success or a negative error code otherwise */ int V4L2Subdevice::setFormat(const Stream &stream, V4L2SubdeviceFormat *format, Whence whence) { struct v4l2_subdev_format subdevFmt = {}; subdevFmt.which = whence; subdevFmt.pad = stream.pad; subdevFmt.stream = stream.stream; subdevFmt.format.width = format->size.width; subdevFmt.format.height = format->size.height; subdevFmt.format.code = format->code; subdevFmt.format.field = V4L2_FIELD_NONE; if (format->colorSpace) { fromColorSpace(format->colorSpace, subdevFmt.format); /* The CSC flag is only applicable to source pads. 
*/ if (entity_->pads()[stream.pad]->flags() & MEDIA_PAD_FL_SOURCE) subdevFmt.format.flags |= V4L2_MBUS_FRAMEFMT_SET_CSC; } int ret = ioctl(VIDIOC_SUBDEV_S_FMT, &subdevFmt); if (ret) { LOG(V4L2, Error) << "Unable to set format on pad " << stream << ": " << strerror(-ret); return ret; } format->size.width = subdevFmt.format.width; format->size.height = subdevFmt.format.height; format->code = subdevFmt.format.code; format->colorSpace = toColorSpace(subdevFmt.format); return 0; } /** * \fn V4L2Subdevice::setFormat(unsigned int pad, V4L2SubdeviceFormat *format, * Whence whence) * \brief Set an image format on one of the V4L2 subdevice pads * \param[in] pad The 0-indexed pad number the format is to be applied to * \param[inout] format The image bus format to apply to the subdevice's pad * \param[in] whence The format to set, \ref V4L2Subdevice::ActiveFormat * "ActiveFormat" or \ref V4L2Subdevice::TryFormat "TryFormat" * * Apply the requested image format to the desired media pad and return the * actually applied format parameters, as getFormat() would do. * * \return 0 on success or a negative error code otherwise */ namespace { void routeFromKernel(V4L2Subdevice::Route &route, const struct v4l2_subdev_route &kroute) { route.sink.pad = kroute.sink_pad; route.sink.stream = kroute.sink_stream; route.source.pad = kroute.source_pad; route.source.stream = kroute.source_stream; route.flags = kroute.flags; } void routeToKernel(const V4L2Subdevice::Route &route, struct v4l2_subdev_route &kroute) { kroute.sink_pad = route.sink.pad; kroute.sink_stream = route.sink.stream; kroute.source_pad = route.source.pad; kroute.source_stream = route.source.stream; kroute.flags = route.flags; } /* * Legacy routing support for pre-v6.10-rc1 kernels. Drop when v6.12-rc1 gets * released. 
*/ struct v4l2_subdev_routing_legacy { __u32 which; __u32 num_routes; __u64 routes; __u32 reserved[6]; }; #define VIDIOC_SUBDEV_G_ROUTING_LEGACY _IOWR('V', 38, struct v4l2_subdev_routing_legacy) #define VIDIOC_SUBDEV_S_ROUTING_LEGACY _IOWR('V', 39, struct v4l2_subdev_routing_legacy) } /* namespace */ int V4L2Subdevice::getRoutingLegacy(Routing *routing, Whence whence) { struct v4l2_subdev_routing_legacy rt = {}; rt.which = whence; int ret = ioctl(VIDIOC_SUBDEV_G_ROUTING_LEGACY, &rt); if (ret == 0 || ret == -ENOTTY) return ret; if (ret != -ENOSPC) { LOG(V4L2, Error) << "Failed to retrieve number of routes: " << strerror(-ret); return ret; } std::vector<struct v4l2_subdev_route> routes{ rt.num_routes }; rt.routes = reinterpret_cast<uintptr_t>(routes.data()); ret = ioctl(VIDIOC_SUBDEV_G_ROUTING_LEGACY, &rt); if (ret) { LOG(V4L2, Error) << "Failed to retrieve routes: " << strerror(-ret); return ret; } if (rt.num_routes != routes.size()) { LOG(V4L2, Error) << "Invalid number of routes"; return -EINVAL; } routing->resize(rt.num_routes); for (const auto &[i, route] : utils::enumerate(routes)) routeFromKernel((*routing)[i], route); return 0; } /** * \brief Retrieve the subdevice's internal routing table * \param[out] routing The routing table * \param[in] whence The routing table to get, \ref V4L2Subdevice::ActiveFormat * "ActiveFormat" or \ref V4L2Subdevice::TryFormat "TryFormat" * * \return 0 on success or a negative error code otherwise */ int V4L2Subdevice::getRouting(Routing *routing, Whence whence) { routing->clear(); if (!caps_.hasStreams()) return 0; struct v4l2_subdev_routing rt = {}; rt.which = whence; int ret = ioctl(VIDIOC_SUBDEV_G_ROUTING, &rt); if (ret == -ENOTTY) return V4L2Subdevice::getRoutingLegacy(routing, whence); if (ret) { LOG(V4L2, Error) << "Failed to retrieve number of routes: " << strerror(-ret); return ret; } if (!rt.num_routes) return 0; std::vector<struct v4l2_subdev_route> routes{ rt.num_routes }; rt.routes = reinterpret_cast<uintptr_t>(routes.data()); rt.len_routes = rt.num_routes; rt.num_routes = 0; ret = ioctl(VIDIOC_SUBDEV_G_ROUTING, &rt); if (ret) { LOG(V4L2, Error) << "Failed to retrieve routes: " << strerror(-ret); return ret; } if (rt.num_routes != routes.size()) { LOG(V4L2, Error) << "Invalid number of routes"; return -EINVAL; } routing->resize(rt.num_routes); for (const auto &[i, route] : utils::enumerate(routes)) routeFromKernel((*routing)[i], route); return 0; } int V4L2Subdevice::setRoutingLegacy(Routing *routing, Whence whence) { std::vector<struct v4l2_subdev_route> routes{ routing->size() }; for (const auto &[i, route] : utils::enumerate(*routing)) routeToKernel(route, routes[i]); struct v4l2_subdev_routing_legacy rt = {}; rt.which = whence; rt.num_routes = routes.size(); rt.routes = reinterpret_cast<uintptr_t>(routes.data()); int ret = ioctl(VIDIOC_SUBDEV_S_ROUTING_LEGACY, &rt); if (ret) { LOG(V4L2, Error) << "Failed to set routes: " << strerror(-ret); return ret; } routes.resize(rt.num_routes); routing->resize(rt.num_routes); for (const auto &[i, route] : utils::enumerate(routes)) routeFromKernel((*routing)[i], route); return 0; } /** * \brief Set a routing table on the V4L2 subdevice * \param[inout] routing The routing table * \param[in] whence The routing table to set, \ref V4L2Subdevice::ActiveFormat * "ActiveFormat" or \ref V4L2Subdevice::TryFormat "TryFormat" * * Apply to the V4L2 subdevice the routing table \a routing and update its * content to reflect the actually applied routing table as getRouting() would * do. 
 * * \return 0 on success or a negative error code otherwise */ int V4L2Subdevice::setRouting(Routing *routing, Whence whence) { if (!caps_.hasStreams()) { routing->clear(); return 0; } std::vector<struct v4l2_subdev_route> routes{ routing->size() }; for (const auto &[i, route] : utils::enumerate(*routing)) routeToKernel(route, routes[i]); struct v4l2_subdev_routing rt = {}; rt.which = whence; rt.len_routes = routes.size(); rt.num_routes = routes.size(); rt.routes = reinterpret_cast<uintptr_t>(routes.data()); int ret = ioctl(VIDIOC_SUBDEV_S_ROUTING, &rt); if (ret == -ENOTTY) return setRoutingLegacy(routing, whence); if (ret) { LOG(V4L2, Error) << "Failed to set routes: " << strerror(-ret); return ret; } /* * The kernel may want to return more routes than we have space for. In * that event, we must issue a VIDIOC_SUBDEV_G_ROUTING call to retrieve * the additional routes. */ if (rt.num_routes > routes.size()) { routes.resize(rt.num_routes); rt.len_routes = rt.num_routes; rt.num_routes = 0; ret = ioctl(VIDIOC_SUBDEV_G_ROUTING, &rt); if (ret) { LOG(V4L2, Error) << "Failed to retrieve routes: " << strerror(-ret); return ret; } } if (rt.num_routes != routes.size()) { LOG(V4L2, Error) << "Invalid number of routes"; return -EINVAL; } routing->resize(rt.num_routes); for (const auto &[i, route] : utils::enumerate(routes)) routeFromKernel((*routing)[i], route); return 0; } /** * \brief Retrieve the model name of the device * * The model name allows identification of the specific device model. This can * be used to infer device characteristics, for instance to determine the * analogue gain model of a camera sensor based on the sensor model name. * * Neither the V4L2 API nor the Media Controller API exposes an explicit model * name. This function implements a heuristic to extract the model name from * the subdevice's entity name. This should produce accurate results for * I2C-based devices. If the heuristic can't match a known naming pattern, * the function returns the full entity name. * * \return The model name of the device */ const std::string &V4L2Subdevice::model() { if (!model_.empty()) return model_; /* * Extract model name from the media entity name. * * There is no standardized naming scheme for sensor or other entities * in the Linux kernel at the moment. * * - The most common rule, used by I2C sensors, associates the model * name with the I2C bus number and address (e.g. 'imx219 0-0010'). * * - When the sensor exposes multiple subdevs, the model name is * usually followed by a function name, as in the smiapp driver (e.g. * 'jt8ew9 pixel_array 0-0010'). * * - The vimc driver names its sensors 'Sensor A' and 'Sensor B'. * * Other schemes probably exist. As a best-effort heuristic, use the * part of the entity name before the first space if the name contains * an I2C address, and use the full entity name otherwise.
*/ std::string entityName = entity_->name(); std::regex i2cRegex{ " [0-9]+-[0-9a-f]{4}" }; std::smatch match; std::string model; if (std::regex_search(entityName, match, i2cRegex)) model_ = entityName.substr(0, entityName.find(' ')); else model_ = entityName; return model_; } /** * \fn V4L2Subdevice::caps() * \brief Retrieve the subdevice V4L2 capabilities * \return The subdevice V4L2 capabilities */ /** * \brief Create a new video subdevice instance from \a entity in media device * \a media * \param[in] media The media device where the entity is registered * \param[in] entity The media entity name * * \return A newly created V4L2Subdevice on success, nullptr otherwise */ std::unique_ptr<V4L2Subdevice> V4L2Subdevice::fromEntityName(const MediaDevice *media, const std::string &entity) { MediaEntity *mediaEntity = media->getEntityByName(entity); if (!mediaEntity) return nullptr; return std::make_unique<V4L2Subdevice>(mediaEntity); } std::string V4L2Subdevice::logPrefix() const { return "'" + entity_->name() + "'"; } std::vector<unsigned int> V4L2Subdevice::enumPadCodes(const Stream &stream) { std::vector<unsigned int> codes; int ret; for (unsigned int index = 0; ; index++) { struct v4l2_subdev_mbus_code_enum mbusEnum = {}; mbusEnum.pad = stream.pad; mbusEnum.stream = stream.stream; mbusEnum.index = index; mbusEnum.which = V4L2_SUBDEV_FORMAT_ACTIVE; ret = ioctl(VIDIOC_SUBDEV_ENUM_MBUS_CODE, &mbusEnum); if (ret) break; codes.push_back(mbusEnum.code); } if (ret < 0 && ret != -EINVAL) { LOG(V4L2, Error) << "Unable to enumerate formats on pad " << stream << ": " << strerror(-ret); return {}; } return codes; } std::vector<SizeRange> V4L2Subdevice::enumPadSizes(const Stream &stream, unsigned int code) { std::vector<SizeRange> sizes; int ret; for (unsigned int index = 0;; index++) { struct v4l2_subdev_frame_size_enum sizeEnum = {}; sizeEnum.index = index; sizeEnum.pad = stream.pad; sizeEnum.stream = stream.stream; sizeEnum.code = code; sizeEnum.which = V4L2_SUBDEV_FORMAT_ACTIVE; ret = ioctl(VIDIOC_SUBDEV_ENUM_FRAME_SIZE, &sizeEnum); if (ret) break; sizes.emplace_back(Size{ sizeEnum.min_width, sizeEnum.min_height }, Size{ sizeEnum.max_width, sizeEnum.max_height }); } if (ret < 0 && ret != -EINVAL && ret != -ENOTTY) { LOG(V4L2, Error) << "Unable to enumerate sizes on pad " << stream << ": " << strerror(-ret); return {}; } return sizes; } } /* namespace libcamera */
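The stream-aware format API above composes as follows in practice. This is an illustrative sketch, not part of the original file: the entity name "imx219 0-0010", the helper name and the selection policy (first media bus code, largest advertised size) are assumptions for the example.

#include <cerrno>
#include <memory>

#include "libcamera/internal/media_device.h"
#include "libcamera/internal/v4l2_subdevice.h"

using namespace libcamera;

/* Hypothetical helper: configure pad 0, stream 0 of a sensor subdevice. */
int configureSensorExample(const MediaDevice *media)
{
	/* "imx219 0-0010" is a placeholder entity name. */
	std::unique_ptr<V4L2Subdevice> subdev =
		V4L2Subdevice::fromEntityName(media, "imx219 0-0010");
	if (!subdev || subdev->open() < 0)
		return -ENODEV;

	/* Enumerate the media bus codes and frame sizes on the stream. */
	const V4L2Subdevice::Stream stream{ 0, 0 };
	V4L2Subdevice::Formats formats = subdev->formats(stream);
	if (formats.empty())
		return -EINVAL;

	/* Pick the first code and its largest advertised size. */
	const auto &[code, sizes] = *formats.begin();

	V4L2SubdeviceFormat format{};
	format.code = code;
	format.size = sizes.back().max;

	/* Apply it; format is updated to what the driver actually set. */
	return subdev->setFormat(stream, &format, V4L2Subdevice::ActiveFormat);
}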
repos/libcamera/src/libcamera/device_enumerator.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2018, Google Inc. * * Enumeration and matching */ #include "libcamera/internal/device_enumerator.h" #include <string.h> #include <libcamera/base/log.h> #include "libcamera/internal/device_enumerator_sysfs.h" #include "libcamera/internal/device_enumerator_udev.h" #include "libcamera/internal/media_device.h" /** * \file device_enumerator.h * \brief Enumeration and matching of media devices * * The purpose of device enumeration and matching is to find media devices in * the system and map them to pipeline handlers. * * At the core of the enumeration is the DeviceEnumerator class, responsible * for enumerating all media devices in the system. It handles all interactions * with the operating system in a platform-specific way. For each media device * found, an instance of MediaDevice is created to store information about the * device gathered from the kernel through the Media Controller API. * * The DeviceEnumerator can enumerate all or specific media devices in the * system. When a new media device is added, the enumerator creates a * corresponding MediaDevice instance. * * The enumerator supports searching among enumerated devices based on criteria * expressed in a DeviceMatch object. */ namespace libcamera { LOG_DEFINE_CATEGORY(DeviceEnumerator) /** * \class DeviceMatch * \brief Description of a media device search pattern * * The DeviceMatch class describes a media device using properties from the * Media Controller struct media_device_info, entity names in the media graph * or other properties that can be used to identify a media device. * * The description is meant to be filled by pipeline handlers and passed to a * device enumerator to find matching media devices. * * A DeviceMatch is created with a specific Linux device driver in mind; * therefore, the name of the driver is a required property. One or more entity * names can be added as match criteria. * * Pipeline handlers are recommended to add entities to DeviceMatch as * appropriate to ensure that the media device they need can be uniquely * identified. This is useful when the corresponding kernel driver can produce * different graphs, for instance as a result of different driver versions or * hardware configurations, and not all those graphs are suitable for a pipeline * handler. */ /** * \brief Construct a media device search pattern * \param[in] driver The Linux device driver name that created the media device */ DeviceMatch::DeviceMatch(const std::string &driver) : driver_(driver) { } /** * \brief Add a media entity name to the search pattern * \param[in] entity The name of the entity in the media graph */ void DeviceMatch::add(const std::string &entity) { entities_.push_back(entity); } /** * \brief Compare a search pattern with a media device * \param[in] device The media device * * Matching is performed on the Linux device driver name and entity names from * the media graph. A match is found if both the driver name matches and the * media device contains all the entities listed in the search pattern.
 * * \return true if the media device matches the search pattern, false otherwise */ bool DeviceMatch::match(const MediaDevice *device) const { if (driver_ != device->driver()) return false; for (const std::string &name : entities_) { bool found = false; for (const MediaEntity *entity : device->entities()) { if (name == entity->name()) { if (!entity->deviceNode().empty()) { found = true; break; } else { LOG(DeviceEnumerator, Debug) << "Skip " << entity->name() << ": no device node"; } } } if (!found) return false; } return true; } /** * \class DeviceEnumerator * \brief Enumerate, store and search media devices * * The DeviceEnumerator class is responsible for all interactions with the * operating system related to media devices. It enumerates all media devices * in the system, and for each device found creates an instance of the * MediaDevice class and stores it internally. The list of media devices can * then be searched using DeviceMatch search patterns. * * The enumerator also associates media device entities with device node paths. */ /** * \brief Create a new device enumerator matching the system's capabilities * * Depending on how the operating system handles device detection, hot-plug * notification and device node lookup, different device enumerator * implementations may be needed. This function creates the best enumerator for * the operating system based on the available resources. Not all different * enumerator types are guaranteed to support all features. * * \return A pointer to the newly created device enumerator on success, or * nullptr if an error occurs */ std::unique_ptr<DeviceEnumerator> DeviceEnumerator::create() { std::unique_ptr<DeviceEnumerator> enumerator; #ifdef HAVE_LIBUDEV enumerator = std::make_unique<DeviceEnumeratorUdev>(); if (!enumerator->init()) return enumerator; #endif /* * Either udev is not available or udev initialization failed. Fall back * on the sysfs enumerator. */ enumerator = std::make_unique<DeviceEnumeratorSysfs>(); if (!enumerator->init()) return enumerator; return nullptr; } DeviceEnumerator::~DeviceEnumerator() { for (const std::shared_ptr<MediaDevice> &media : devices_) { if (media->busy()) LOG(DeviceEnumerator, Error) << "Removing media device " << media->deviceNode() << " while still in use"; } } /** * \fn DeviceEnumerator::init() * \brief Initialize the enumerator * \return 0 on success or a negative error code otherwise * \retval -EBUSY the enumerator has already been initialized * \retval -ENODEV the enumerator can't enumerate devices */ /** * \fn DeviceEnumerator::enumerate() * \brief Enumerate all media devices in the system * * This function finds and adds all media devices in the system to the * enumerator. It shall be implemented by all subclasses of DeviceEnumerator * using system-specific methods. * * Individual media devices that can't be properly enumerated shall be skipped * with a warning message logged, without returning an error. Only errors that * prevent enumeration altogether shall be fatal. * * \context This function is \threadbound. * * \return 0 on success or a negative error code otherwise */ /** * \brief Create a media device instance * \param[in] deviceNode Path to the media device to create * * Create a media device for the \a deviceNode, open it, and populate its * media graph. The device enumerator shall then populate the media device by * associating device nodes with entities using MediaEntity::setDeviceNode().
* This process is specific to each device enumerator, and the device enumerator * shall ensure that device nodes are ready to be used (for instance, if * applicable, by waiting for device nodes to be created and access permissions * to be set by the system). Once done, it shall add the media device to the * system with addDevice(). * * \return Created media device instance on success, or nullptr otherwise */ std::unique_ptr<MediaDevice> DeviceEnumerator::createDevice(const std::string &deviceNode) { std::unique_ptr<MediaDevice> media = std::make_unique<MediaDevice>(deviceNode); int ret = media->populate(); if (ret < 0) { LOG(DeviceEnumerator, Info) << "Unable to populate media device " << deviceNode << " (" << strerror(-ret) << "), skipping"; return nullptr; } LOG(DeviceEnumerator, Debug) << "New media device \"" << media->driver() << "\" created from " << deviceNode; return media; } /** * \var DeviceEnumerator::devicesAdded * \brief Notify of new media devices being found * * This signal is emitted when the device enumerator finds new media devices in * the system. It may be emitted for every newly detected device, or once for * multiple devices, at the discretion of the device enumerator. Not all device * enumerator types may support dynamic detection of new devices. */ /** * \brief Add a media device to the enumerator * \param[in] media media device instance to add * * Store the media device in the internal list for later matching with * pipeline handlers. \a media shall be created with createDevice() first. * This function shall be called after all members of the entities of the * media graph have been confirmed to be initialized. */ void DeviceEnumerator::addDevice(std::unique_ptr<MediaDevice> media) { LOG(DeviceEnumerator, Debug) << "Added device " << media->deviceNode() << ": " << media->driver(); devices_.push_back(std::move(media)); /* \todo To batch multiple additions, emit with a small delay here. */ devicesAdded.emit(); } /** * \brief Remove a media device from the enumerator * \param[in] deviceNode Path to the media device to remove * * Remove the media device identified by \a deviceNode previously added to the * enumerator with addDevice(). The media device's MediaDevice::disconnected * signal is emitted. */ void DeviceEnumerator::removeDevice(const std::string &deviceNode) { std::shared_ptr<MediaDevice> media; for (auto iter = devices_.begin(); iter != devices_.end(); ++iter) { if ((*iter)->deviceNode() == deviceNode) { media = std::move(*iter); devices_.erase(iter); break; } } if (!media) { LOG(DeviceEnumerator, Warning) << "Media device for node " << deviceNode << " not found"; return; } LOG(DeviceEnumerator, Debug) << "Media device for node " << deviceNode << " removed."; media->disconnected.emit(); } /** * \brief Search available media devices for a pattern match * \param[in] dm Search pattern * * Search in the enumerated media devices that are not already in use for a * match described in \a dm. If a match is found and the caller intends to use * it the caller is responsible for acquiring the MediaDevice object and * releasing it when done with it. 
* * \return pointer to the matching MediaDevice, or nullptr if no match is found */ std::shared_ptr<MediaDevice> DeviceEnumerator::search(const DeviceMatch &dm) { for (std::shared_ptr<MediaDevice> &media : devices_) { if (media->busy()) continue; if (dm.match(media.get())) { LOG(DeviceEnumerator, Debug) << "Successful match for media device \"" << media->driver() << "\""; return media; } } return nullptr; } } /* namespace libcamera */
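To illustrate the enumerate-then-search flow documented above, here is a minimal sketch, not part of the original file; the "uvcvideo" driver name and the helper name are arbitrary examples, and the caller is assumed to take ownership of the acquired device.

#include <memory>

#include "libcamera/internal/device_enumerator.h"
#include "libcamera/internal/media_device.h"

using namespace libcamera;

/* Hypothetical helper: find and acquire an unused UVC media device. */
std::shared_ptr<MediaDevice> findUvcDeviceExample()
{
	std::unique_ptr<DeviceEnumerator> enumerator = DeviceEnumerator::create();
	if (!enumerator || enumerator->enumerate() < 0)
		return nullptr;

	/* Match any unused media device created by the uvcvideo driver. */
	DeviceMatch dm("uvcvideo");

	std::shared_ptr<MediaDevice> media = enumerator->search(dm);
	if (media)
		media->acquire();

	return media;
}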
repos/libcamera/src/libcamera/pipeline_handler.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2018, Google Inc. * * Pipeline handler infrastructure */ #include "libcamera/internal/pipeline_handler.h" #include <chrono> #include <sys/stat.h> #include <sys/sysmacros.h> #include <libcamera/base/log.h> #include <libcamera/base/mutex.h> #include <libcamera/base/utils.h> #include <libcamera/camera.h> #include <libcamera/framebuffer.h> #include <libcamera/property_ids.h> #include "libcamera/internal/camera.h" #include "libcamera/internal/camera_manager.h" #include "libcamera/internal/device_enumerator.h" #include "libcamera/internal/framebuffer.h" #include "libcamera/internal/media_device.h" #include "libcamera/internal/request.h" #include "libcamera/internal/tracepoints.h" /** * \file pipeline_handler.h * \brief Create pipelines and cameras from a set of media devices * * Each pipeline supported by libcamera needs to be backed by a pipeline * handler implementation that operates on a set of media devices. The pipeline * handler is responsible for matching the media devices it requires with the * devices present in the system, and for creating corresponding Camera * instances once all those devices can be acquired. * * Every subclass of PipelineHandler shall be registered with libcamera using * the REGISTER_PIPELINE_HANDLER() macro. */ using namespace std::chrono_literals; namespace libcamera { LOG_DEFINE_CATEGORY(Pipeline) /** * \class PipelineHandler * \brief Create and manage cameras based on a set of media devices * * The PipelineHandler matches the media devices provided by a DeviceEnumerator * with the pipelines it supports and creates corresponding Camera devices. * * Pipeline handler instances are reference-counted through std::shared_ptr<>. * They implement std::enable_shared_from_this<> in order to create new * std::shared_ptr<> in code paths originating from member functions of the * PipelineHandler class where only the 'this' pointer is available. */ /** * \brief Construct a PipelineHandler instance * \param[in] manager The camera manager * * In order to honour the std::enable_shared_from_this<> contract, * PipelineHandler instances shall never be constructed manually, but always * through the PipelineHandlerFactoryBase::create() function. */ PipelineHandler::PipelineHandler(CameraManager *manager) : manager_(manager), useCount_(0) { } PipelineHandler::~PipelineHandler() { for (std::shared_ptr<MediaDevice> media : mediaDevices_) media->release(); } /** * \fn PipelineHandler::match(DeviceEnumerator *enumerator) * \brief Match media devices and create camera instances * \param[in] enumerator The enumerator providing all media devices found in the * system * * This function is the main entry point of the pipeline handler. It is called * by the camera manager with the \a enumerator passed as an argument. It shall * acquire from the \a enumerator all the media devices it needs for a single * pipeline, create one or multiple Camera instances and register them with the * camera manager. * * If all media devices needed by the pipeline handler are found, they must all * be acquired by a call to MediaDevice::acquire(). This function shall then * create the corresponding Camera instances, store them internally, and return * true. Otherwise it shall not acquire any media device (or shall release all * the media devices it has acquired by calling MediaDevice::release()) and * return false.
 * * If multiple instances of a pipeline are available in the system, the * PipelineHandler class will be instantiated once per instance, and its match() * function called for every instance. Each call shall acquire media devices for * one pipeline instance, until all compatible media devices are exhausted. * * If this function returns true, a new instance of the pipeline handler will * be created and its match() function called. * * \context This function is called from the CameraManager thread. * * \return true if media devices have been acquired and camera instances * created, or false otherwise */ /** * \brief Search and acquire a MediaDevice matching a device pattern * \param[in] enumerator Enumerator containing all media devices in the system * \param[in] dm Device match pattern * * Search the device \a enumerator for an available media device matching the * device match pattern \a dm. Matching media devices that have previously been * acquired by MediaDevice::acquire() are not considered. If a match is found, * the media device is acquired and returned. The caller shall not release the * device explicitly; it will be automatically released when the pipeline * handler is destroyed. * * \context This function shall be called from the CameraManager thread. * * \return A pointer to the matching MediaDevice, or nullptr if no match is found */ MediaDevice *PipelineHandler::acquireMediaDevice(DeviceEnumerator *enumerator, const DeviceMatch &dm) { std::shared_ptr<MediaDevice> media = enumerator->search(dm); if (!media) return nullptr; if (!media->acquire()) return nullptr; mediaDevices_.push_back(media); return media.get(); } /** * \brief Acquire exclusive access to the pipeline handler for the process * * This function locks all the media devices used by the pipeline to ensure * that no other process can access them concurrently. * * Access to a pipeline handler may be acquired recursively from within the * same process. Every successful acquire() call shall be matched with a * release() call. This allows concurrent access to the same pipeline handler * from different cameras within the same process. * * Pipeline handlers shall not call this function directly as the Camera class * handles access internally. * * \context This function is \threadsafe. * * \return True if the pipeline handler was acquired, false if another process * has already acquired it * \sa release() */ bool PipelineHandler::acquire() { MutexLocker locker(lock_); if (useCount_) { ++useCount_; return true; } for (std::shared_ptr<MediaDevice> &media : mediaDevices_) { if (!media->lock()) { unlockMediaDevices(); return false; } } ++useCount_; return true; } /** * \brief Release exclusive access to the pipeline handler * \param[in] camera The camera for which to release data * * This function releases access to the pipeline handler previously acquired by * a call to acquire(). Every release() call shall match a previous successful * acquire() call. Calling this function on a pipeline handler that hasn't been * acquired results in undefined behaviour. * * Pipeline handlers shall not call this function directly as the Camera class * handles access internally. * * \context This function is \threadsafe.
* * \sa acquire() */ void PipelineHandler::release(Camera *camera) { MutexLocker locker(lock_); ASSERT(useCount_); unlockMediaDevices(); releaseDevice(camera); --useCount_; } /** * \brief Release resources associated with this camera * \param[in] camera The camera for which to release resources * * Pipeline handlers may override this in order to perform cleanup operations * when a camera is released, such as freeing memory. */ void PipelineHandler::releaseDevice([[maybe_unused]] Camera *camera) { } void PipelineHandler::unlockMediaDevices() { for (std::shared_ptr<MediaDevice> &media : mediaDevices_) media->unlock(); } /** * \fn PipelineHandler::generateConfiguration() * \brief Generate a camera configuration for a specified camera * \param[in] camera The camera to generate a default configuration for * \param[in] roles A list of stream roles * * Generate a default configuration for the \a camera for a specified list of * stream roles. The caller shall populate the \a roles with the use-cases it * wishes to fetch the default configuration for. The returned configuration * can then be examined by the caller to learn about the selected streams and * their default parameters. * * The intended companion to this is \a configure() which can be used to change * the group of streams parameters. * * \context This function may be called from any thread and shall be * \threadsafe. It shall not modify the state of the \a camera in the pipeline * handler. * * \return A valid CameraConfiguration if the requested roles can be satisfied, * or a null pointer otherwise. */ /** * \fn PipelineHandler::configure() * \brief Configure a group of streams for capture * \param[in] camera The camera to configure * \param[in] config The camera configurations to setup * * Configure the specified group of streams for \a camera according to the * configuration specified in \a config. The intended caller of this interface * is the Camera class which will receive configuration to apply from the * application. * * The configuration is guaranteed to have been validated with * CameraConfiguration::validate(). The pipeline handler implementation shall * not perform further validation and may rely on any custom field stored in its * custom CameraConfiguration derived class. * * When configuring the camera the pipeline handler shall associate a Stream * instance to each StreamConfiguration entry in the CameraConfiguration using * the StreamConfiguration::setStream() function. * * \context This function is called from the CameraManager thread. * * \return 0 on success or a negative error code otherwise */ /** * \fn PipelineHandler::exportFrameBuffers() * \brief Allocate and export buffers for \a stream * \param[in] camera The camera * \param[in] stream The stream to allocate buffers for * \param[out] buffers Array of buffers successfully allocated * * This function allocates buffers for the \a stream from the devices associated * with the stream in the corresponding pipeline handler. Those buffers shall be * suitable to be added to a Request for the stream, and shall be mappable to * the CPU through their associated dmabufs with mmap(). * * The function may only be called after the Camera has been configured and * before it gets started, or after it gets stopped. It shall be called only for * streams that are part of the active camera configuration. * * The only intended caller is Camera::exportFrameBuffers(). * * \context This function is called from the CameraManager thread. 
 * * \return The number of allocated buffers on success or a negative error code * otherwise */ /** * \fn PipelineHandler::start() * \brief Start capturing from a group of streams * \param[in] camera The camera to start * \param[in] controls Controls to be applied before starting the Camera * * Start the group of streams that have been configured for capture by * \a configure(). The intended caller of this function is the Camera class * which will in turn be called from the application to indicate that it has * configured the streams and is ready to capture. * * \context This function is called from the CameraManager thread. * * \return 0 on success or a negative error code otherwise */ /** * \brief Stop capturing from all running streams and cancel pending requests * \param[in] camera The camera to stop * * This function stops capturing and processing requests immediately. All * pending requests are cancelled and complete immediately in an error state. * * \context This function is called from the CameraManager thread. */ void PipelineHandler::stop(Camera *camera) { /* Stop the pipeline handler and let the queued requests complete. */ stopDevice(camera); /* Cancel and signal as complete all waiting requests. */ while (!waitingRequests_.empty()) { Request *request = waitingRequests_.front(); waitingRequests_.pop(); request->_d()->cancel(); completeRequest(request); } /* Make sure no requests are pending. */ Camera::Private *data = camera->_d(); ASSERT(data->queuedRequests_.empty()); data->requestSequence_ = 0; } /** * \fn PipelineHandler::stopDevice() * \brief Stop capturing from all running streams * \param[in] camera The camera to stop * * This function stops capturing and processing requests immediately. All * pending requests are cancelled and complete immediately in an error state. */ /** * \brief Determine if the camera has any requests pending * \param[in] camera The camera to check * * This function determines if there are any requests queued to the pipeline * awaiting processing. * * \return True if there are pending requests, or false otherwise */ bool PipelineHandler::hasPendingRequests(const Camera *camera) const { return !camera->_d()->queuedRequests_.empty(); } /** * \fn PipelineHandler::registerRequest() * \brief Register a request for use by the pipeline handler * \param[in] request The request to register * * This function is called when the request is created, and allows the pipeline * handler to perform any one-time initialization it requires for the request. */ void PipelineHandler::registerRequest(Request *request) { /* * Connect the request prepared signal to notify the pipeline handler * when a request is ready to be processed. */ request->_d()->prepared.connect(this, &PipelineHandler::doQueueRequests); } /** * \fn PipelineHandler::queueRequest() * \brief Queue a request * \param[in] request The request to queue * * This function queues a capture request to the pipeline handler for * processing. The request is first added to the internal list of waiting * requests, which have to be prepared before they can be queued to the * pipeline handler. * * The queue of waiting requests is iterated and all prepared requests are * passed to the pipeline handler in the same order they have been queued by * calling this function. * * If a Request fails during the preparation phase or if the pipeline handler * fails in queuing the request to the hardware, the request is cancelled.
 * * Keeping track of queued requests ensures automatic completion of all requests * when the pipeline handler is stopped with stop(). Request completion shall be * signalled by the pipeline handler using the completeRequest() function. * * \context This function is called from the CameraManager thread. */ void PipelineHandler::queueRequest(Request *request) { LIBCAMERA_TRACEPOINT(request_queue, request); waitingRequests_.push(request); request->_d()->prepare(300ms); } /** * \brief Queue one request to the device */ void PipelineHandler::doQueueRequest(Request *request) { LIBCAMERA_TRACEPOINT(request_device_queue, request); Camera *camera = request->_d()->camera(); Camera::Private *data = camera->_d(); data->queuedRequests_.push_back(request); request->_d()->sequence_ = data->requestSequence_++; if (request->_d()->cancelled_) { completeRequest(request); return; } int ret = queueRequestDevice(camera, request); if (ret) { request->_d()->cancel(); completeRequest(request); } } /** * \brief Queue prepared requests to the device * * Iterate the list of waiting requests and queue them to the device one * by one if they have been prepared. */ void PipelineHandler::doQueueRequests() { while (!waitingRequests_.empty()) { Request *request = waitingRequests_.front(); if (!request->_d()->prepared_) break; doQueueRequest(request); waitingRequests_.pop(); } } /** * \fn PipelineHandler::queueRequestDevice() * \brief Queue a request to the device * \param[in] camera The camera to queue the request to * \param[in] request The request to queue * * This function queues a capture request to the device for processing. The * request contains a set of buffers associated with streams and a set of * parameters. The pipeline handler shall program the device to ensure that the * parameters will be applied to the frames captured in the buffers provided in * the request. * * \context This function is called from the CameraManager thread. * * \return 0 on success or a negative error code otherwise */ /** * \brief Complete a buffer for a request * \param[in] request The request the buffer belongs to * \param[in] buffer The buffer that has completed * * This function shall be called by pipeline handlers to signal completion of * the \a buffer part of the \a request. It notifies applications of buffer * completion and updates the request's internal buffer tracking. The request * is not completed automatically when the last buffer completes to give * pipeline handlers a chance to perform any operation that may still be * needed. They shall complete requests explicitly with completeRequest(). * * \context This function shall be called from the CameraManager thread. * * \return True if all buffers contained in the request have completed, false * otherwise */ bool PipelineHandler::completeBuffer(Request *request, FrameBuffer *buffer) { Camera *camera = request->_d()->camera(); camera->bufferCompleted.emit(request, buffer); return request->_d()->completeBuffer(buffer); } /** * \brief Signal request completion * \param[in] request The request that has completed * * The pipeline handler shall call this function to notify the \a camera that * the request has completed. The request is no longer managed by the pipeline * handler and shall not be accessed once this function returns. * * This function ensures that requests will be returned to the application in * submission order; the pipeline handler may call it on any complete request * without any ordering constraint.
* * \context This function shall be called from the CameraManager thread. */ void PipelineHandler::completeRequest(Request *request) { Camera *camera = request->_d()->camera(); request->_d()->complete(); Camera::Private *data = camera->_d(); while (!data->queuedRequests_.empty()) { Request *req = data->queuedRequests_.front(); if (req->status() == Request::RequestPending) break; ASSERT(!req->hasPendingBuffers()); data->queuedRequests_.pop_front(); camera->requestComplete(req); } } /** * \brief Retrieve the absolute path to a platform configuration file * \param[in] subdir The pipeline handler specific subdirectory name * \param[in] name The configuration file name * * This function locates a named platform configuration file and returns * its absolute path to the pipeline handler. It searches the following * directories, in order: * * - If libcamera is not installed, the src/libcamera/pipeline/\<subdir\>/data/ * directory within the source tree ; otherwise * - The system data (share/libcamera/pipeline/\<subdir\>) directory. * * The system directories are not searched if libcamera is not installed. * * \return The full path to the pipeline handler configuration file, or an empty * string if no configuration file can be found */ std::string PipelineHandler::configurationFile(const std::string &subdir, const std::string &name) const { std::string confPath; struct stat statbuf; int ret; std::string root = utils::libcameraSourcePath(); if (!root.empty()) { /* * When libcamera is used before it is installed, load * configuration files from the source directory. The * configuration files are then located in the 'data' * subdirectory of the corresponding pipeline handler. */ std::string confDir = root + "src/libcamera/pipeline/"; confPath = confDir + subdir + "/data/" + name; LOG(Pipeline, Info) << "libcamera is not installed. Loading platform configuration file from '" << confPath << "'"; } else { /* Else look in the system locations. */ confPath = std::string(LIBCAMERA_DATA_DIR) + "/pipeline/" + subdir + '/' + name; } ret = stat(confPath.c_str(), &statbuf); if (ret == 0 && (statbuf.st_mode & S_IFMT) == S_IFREG) return confPath; LOG(Pipeline, Error) << "Configuration file '" << confPath << "' not found for pipeline handler '" << PipelineHandler::name() << "'"; return std::string(); } /** * \brief Register a camera to the camera manager and pipeline handler * \param[in] camera The camera to be added * * This function is called by pipeline handlers to register the cameras they * handle with the camera manager. * * \context This function shall be called from the CameraManager thread. */ void PipelineHandler::registerCamera(std::shared_ptr<Camera> camera) { cameras_.push_back(camera); if (mediaDevices_.empty()) LOG(Pipeline, Fatal) << "Registering camera with no media devices!"; /* * Walk the entity list and map the devnums of all capture video nodes * to the camera. */ std::vector<int64_t> devnums; for (const std::shared_ptr<MediaDevice> &media : mediaDevices_) { for (const MediaEntity *entity : media->entities()) { if (entity->pads().size() == 1 && (entity->pads()[0]->flags() & MEDIA_PAD_FL_SINK) && entity->function() == MEDIA_ENT_F_IO_V4L) { devnums.push_back(makedev(entity->deviceMajor(), entity->deviceMinor())); } } } /* * Store the associated devices as a property of the camera to allow * systems to identify which devices are managed by libcamera. 
 */ Camera::Private *data = camera->_d(); data->properties_.set(properties::SystemDevices, devnums); manager_->_d()->addCamera(std::move(camera)); } /** * \brief Enable hotplug handling for a media device * \param[in] media The media device * * This function enables hotplug handling, and especially hot-unplug handling, * of the \a media device. It shall be called by pipeline handlers for all the * media devices that can be disconnected. * * When a media device passed to this function is later unplugged, the pipeline * handler gets notified and automatically disconnects all the cameras it has * registered without requiring any manual intervention. */ void PipelineHandler::hotplugMediaDevice(MediaDevice *media) { media->disconnected.connect(this, [this, media] { mediaDeviceDisconnected(media); }); } /** * \brief Slot for the MediaDevice disconnected signal */ void PipelineHandler::mediaDeviceDisconnected(MediaDevice *media) { media->disconnected.disconnect(this); if (cameras_.empty()) return; disconnect(); } /** * \brief Device disconnection handler * * This virtual function is called to notify the pipeline handler that the * device it handles has been disconnected. It notifies all cameras created by * the pipeline handler that they have been disconnected, and unregisters them * from the camera manager. * * The function can be overridden by pipeline handlers to perform custom * operations at disconnection time. Any overriding version shall call the * PipelineHandler::disconnect() base function for proper hot-unplug operation. */ void PipelineHandler::disconnect() { /* * Each camera holds a reference to its associated pipeline handler * instance. Hence, when the last camera is dropped, the pipeline * handler will get destroyed by the last manager_->removeCamera(camera) * call in the loop below. * * This is acceptable as long as we make sure that the code path does not * access any member of the (already destroyed) pipeline handler instance * afterwards. Therefore, we move the cameras_ vector to a local temporary * container to avoid accessing freed memory later, i.e. without having to * explicitly run cameras_.clear(). */ std::vector<std::weak_ptr<Camera>> cameras{ std::move(cameras_) }; for (const std::weak_ptr<Camera> &ptr : cameras) { std::shared_ptr<Camera> camera = ptr.lock(); if (!camera) continue; camera->disconnect(); manager_->_d()->removeCamera(camera); } } /** * \var PipelineHandler::manager_ * \brief The Camera manager associated with the pipeline handler * * The camera manager pointer is stored in the pipeline handler for the * convenience of pipeline handler implementations. It remains valid and * constant for the whole lifetime of the pipeline handler. */ /** * \fn PipelineHandler::name() * \brief Retrieve the pipeline handler name * \context This function shall be \threadsafe. * \return The pipeline handler name */ /** * \class PipelineHandlerFactoryBase * \brief Base class for pipeline handler factories * * The PipelineHandlerFactoryBase class is the base of all specializations of * the PipelineHandlerFactory class template. It implements the factory * registration, maintains a registry of factories, and provides access to the * registered factories. */ /** * \brief Construct a pipeline handler factory base * \param[in] name Name of the pipeline handler class * * Creating an instance of the factory base registers it with the global list of * factories, accessible through the factories() function. * * The factory \a name is used for debug purposes and shall be unique.
 */ PipelineHandlerFactoryBase::PipelineHandlerFactoryBase(const char *name) : name_(name) { registerType(this); } /** * \brief Create an instance of the PipelineHandler corresponding to the factory * \param[in] manager The camera manager * * \return A shared pointer to a new instance of the PipelineHandler subclass * corresponding to the factory */ std::shared_ptr<PipelineHandler> PipelineHandlerFactoryBase::create(CameraManager *manager) const { std::unique_ptr<PipelineHandler> handler = createInstance(manager); handler->name_ = name_.c_str(); return std::shared_ptr<PipelineHandler>(std::move(handler)); } /** * \fn PipelineHandlerFactoryBase::name() * \brief Retrieve the factory name * \return The factory name */ /** * \brief Add a pipeline handler class to the registry * \param[in] factory Factory to use to construct the pipeline handler * * The caller is responsible for guaranteeing the uniqueness of the pipeline * handler name. */ void PipelineHandlerFactoryBase::registerType(PipelineHandlerFactoryBase *factory) { std::vector<PipelineHandlerFactoryBase *> &factories = PipelineHandlerFactoryBase::factories(); factories.push_back(factory); } /** * \brief Retrieve the list of all pipeline handler factories * \return The list of pipeline handler factories */ std::vector<PipelineHandlerFactoryBase *> &PipelineHandlerFactoryBase::factories() { /* * The static factories vector is defined inside the function to ensure * it gets initialized on first use, without any dependency on * link order. */ static std::vector<PipelineHandlerFactoryBase *> factories; return factories; } /** * \brief Return the factory for the pipeline handler with name \a name * \param[in] name The pipeline handler name * \return The factory of the pipeline handler with name \a name, or nullptr if not found */ const PipelineHandlerFactoryBase *PipelineHandlerFactoryBase::getFactoryByName(const std::string &name) { const std::vector<PipelineHandlerFactoryBase *> &factories = PipelineHandlerFactoryBase::factories(); auto iter = std::find_if(factories.begin(), factories.end(), [&name](const PipelineHandlerFactoryBase *f) { return f->name() == name; }); if (iter != factories.end()) return *iter; return nullptr; } /** * \class PipelineHandlerFactory * \brief Registration of PipelineHandler classes and creation of instances * \tparam _PipelineHandler The pipeline handler class type for this factory * * To facilitate discovery and instantiation of PipelineHandler classes, the * PipelineHandlerFactory class implements auto-registration of pipeline * handlers. Each PipelineHandler subclass shall register itself using the * REGISTER_PIPELINE_HANDLER() macro, which will create a corresponding * instance of a PipelineHandlerFactory and register it with the static list of * factories. */ /** * \fn PipelineHandlerFactory::PipelineHandlerFactory(const char *name) * \brief Construct a pipeline handler factory * \param[in] name Name of the pipeline handler class * * Creating an instance of the factory registers it with the global list of * factories, accessible through the factories() function. * * The factory \a name is used for debug purposes and shall be unique.
*/ /** * \fn PipelineHandlerFactory::createInstance() const * \brief Create an instance of the PipelineHandler corresponding to the factory * \param[in] manager The camera manager * \return A unique pointer to a newly constructed instance of the * PipelineHandler subclass corresponding to the factory */ /** * \def REGISTER_PIPELINE_HANDLER * \brief Register a pipeline handler with the pipeline handler factory * \param[in] handler Class name of PipelineHandler derived class to register * \param[in] name Name assigned to the pipeline handler, matching the pipeline * subdirectory name in the source tree. * * Register a PipelineHandler subclass with the factory and make it available to * try and match devices. */ } /* namespace libcamera */
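To tie the factory registration together, the following sketch shows the overall shape of a registered pipeline handler. It is illustrative only, not part of libcamera: the class name, driver name and stub bodies are invented, and the virtual function signatures follow the libcamera revision documented here and may differ in other versions.

#include <cerrno>
#include <memory>
#include <vector>

#include <libcamera/base/span.h>

#include <libcamera/camera.h>
#include <libcamera/stream.h>

#include "libcamera/internal/device_enumerator.h"
#include "libcamera/internal/pipeline_handler.h"

using namespace libcamera;

class PipelineHandlerExample : public PipelineHandler
{
public:
	PipelineHandlerExample(CameraManager *manager)
		: PipelineHandler(manager)
	{
	}

	std::unique_ptr<CameraConfiguration>
	generateConfiguration([[maybe_unused]] Camera *camera,
			      [[maybe_unused]] Span<const StreamRole> roles) override
	{
		/* A real handler builds a configuration for the roles here. */
		return nullptr;
	}

	int configure([[maybe_unused]] Camera *camera,
		      [[maybe_unused]] CameraConfiguration *config) override
	{
		return -ENODEV;
	}

	int exportFrameBuffers([[maybe_unused]] Camera *camera,
			       [[maybe_unused]] Stream *stream,
			       [[maybe_unused]] std::vector<std::unique_ptr<FrameBuffer>> *buffers) override
	{
		return -ENODEV;
	}

	int start([[maybe_unused]] Camera *camera,
		  [[maybe_unused]] const ControlList *controls) override
	{
		return -ENODEV;
	}

	void stopDevice([[maybe_unused]] Camera *camera) override
	{
	}

	int queueRequestDevice([[maybe_unused]] Camera *camera,
			       [[maybe_unused]] Request *request) override
	{
		return -ENODEV;
	}

	bool match(DeviceEnumerator *enumerator) override
	{
		/* "example-driver" is a placeholder driver name. */
		DeviceMatch dm("example-driver");

		/* The media device is released when the handler is destroyed. */
		MediaDevice *media = acquireMediaDevice(enumerator, dm);
		if (!media)
			return false;

		/* A real handler creates and registers Camera instances here. */
		return true;
	}
};

REGISTER_PIPELINE_HANDLER(PipelineHandlerExample, "example")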
repos/libcamera/src/libcamera/ipa_proxy.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2019, Google Inc. * * Image Processing Algorithm proxy */ #include "libcamera/internal/ipa_proxy.h" #include <string.h> #include <sys/stat.h> #include <sys/types.h> #include <unistd.h> #include <libcamera/base/log.h> #include <libcamera/base/utils.h> #include "libcamera/internal/ipa_module.h" /** * \file ipa_proxy.h * \brief IPA Proxy */ namespace libcamera { LOG_DEFINE_CATEGORY(IPAProxy) /** * \class IPAProxy * \brief IPA Proxy * * Isolate IPA into separate process. */ /** * \enum IPAProxy::ProxyState * \brief Identifies the available operational states of the proxy * * \var IPAProxy::ProxyStopped * \brief The proxy is not active and only synchronous operations are permitted * \var IPAProxy::ProxyStopping * \brief No new tasks can be submitted to the proxy, however existing events * can be completed * \var IPAProxy::ProxyRunning * \brief The Proxy is active and asynchronous tasks may be queued */ /** * \brief Construct an IPAProxy instance * \param[in] ipam The IPA module */ IPAProxy::IPAProxy(IPAModule *ipam) : valid_(false), state_(ProxyStopped), ipam_(ipam) { } IPAProxy::~IPAProxy() { } /** * \fn IPAProxy::isValid() * \brief Check if the IPAProxy instance is valid * * An IPAProxy instance is valid if the IPA interface is successfully created in * isolation, and IPC is successfully set up. * * \return True if the IPAProxy is valid, false otherwise */ /** * \brief Retrieve the absolute path to an IPA configuration file * \param[in] name The configuration file name * * This function locates the configuration file for an IPA and returns its * absolute path. It searches the following directories, in order: * * - All directories specified in the colon-separated LIBCAMERA_IPA_CONFIG_PATH * environment variable ; or * - If libcamera is not installed, the src/ipa/ directory within the source * tree ; otherwise * - The system sysconf (etc/libcamera/ipa) and the data (share/libcamera/ipa/) * directories. * * The system directories are not searched if libcamera is not installed. * * Within each of those directories, the function looks for a subdirectory * named after the IPA module name, as reported in IPAModuleInfo::name, and for * a file named \a name within that directory. The \a name is IPA-specific. * * \return The full path to the IPA configuration file, or an empty string if * no configuration file can be found */ std::string IPAProxy::configurationFile(const std::string &name) const { struct stat statbuf; int ret; /* * The IPA module name can be used as-is to build directory names as it * has been validated when loading the module. */ std::string ipaName = ipam_->info().name; /* Check the environment variable first. */ const char *confPaths = utils::secure_getenv("LIBCAMERA_IPA_CONFIG_PATH"); if (confPaths) { for (const auto &dir : utils::split(confPaths, ":")) { if (dir.empty()) continue; std::string confPath = dir + "/" + ipaName + "/" + name; ret = stat(confPath.c_str(), &statbuf); if (ret == 0 && (statbuf.st_mode & S_IFMT) == S_IFREG) return confPath; } } std::string root = utils::libcameraSourcePath(); if (!root.empty()) { /* * When libcamera is used before it is installed, load * configuration files from the source directory. The * configuration files are then located in the 'data' * subdirectory of the corresponding IPA module. */ std::string ipaConfDir = root + "src/ipa/" + ipaName + "/data"; LOG(IPAProxy, Info) << "libcamera is not installed. 
Loading IPA configuration from '" << ipaConfDir << "'"; std::string confPath = ipaConfDir + "/" + name; ret = stat(confPath.c_str(), &statbuf); if (ret == 0 && (statbuf.st_mode & S_IFMT) == S_IFREG) return confPath; } else { /* Else look in the system locations. */ for (const auto &dir : utils::split(IPA_CONFIG_DIR, ":")) { std::string confPath = dir + "/" + ipaName + "/" + name; ret = stat(confPath.c_str(), &statbuf); if (ret == 0 && (statbuf.st_mode & S_IFMT) == S_IFREG) return confPath; } } LOG(IPAProxy, Error) << "Configuration file '" << name << "' not found for IPA module '" << ipaName << "'"; return std::string(); } /** * \brief Find a valid full path for a proxy worker for a given executable name * \param[in] file File name of proxy worker executable * * A proxy worker's executable could be found in the paths specified by the * colon-separated LIBCAMERA_IPA_PROXY_PATH environment variable, in the build * directory when libcamera is not installed, or in the global installation * directory otherwise. This function checks the LIBCAMERA_IPA_PROXY_PATH * directories first, in order, then falls back to the build or installation * directory, and returns the full path to the proxy worker executable * specified by \a file. The proxy worker executable shall have exec * permission. * * \return The full path to the proxy worker executable, or an empty string if * no valid executable path is found */ std::string IPAProxy::resolvePath(const std::string &file) const { std::string proxyFile = "/" + file; /* Check env variable first. */ const char *execPaths = utils::secure_getenv("LIBCAMERA_IPA_PROXY_PATH"); if (execPaths) { for (const auto &dir : utils::split(execPaths, ":")) { if (dir.empty()) continue; std::string proxyPath = dir; proxyPath += proxyFile; if (!access(proxyPath.c_str(), X_OK)) return proxyPath; } } /* * When libcamera is used before it is installed, load proxy workers * from the same build directory as the libcamera directory itself. * This requires identifying the path of the libcamera.so, and * referencing a relative path for the proxy workers from that point. */ std::string root = utils::libcameraBuildPath(); if (!root.empty()) { std::string ipaProxyDir = root + "src/libcamera/proxy/worker"; LOG(IPAProxy, Info) << "libcamera is not installed. Loading proxy workers from '" << ipaProxyDir << "'"; std::string proxyPath = ipaProxyDir + proxyFile; if (!access(proxyPath.c_str(), X_OK)) return proxyPath; return std::string(); } /* Else try finding the exec target from the install directory. */ std::string proxyPath = std::string(IPA_PROXY_DIR) + proxyFile; if (!access(proxyPath.c_str(), X_OK)) return proxyPath; return std::string(); } /** * \var IPAProxy::valid_ * \brief Flag to indicate if the IPAProxy instance is valid * * An IPAProxy instance is valid if the IPA interface is successfully created in * isolation, and IPC is successfully set up. * * This flag can be read via IPAProxy::isValid(). * * Implementations of the IPAProxy class should set this flag upon successful * construction. */ /** * \var IPAProxy::state_ * \brief Current state of the IPAProxy * * The IPAProxy can be Running, Stopped, or Stopping. * * This state provides a means to ensure that asynchronous functions are only * called while the proxy is running, and to prevent new tasks from being * submitted while still enabling events to complete when the IPAProxy is * stopping. */ } /* namespace libcamera */
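As a usage illustration of the lookup order documented above, a caller could resolve an IPA configuration file as sketched below. This is an assumption-laden sketch, not part of the original file: the helper name and the "example.yaml" file name are invented, and configurationFile() is assumed to be accessible to the caller.

#include <string>

#include "libcamera/internal/ipa_proxy.h"

/* Hypothetical helper; "example.yaml" is a placeholder file name. */
std::string exampleIpaConfPath(const libcamera::IPAProxy &proxy)
{
	/*
	 * Resolves through LIBCAMERA_IPA_CONFIG_PATH, then the source tree
	 * when running uninstalled, then the system directories.
	 */
	return proxy.configurationFile("example.yaml");
}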
repos/libcamera/src/libcamera/control_validator.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2019, Google Inc. * * Control validator */ #include "libcamera/internal/control_validator.h" /** * \file control_validator.h * \brief Abstract control validator */ namespace libcamera { /** * \class ControlValidator * \brief Interface for the control validator * * The ControlValidator class is used by the ControlList class to validate * controls added to the list. It is an abstract class providing an interface * for object-specific control validation, such as Camera controls and V4L2 * controls. */ /** * \fn ControlValidator::name() * \brief Retrieve the name of the object associated with the validator * \return The name of the object associated with the validator */ /** * \fn ControlValidator::validate() * \brief Validate a control * \param[in] id The control ID * * This function validates the control \a id against the object corresponding to * the validator. It shall at least validate that the control is applicable to * the object instance, and may perform additional checks. * * \return True if the control is valid, false otherwise */ } /* namespace libcamera */
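Since ControlValidator is an abstract interface, a concrete validator only needs name() and validate(). The sketch below is illustrative, not a libcamera implementation: the class name and the fixed-set policy are assumptions, and the validate() parameter is assumed to be the numerical control ID.

#include <set>
#include <string>

#include "libcamera/internal/control_validator.h"

class FixedControlValidator : public libcamera::ControlValidator
{
public:
	FixedControlValidator(std::set<unsigned int> ids)
		: ids_(std::move(ids))
	{
	}

	const std::string &name() const override { return name_; }

	/* A control is valid if its numerical ID is in the fixed set. */
	bool validate(unsigned int id) const override
	{
		return ids_.count(id) != 0;
	}

private:
	std::set<unsigned int> ids_;
	const std::string name_{ "FixedControlValidator" };
};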
repos/libcamera/src/libcamera/transform.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
 * Copyright (C) 2020, Raspberry Pi Ltd
 *
 * 2D plane transforms.
 */

#include <libcamera/transform.h>

#include <libcamera/orientation.h>

/**
 * \file transform.h
 * \brief Enum to represent and manipulate 2D plane transforms
 */

namespace libcamera {

/**
 * \enum Transform
 * \brief Enum to represent a 2D plane transform
 *
 * The Transform can take 8 distinct values, representing the usual 2D plane
 * transforms listed below. Each of these transforms can be constructed
 * out of 3 basic operations, namely a horizontal flip (mirror), a vertical
 * flip, and a transposition (about the main diagonal). The transforms are
 * encoded such that a single bit indicates the presence of each of the 3
 * basic operations:
 *
 * - bit 0 - presence of a horizontal flip
 * - bit 1 - presence of a vertical flip
 * - bit 2 - presence of a transposition.
 *
 * We regard these 3 basic operations as being applied in a specific order:
 * first the two flip operations (actually they commute, so the order between
 * them is unimportant) and finally any transpose operation.
 *
 * Functions are provided to manipulate directly the bits within the transform
 * encoding, but there are also higher-level functions to invert and compose
 * transforms. Transforms are composed according to the usual mathematical
 * convention such that the right transform is applied first, and the left
 * transform is applied second.
 *
 * Finally, we have a total of 8 distinct transformations, as follows (a
 * couple of them have additional synonyms for convenience). We illustrate each
 * with its nominal effect on a rectangle with vertices labelled A, B, C and D.
 *
 * \sa https://en.wikipedia.org/wiki/Examples_of_groups#dihedral_group_of_order_8
 *
 * The set of 2D plane transforms is also known as the symmetry group of a
 * square, described in the link. Note that the group can be generated by
 * only 2 elements (the horizontal flip and a 90 degree rotation, for
 * example), however, the encoding used here makes the presence of the vertical
 * flip explicit.
 *
 * \var Transform::Identity
 *
 * Identity transform.
~~~
             A-B                          A-B
Input image  | |  goes to output image    | |
             C-D                          C-D
~~~
 * Numeric value: 0 (no bits set).
 *
 * \var Transform::Rot0
 *
 * Synonym for Transform::Identity (zero degree rotation).
 *
 * \var Transform::HFlip
 *
 * Horizontal flip.
~~~
             A-B                          B-A
Input image  | |  goes to output image    | |
             C-D                          D-C
~~~
 * Numeric value: 1 (horizontal flip bit set only).
 *
 * \var Transform::VFlip
 *
 * Vertical flip.
~~~
             A-B                          C-D
Input image  | |  goes to output image    | |
             C-D                          A-B
~~~
 * Numeric value: 2 (vertical flip bit set only).
 *
 * \var Transform::HVFlip
 *
 * Horizontal and vertical flip (identical to a 180 degree rotation).
~~~
             A-B                          D-C
Input image  | |  goes to output image    | |
             C-D                          B-A
~~~
 * Numeric value: 3 (horizontal and vertical flip bits set).
 *
 * \var Transform::Rot180
 *
 * Synonym for `HVFlip` (180 degree rotation).
 *
 * \var Transform::Transpose
 *
 * Transpose (about the main diagonal).
~~~
             A-B                          A-C
Input image  | |  goes to output image    | |
             C-D                          B-D
~~~
 * Numeric value: 4 (transpose bit set only).
 *
 * \var Transform::Rot270
 *
 * Rotation by 270 degrees clockwise (90 degrees anticlockwise).
~~~
             A-B                          B-D
Input image  | |  goes to output image    | |
             C-D                          A-C
~~~
 * Numeric value: 5 (transpose and horizontal flip bits set).
 *
 * \var Transform::Rot90
 *
 * Rotation by 90 degrees clockwise (270 degrees anticlockwise).
~~~
             A-B                          C-A
Input image  | |  goes to output image    | |
             C-D                          D-B
~~~
 * Numeric value: 6 (transpose and vertical flip bits set).
 *
 * \var Transform::Rot180Transpose
 *
 * Rotation by 180 degrees followed by transpose (alternatively, transposition
 * about the "opposite diagonal").
~~~
             A-B                          D-B
Input image  | |  goes to output image    | |
             C-D                          C-A
~~~
 * Numeric value: 7 (all bits set).
 */

/**
 * \fn operator &(Transform t0, Transform t1)
 * \brief Apply bitwise AND operator between the bits in the two transforms
 * \param[in] t0 The first transform
 * \param[in] t1 The second transform
 */

/**
 * \fn operator |(Transform t0, Transform t1)
 * \brief Apply bitwise OR operator between the bits in the two transforms
 * \param[in] t0 The first transform
 * \param[in] t1 The second transform
 */

/**
 * \fn operator ^(Transform t0, Transform t1)
 * \brief Apply bitwise XOR operator between the bits in the two transforms
 * \param[in] t0 The first transform
 * \param[in] t1 The second transform
 */

/**
 * \fn operator &=(Transform &t0, Transform t1)
 * \brief Apply bitwise AND-assignment operator between the bits in the two
 * transforms
 * \param[in] t0 The first transform
 * \param[in] t1 The second transform
 */

/**
 * \fn operator |=(Transform &t0, Transform t1)
 * \brief Apply bitwise OR-assignment operator between the bits in the two
 * transforms
 * \param[in] t0 The first transform
 * \param[in] t1 The second transform
 */

/**
 * \fn operator ^=(Transform &t0, Transform t1)
 * \brief Apply bitwise XOR-assignment operator between the bits in the two
 * transforms
 * \param[in] t0 The first transform
 * \param[in] t1 The second transform
 */

/**
 * \brief Compose two transforms by applying \a t0 first then \a t1
 * \param[in] t0 The first transform to apply
 * \param[in] t1 The second transform to apply
 *
 * Compose two transforms into a transform that is equivalent to first applying
 * \a t0 and then applying \a t1. For example, `HFlip * Transpose` performs
 * `HFlip` first and then the `Transpose` yielding `Rot270`, as shown below.
~~~
             A-B                 B-A                     B-D
Input image  | |  -> HFlip ->    | |  -> Transpose ->    | |  = Rot270
             C-D                 D-C                     A-C
~~~
 * Note that composition is generally non-commutative for Transforms, and not
 * the same as XOR-ing the underlying bit representations.
 *
 * \return A Transform equivalent to applying \a t0 and then \a t1
 */
Transform operator*(Transform t0, Transform t1)
{
	/*
	 * Reorder the operations so that we imagine doing t0's transpose
	 * (if any) after t1's flips. The effect is to swap t1's hflips for
	 * vflips and vice versa, after which we can just xor all the bits.
	 */
	Transform reordered = t1;
	if (!!(t0 & Transform::Transpose)) {
		reordered = t1 & Transform::Transpose;
		if (!!(t1 & Transform::HFlip))
			reordered |= Transform::VFlip;
		if (!!(t1 & Transform::VFlip))
			reordered |= Transform::HFlip;
	}

	return reordered ^ t0;
}

/**
 * \brief Invert a transform
 * \param[in] t The transform to be inverted
 *
 * That is, we return the transform such that `t * (-t)` and `(-t) * t` both
 * yield the identity transform.
 */
Transform operator-(Transform t)
{
	/* All are self-inverses, except for Rot270 and Rot90. */
	static const Transform inverses[] = {
		Transform::Identity, Transform::HFlip, Transform::VFlip, Transform::HVFlip,
		Transform::Transpose, Transform::Rot90, Transform::Rot270, Transform::Rot180Transpose
	};

	return inverses[static_cast<int>(t)];
}

/**
 * \fn operator!(Transform t)
 * \brief Return `true` if the transform is the `Identity`, otherwise `false`
 * \param[in] t The transform to be tested
 */

/**
 * \fn operator~(Transform t)
 * \brief Return the transform with all the bits inverted individually
 * \param[in] t The transform of which the bits will be inverted
 *
 * This inverts the bits that encode the transform in a bitwise manner. Note
 * that this is not the proper inverse of transform \a t (for which use \a
 * operator-).
 */

/**
 * \brief Return the transform representing a rotation of the given angle
 * clockwise
 * \param[in] angle The angle of rotation in a clockwise sense. Negative values
 * can be used to represent anticlockwise rotations
 * \param[out] success Set to `true` if the angle is a multiple of 90 degrees,
 * otherwise `false`
 * \return The transform corresponding to the rotation if \a success was set to
 * `true`, otherwise the `Identity` transform
 */
Transform transformFromRotation(int angle, bool *success)
{
	angle = angle % 360;
	if (angle < 0)
		angle += 360;

	if (success != nullptr)
		*success = true;

	switch (angle) {
	case 0:
		return Transform::Identity;
	case 90:
		return Transform::Rot90;
	case 180:
		return Transform::Rot180;
	case 270:
		return Transform::Rot270;
	}

	if (success != nullptr)
		*success = false;

	return Transform::Identity;
}

namespace {

/**
 * \brief Return the transform representing \a orientation
 * \param[in] orientation The orientation to convert
 * \return The transform corresponding to \a orientation
 */
Transform transformFromOrientation(const Orientation &orientation)
{
	switch (orientation) {
	case Orientation::Rotate0:
		return Transform::Identity;
	case Orientation::Rotate0Mirror:
		return Transform::HFlip;
	case Orientation::Rotate180:
		return Transform::Rot180;
	case Orientation::Rotate180Mirror:
		return Transform::VFlip;
	case Orientation::Rotate90Mirror:
		return Transform::Transpose;
	case Orientation::Rotate90:
		return Transform::Rot90;
	case Orientation::Rotate270Mirror:
		return Transform::Rot180Transpose;
	case Orientation::Rotate270:
		return Transform::Rot270;
	}

	return Transform::Identity;
}

} /* namespace */

/**
 * \brief Return the Transform that applied to \a o2 gives \a o1
 * \param o1 The Orientation to obtain
 * \param o2 The base Orientation
 *
 * This operation can be used to easily compute the Transform to apply to a
 * base orientation \a o2 to get the desired orientation \a o1.
 *
 * \return A Transform that applied to \a o2 gives \a o1
 */
Transform operator/(const Orientation &o1, const Orientation &o2)
{
	Transform t1 = transformFromOrientation(o1);
	Transform t2 = transformFromOrientation(o2);

	return -t2 * t1;
}

/**
 * \brief Apply the Transform \a t on the orientation \a o
 * \param o The orientation
 * \param t The transform to apply on \a o
 * \return The Orientation resulting from applying \a t on \a o
 */
Orientation operator*(const Orientation &o, const Transform &t)
{
	/*
	 * Apply a Transform corresponding to the orientation first and
	 * then apply \a t to it.
	 */
	switch (transformFromOrientation(o) * t) {
	case Transform::Identity:
		return Orientation::Rotate0;
	case Transform::HFlip:
		return Orientation::Rotate0Mirror;
	case Transform::VFlip:
		return Orientation::Rotate180Mirror;
	case Transform::Rot180:
		return Orientation::Rotate180;
	case Transform::Transpose:
		return Orientation::Rotate90Mirror;
	case Transform::Rot270:
		return Orientation::Rotate270;
	case Transform::Rot90:
		return Orientation::Rotate90;
	case Transform::Rot180Transpose:
		return Orientation::Rotate270Mirror;
	}

	return Orientation::Rotate0;
}

/**
 * \brief Return a character string describing the transform
 * \param[in] t The transform to be described
 */
const char *transformToString(Transform t)
{
	static const char *strings[] = {
		"identity", "hflip", "vflip", "hvflip",
		"transpose", "rot270", "rot90", "rot180transpose"
	};

	return strings[static_cast<int>(t)];
}

} /* namespace libcamera */
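The composition and inversion rules above can be checked directly against the bit encoding. The following self-contained sketch exercises operator*(), operator-(), operator!() and transformFromRotation() exactly as they are documented in this file:

~~~
#include <cassert>

#include <libcamera/transform.h>

using namespace libcamera;

int main()
{
	/* Composition applies the left operand first: HFlip then Transpose. */
	assert((Transform::HFlip * Transform::Transpose) == Transform::Rot270);

	/* Composition is not plain XOR: swapping the operands changes the result. */
	assert((Transform::Transpose * Transform::HFlip) == Transform::Rot90);

	/* Rot90 and Rot270 are each other's inverses... */
	assert(-Transform::Rot90 == Transform::Rot270);

	/* ...so composing a transform with its inverse yields the identity. */
	assert(!(Transform::Rot90 * -Transform::Rot90));

	/* Angles are normalised, so -90 degrees means 270 degrees clockwise. */
	bool success;
	assert(transformFromRotation(-90, &success) == Transform::Rot270 && success);

	return 0;
}
~~~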
0
repos/libcamera/src
repos/libcamera/src/libcamera/formats.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2019, Google Inc. * * libcamera image formats */ #include "libcamera/internal/formats.h" #include <algorithm> #include <errno.h> #include <libcamera/base/log.h> #include <libcamera/base/utils.h> #include <libcamera/formats.h> /** * \file internal/formats.h * \brief Types and helper functions to handle libcamera image formats */ namespace libcamera { LOG_DEFINE_CATEGORY(Formats) /** * \class PixelFormatInfo * \brief Information about pixel formats * * The PixelFormatInfo class groups together information describing a pixel * format. It facilitates handling of pixel formats by providing data commonly * used in pipeline handlers. * * \var PixelFormatInfo::name * \brief The format name as a human-readable string, used as the text * representation of the PixelFormat * * \var PixelFormatInfo::format * \brief The PixelFormat described by this instance * * \var PixelFormatInfo::v4l2Formats * \brief The V4L2 pixel formats corresponding to the PixelFormat * * Multiple V4L2 formats may exist for one PixelFormat, as V4L2 defines * separate 4CCs for contiguous and non-contiguous versions of the same image * format. * * \var PixelFormatInfo::bitsPerPixel * \brief The average number of bits per pixel * * The number of bits per pixel averages the total number of bits for all * colour components over the whole image, excluding any padding bits or * padding pixels. * * For formats that store pixels with bit padding within words, only the * effective bits are taken into account. For instance, 12-bit Bayer data * stored in two bytes per pixel report 12, not 16, in this field. * * Formats that don't have a fixed number of bits per pixel, such as compressed * formats, report 0 in this field. * * \var PixelFormatInfo::colourEncoding * \brief The colour encoding type * * \var PixelFormatInfo::packed * \brief Tell if multiple pixels are packed in the same bytes * * Packed formats are defined as storing data from multiple pixels in the same * bytes. For instance, 12-bit Bayer data with two pixels stored in three bytes * is packed, while the same data stored with 4 bits of padding in two bytes * per pixel is not packed. * * \var PixelFormatInfo::pixelsPerGroup * \brief The number of pixels in a pixel group * * A pixel group is defined as the minimum number of pixels (including padding) * necessary in a row when the image has only one column of effective pixels. * pixelsPerGroup refers to this value. PixelFormatInfo::Plane::bytesPerGroup, * then, refers to the number of bytes that a pixel group consumes. This * definition of a pixel group allows simple calculation of stride, as * ceil(width / pixelsPerGroup) * bytesPerGroup. These values are determined * only in terms of a row. The ceiling accounts for padding. * * A pixel group has a second constraint, such that the pixel group * (bytesPerGroup and pixelsPerGroup) is the smallest repeatable unit. * What this means is that, for example, in the IPU3 formats, if there is only * one column of effective pixels, it looks like it could be fit in 5 bytes * with 3 padding pixels (for a total of 4 pixels over 5 bytes). However, this * unit is not repeatable, as at the 7th group in the same row, the pattern * is broken. Therefore, the pixel group for IPU3 formats must be 25 pixels * over 32 bytes. * * For example, for something simple like BGR888, it is self-explanatory: * the pixel group size is 1, and the bytes necessary is 3, and there is * only one plane with no (= 1) vertical subsampling. 
For YUYV, the * CbCr pair is shared between two pixels, so even if you have only one * pixel, you would still need a padded second Y sample, therefore the pixel * group size is 2, and bytes necessary is 4. YUYV also has no vertical * subsampling. NV12 has a pixel group size of 2 pixels, due to the CbCr plane. * The bytes per group then, for both planes, is 2. The first plane has no * vertical subsampling, but the second plane is subsampled by a factor of 2. * * The IPU3 raw Bayer formats are single-planar, and have a pixel group size of * 25, consuming 32 bytes, due to the packing pattern being repeated in memory * every 32 bytes. The IPU3 hardware, however, has an additional constraint on * the DMA burst size, requiring lines to be multiple of 64 bytes. This isn't an * intrinsic property of the formats and is thus not reflected here. It is * instead enforced by the corresponding pipeline handler. * * \var PixelFormatInfo::planes * \brief Information about pixels for each plane * * \sa PixelFormatInfo::Plane */ /** * \enum PixelFormatInfo::ColourEncoding * \brief The colour encoding type * * \var PixelFormatInfo::ColourEncodingRGB * \brief RGB colour encoding * * \var PixelFormatInfo::ColourEncodingYUV * \brief YUV colour encoding * * \var PixelFormatInfo::ColourEncodingRAW * \brief RAW colour encoding */ /** * \struct PixelFormatInfo::Plane * \brief Information about a single plane of a pixel format * * \var PixelFormatInfo::Plane::bytesPerGroup * \brief The number of bytes that a pixel group consumes * * \sa PixelFormatInfo::pixelsPerGroup * * \var PixelFormatInfo::Plane::verticalSubSampling * \brief Vertical subsampling multiplier * * This value is the ratio between the number of rows of pixels in the frame * to the number of rows of pixels in the plane. */ namespace { const PixelFormatInfo pixelFormatInfoInvalid{}; const std::map<PixelFormat, PixelFormatInfo> pixelFormatInfo{ /* RGB formats. 
*/ { formats::RGB565, { .name = "RGB565", .format = formats::RGB565, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_RGB565), }, .bitsPerPixel = 16, .colourEncoding = PixelFormatInfo::ColourEncodingRGB, .packed = false, .pixelsPerGroup = 1, .planes = {{ { 3, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::RGB565_BE, { .name = "RGB565_BE", .format = formats::RGB565_BE, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_RGB565X), }, .bitsPerPixel = 16, .colourEncoding = PixelFormatInfo::ColourEncodingRGB, .packed = false, .pixelsPerGroup = 1, .planes = {{ { 3, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::BGR888, { .name = "BGR888", .format = formats::BGR888, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_RGB24), }, .bitsPerPixel = 24, .colourEncoding = PixelFormatInfo::ColourEncodingRGB, .packed = false, .pixelsPerGroup = 1, .planes = {{ { 3, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::RGB888, { .name = "RGB888", .format = formats::RGB888, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_BGR24), }, .bitsPerPixel = 24, .colourEncoding = PixelFormatInfo::ColourEncodingRGB, .packed = false, .pixelsPerGroup = 1, .planes = {{ { 3, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::XRGB8888, { .name = "XRGB8888", .format = formats::XRGB8888, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_XBGR32), }, .bitsPerPixel = 32, .colourEncoding = PixelFormatInfo::ColourEncodingRGB, .packed = false, .pixelsPerGroup = 1, .planes = {{ { 4, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::XBGR8888, { .name = "XBGR8888", .format = formats::XBGR8888, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_RGBX32), }, .bitsPerPixel = 32, .colourEncoding = PixelFormatInfo::ColourEncodingRGB, .packed = false, .pixelsPerGroup = 1, .planes = {{ { 4, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::RGBX8888, { .name = "RGBX8888", .format = formats::RGBX8888, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_BGRX32), }, .bitsPerPixel = 32, .colourEncoding = PixelFormatInfo::ColourEncodingRGB, .packed = false, .pixelsPerGroup = 1, .planes = {{ { 4, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::BGRX8888, { .name = "BGRX8888", .format = formats::BGRX8888, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_XRGB32), }, .bitsPerPixel = 32, .colourEncoding = PixelFormatInfo::ColourEncodingRGB, .packed = false, .pixelsPerGroup = 1, .planes = {{ { 4, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::ABGR8888, { .name = "ABGR8888", .format = formats::ABGR8888, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_RGBA32), }, .bitsPerPixel = 32, .colourEncoding = PixelFormatInfo::ColourEncodingRGB, .packed = false, .pixelsPerGroup = 1, .planes = {{ { 4, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::ARGB8888, { .name = "ARGB8888", .format = formats::ARGB8888, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_ABGR32), }, .bitsPerPixel = 32, .colourEncoding = PixelFormatInfo::ColourEncodingRGB, .packed = false, .pixelsPerGroup = 1, .planes = {{ { 4, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::BGRA8888, { .name = "BGRA8888", .format = formats::BGRA8888, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_ARGB32), }, .bitsPerPixel = 32, .colourEncoding = PixelFormatInfo::ColourEncodingRGB, .packed = false, .pixelsPerGroup = 1, .planes = {{ { 4, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::RGBA8888, { .name = "RGBA8888", .format = formats::RGBA8888, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_BGRA32), }, .bitsPerPixel = 32, .colourEncoding = PixelFormatInfo::ColourEncodingRGB, .packed = false, .pixelsPerGroup = 1, .planes = {{ { 4, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::BGR161616, { .name = "BGR161616", 
.format = formats::BGR161616, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_RGB48), }, .bitsPerPixel = 48, .colourEncoding = PixelFormatInfo::ColourEncodingRGB, .packed = false, .pixelsPerGroup = 1, .planes = {{ { 3, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::RGB161616, { .name = "RGB161616", .format = formats::RGB161616, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_BGR48), }, .bitsPerPixel = 48, .colourEncoding = PixelFormatInfo::ColourEncodingRGB, .packed = false, .pixelsPerGroup = 1, .planes = {{ { 3, 1 }, { 0, 0 }, { 0, 0 } }}, } }, /* YUV packed formats. */ { formats::YUYV, { .name = "YUYV", .format = formats::YUYV, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_YUYV), }, .bitsPerPixel = 16, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, .packed = false, .pixelsPerGroup = 2, .planes = {{ { 4, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::YVYU, { .name = "YVYU", .format = formats::YVYU, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_YVYU), }, .bitsPerPixel = 16, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, .packed = false, .pixelsPerGroup = 2, .planes = {{ { 4, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::UYVY, { .name = "UYVY", .format = formats::UYVY, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_UYVY), }, .bitsPerPixel = 16, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, .packed = false, .pixelsPerGroup = 2, .planes = {{ { 4, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::VYUY, { .name = "VYUY", .format = formats::VYUY, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_VYUY), }, .bitsPerPixel = 16, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, .packed = false, .pixelsPerGroup = 2, .planes = {{ { 4, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::AVUY8888, { .name = "AVUY8888", .format = formats::AVUY8888, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_YUVA32), }, .bitsPerPixel = 32, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, .packed = false, .pixelsPerGroup = 1, .planes = {{ { 4, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::XVUY8888, { .name = "XVUY8888", .format = formats::XVUY8888, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_YUVX32), }, .bitsPerPixel = 32, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, .packed = false, .pixelsPerGroup = 1, .planes = {{ { 4, 1 }, { 0, 0 }, { 0, 0 } }}, } }, /* YUV planar formats. 
*/ { formats::NV12, { .name = "NV12", .format = formats::NV12, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_NV12), V4L2PixelFormat(V4L2_PIX_FMT_NV12M), }, .bitsPerPixel = 12, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, .packed = false, .pixelsPerGroup = 2, .planes = {{ { 2, 1 }, { 2, 2 }, { 0, 0 } }}, } }, { formats::NV21, { .name = "NV21", .format = formats::NV21, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_NV21), V4L2PixelFormat(V4L2_PIX_FMT_NV21M), }, .bitsPerPixel = 12, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, .packed = false, .pixelsPerGroup = 2, .planes = {{ { 2, 1 }, { 2, 2 }, { 0, 0 } }}, } }, { formats::NV16, { .name = "NV16", .format = formats::NV16, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_NV16), V4L2PixelFormat(V4L2_PIX_FMT_NV16M), }, .bitsPerPixel = 16, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, .packed = false, .pixelsPerGroup = 2, .planes = {{ { 2, 1 }, { 2, 1 }, { 0, 0 } }}, } }, { formats::NV61, { .name = "NV61", .format = formats::NV61, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_NV61), V4L2PixelFormat(V4L2_PIX_FMT_NV61M), }, .bitsPerPixel = 16, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, .packed = false, .pixelsPerGroup = 2, .planes = {{ { 2, 1 }, { 2, 1 }, { 0, 0 } }}, } }, { formats::NV24, { .name = "NV24", .format = formats::NV24, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_NV24), }, .bitsPerPixel = 24, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, .packed = false, .pixelsPerGroup = 1, .planes = {{ { 1, 1 }, { 2, 1 }, { 0, 0 } }}, } }, { formats::NV42, { .name = "NV42", .format = formats::NV42, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_NV42), }, .bitsPerPixel = 24, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, .packed = false, .pixelsPerGroup = 1, .planes = {{ { 1, 1 }, { 2, 1 }, { 0, 0 } }}, } }, { formats::YUV420, { .name = "YUV420", .format = formats::YUV420, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_YUV420), V4L2PixelFormat(V4L2_PIX_FMT_YUV420M), }, .bitsPerPixel = 12, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, .packed = false, .pixelsPerGroup = 2, .planes = {{ { 2, 1 }, { 1, 2 }, { 1, 2 } }}, } }, { formats::YVU420, { .name = "YVU420", .format = formats::YVU420, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_YVU420), V4L2PixelFormat(V4L2_PIX_FMT_YVU420M), }, .bitsPerPixel = 12, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, .packed = false, .pixelsPerGroup = 2, .planes = {{ { 2, 1 }, { 1, 2 }, { 1, 2 } }}, } }, { formats::YUV422, { .name = "YUV422", .format = formats::YUV422, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_YUV422P), V4L2PixelFormat(V4L2_PIX_FMT_YUV422M), }, .bitsPerPixel = 16, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, .packed = false, .pixelsPerGroup = 2, .planes = {{ { 2, 1 }, { 1, 1 }, { 1, 1 } }}, } }, { formats::YVU422, { .name = "YVU422", .format = formats::YVU422, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_YVU422M), }, .bitsPerPixel = 16, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, .packed = false, .pixelsPerGroup = 2, .planes = {{ { 2, 1 }, { 1, 1 }, { 1, 1 } }}, } }, { formats::YUV444, { .name = "YUV444", .format = formats::YUV444, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_YUV444M), }, .bitsPerPixel = 24, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, .packed = false, .pixelsPerGroup = 1, .planes = {{ { 1, 1 }, { 1, 1 }, { 1, 1 } }}, } }, { formats::YVU444, { .name = "YVU444", .format = formats::YVU444, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_YVU444M), }, .bitsPerPixel = 24, .colourEncoding = 
PixelFormatInfo::ColourEncodingYUV, .packed = false, .pixelsPerGroup = 1, .planes = {{ { 1, 1 }, { 1, 1 }, { 1, 1 } }}, } }, /* Greyscale formats. */ { formats::R8, { .name = "R8", .format = formats::R8, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_GREY), }, .bitsPerPixel = 8, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, .packed = false, .pixelsPerGroup = 1, .planes = {{ { 1, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::R10, { .name = "R10", .format = formats::R10, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_Y10), }, .bitsPerPixel = 10, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, .packed = false, .pixelsPerGroup = 1, .planes = {{ { 2, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::R10_CSI2P, { .name = "R10_CSI2P", .format = formats::R10_CSI2P, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_Y10P), }, .bitsPerPixel = 10, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, .packed = true, .pixelsPerGroup = 4, .planes = {{ { 5, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::R12, { .name = "R12", .format = formats::R12, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_Y12), }, .bitsPerPixel = 12, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, .packed = false, .pixelsPerGroup = 1, .planes = {{ { 2, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::R16, { .name = "R16", .format = formats::R16, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_Y16), }, .bitsPerPixel = 16, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, .packed = false, .pixelsPerGroup = 1, .planes = {{ { 2, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::MONO_PISP_COMP1, { .name = "MONO_PISP_COMP1", .format = formats::MONO_PISP_COMP1, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_PISP_COMP1_MONO), }, .bitsPerPixel = 8, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, .packed = true, .pixelsPerGroup = 1, .planes = {{ { 1, 1 }, { 0, 0 }, { 0, 0 } }}, } }, /* Bayer formats. 
*/ { formats::SBGGR8, { .name = "SBGGR8", .format = formats::SBGGR8, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_SBGGR8), }, .bitsPerPixel = 8, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, .packed = false, .pixelsPerGroup = 2, .planes = {{ { 2, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::SGBRG8, { .name = "SGBRG8", .format = formats::SGBRG8, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_SGBRG8), }, .bitsPerPixel = 8, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, .packed = false, .pixelsPerGroup = 2, .planes = {{ { 2, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::SGRBG8, { .name = "SGRBG8", .format = formats::SGRBG8, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_SGRBG8), }, .bitsPerPixel = 8, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, .packed = false, .pixelsPerGroup = 2, .planes = {{ { 2, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::SRGGB8, { .name = "SRGGB8", .format = formats::SRGGB8, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_SRGGB8), }, .bitsPerPixel = 8, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, .packed = false, .pixelsPerGroup = 2, .planes = {{ { 2, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::SBGGR10, { .name = "SBGGR10", .format = formats::SBGGR10, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_SBGGR10), }, .bitsPerPixel = 10, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, .packed = false, .pixelsPerGroup = 2, .planes = {{ { 4, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::SGBRG10, { .name = "SGBRG10", .format = formats::SGBRG10, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_SGBRG10), }, .bitsPerPixel = 10, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, .packed = false, .pixelsPerGroup = 2, .planes = {{ { 4, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::SGRBG10, { .name = "SGRBG10", .format = formats::SGRBG10, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_SGRBG10), }, .bitsPerPixel = 10, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, .packed = false, .pixelsPerGroup = 2, .planes = {{ { 4, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::SRGGB10, { .name = "SRGGB10", .format = formats::SRGGB10, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_SRGGB10), }, .bitsPerPixel = 10, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, .packed = false, .pixelsPerGroup = 2, .planes = {{ { 4, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::SBGGR10_CSI2P, { .name = "SBGGR10_CSI2P", .format = formats::SBGGR10_CSI2P, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_SBGGR10P), }, .bitsPerPixel = 10, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, .packed = true, .pixelsPerGroup = 4, .planes = {{ { 5, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::SGBRG10_CSI2P, { .name = "SGBRG10_CSI2P", .format = formats::SGBRG10_CSI2P, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_SGBRG10P), }, .bitsPerPixel = 10, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, .packed = true, .pixelsPerGroup = 4, .planes = {{ { 5, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::SGRBG10_CSI2P, { .name = "SGRBG10_CSI2P", .format = formats::SGRBG10_CSI2P, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_SGRBG10P), }, .bitsPerPixel = 10, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, .packed = true, .pixelsPerGroup = 4, .planes = {{ { 5, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::SRGGB10_CSI2P, { .name = "SRGGB10_CSI2P", .format = formats::SRGGB10_CSI2P, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_SRGGB10P), }, .bitsPerPixel = 10, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, .packed = true, .pixelsPerGroup = 4, .planes = {{ { 5, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { 
formats::SBGGR12, { .name = "SBGGR12", .format = formats::SBGGR12, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_SBGGR12), }, .bitsPerPixel = 12, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, .packed = false, .pixelsPerGroup = 2, .planes = {{ { 4, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::SGBRG12, { .name = "SGBRG12", .format = formats::SGBRG12, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_SGBRG12), }, .bitsPerPixel = 12, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, .packed = false, .pixelsPerGroup = 2, .planes = {{ { 4, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::SGRBG12, { .name = "SGRBG12", .format = formats::SGRBG12, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_SGRBG12), }, .bitsPerPixel = 12, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, .packed = false, .pixelsPerGroup = 2, .planes = {{ { 4, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::SRGGB12, { .name = "SRGGB12", .format = formats::SRGGB12, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_SRGGB12), }, .bitsPerPixel = 12, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, .packed = false, .pixelsPerGroup = 2, .planes = {{ { 4, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::SBGGR12_CSI2P, { .name = "SBGGR12_CSI2P", .format = formats::SBGGR12_CSI2P, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_SBGGR12P), }, .bitsPerPixel = 12, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, .packed = true, .pixelsPerGroup = 2, .planes = {{ { 3, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::SGBRG12_CSI2P, { .name = "SGBRG12_CSI2P", .format = formats::SGBRG12_CSI2P, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_SGBRG12P), }, .bitsPerPixel = 12, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, .packed = true, .pixelsPerGroup = 2, .planes = {{ { 3, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::SGRBG12_CSI2P, { .name = "SGRBG12_CSI2P", .format = formats::SGRBG12_CSI2P, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_SGRBG12P), }, .bitsPerPixel = 12, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, .packed = true, .pixelsPerGroup = 2, .planes = {{ { 3, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::SRGGB12_CSI2P, { .name = "SRGGB12_CSI2P", .format = formats::SRGGB12_CSI2P, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_SRGGB12P), }, .bitsPerPixel = 12, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, .packed = true, .pixelsPerGroup = 2, .planes = {{ { 3, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::SBGGR14, { .name = "SBGGR14", .format = formats::SBGGR14, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_SBGGR14), }, .bitsPerPixel = 14, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, .packed = false, .pixelsPerGroup = 2, .planes = {{ { 4, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::SGBRG14, { .name = "SGBRG14", .format = formats::SGBRG14, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_SGBRG14), }, .bitsPerPixel = 14, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, .packed = false, .pixelsPerGroup = 2, .planes = {{ { 4, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::SGRBG14, { .name = "SGRBG14", .format = formats::SGRBG14, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_SGRBG14), }, .bitsPerPixel = 14, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, .packed = false, .pixelsPerGroup = 2, .planes = {{ { 4, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::SRGGB14, { .name = "SRGGB14", .format = formats::SRGGB14, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_SRGGB14), }, .bitsPerPixel = 14, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, .packed = false, .pixelsPerGroup = 2, .planes = {{ { 4, 1 }, { 0, 0 }, { 0, 0 } 
}}, } }, { formats::SBGGR14_CSI2P, { .name = "SBGGR14_CSI2P", .format = formats::SBGGR14_CSI2P, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_SBGGR14P), }, .bitsPerPixel = 14, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, .packed = true, .pixelsPerGroup = 4, .planes = {{ { 7, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::SGBRG14_CSI2P, { .name = "SGBRG14_CSI2P", .format = formats::SGBRG14_CSI2P, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_SGBRG14P), }, .bitsPerPixel = 14, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, .packed = true, .pixelsPerGroup = 4, .planes = {{ { 7, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::SGRBG14_CSI2P, { .name = "SGRBG14_CSI2P", .format = formats::SGRBG14_CSI2P, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_SGRBG14P), }, .bitsPerPixel = 14, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, .packed = true, .pixelsPerGroup = 4, .planes = {{ { 7, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::SRGGB14_CSI2P, { .name = "SRGGB14_CSI2P", .format = formats::SRGGB14_CSI2P, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_SRGGB14P), }, .bitsPerPixel = 14, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, .packed = true, .pixelsPerGroup = 4, .planes = {{ { 7, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::SBGGR16, { .name = "SBGGR16", .format = formats::SBGGR16, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_SBGGR16), }, .bitsPerPixel = 16, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, .packed = false, .pixelsPerGroup = 2, .planes = {{ { 4, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::SGBRG16, { .name = "SGBRG16", .format = formats::SGBRG16, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_SGBRG16), }, .bitsPerPixel = 16, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, .packed = false, .pixelsPerGroup = 2, .planes = {{ { 4, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::SGRBG16, { .name = "SGRBG16", .format = formats::SGRBG16, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_SGRBG16), }, .bitsPerPixel = 16, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, .packed = false, .pixelsPerGroup = 2, .planes = {{ { 4, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::SRGGB16, { .name = "SRGGB16", .format = formats::SRGGB16, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_SRGGB16), }, .bitsPerPixel = 16, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, .packed = false, .pixelsPerGroup = 2, .planes = {{ { 4, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::SBGGR10_IPU3, { .name = "SBGGR10_IPU3", .format = formats::SBGGR10_IPU3, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_IPU3_SBGGR10), }, .bitsPerPixel = 10, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, .packed = true, /* \todo remember to double this in the ipu3 pipeline handler */ .pixelsPerGroup = 25, .planes = {{ { 32, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::SGBRG10_IPU3, { .name = "SGBRG10_IPU3", .format = formats::SGBRG10_IPU3, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_IPU3_SGBRG10), }, .bitsPerPixel = 10, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, .packed = true, .pixelsPerGroup = 25, .planes = {{ { 32, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::SGRBG10_IPU3, { .name = "SGRBG10_IPU3", .format = formats::SGRBG10_IPU3, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_IPU3_SGRBG10), }, .bitsPerPixel = 10, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, .packed = true, .pixelsPerGroup = 25, .planes = {{ { 32, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::SRGGB10_IPU3, { .name = "SRGGB10_IPU3", .format = formats::SRGGB10_IPU3, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_IPU3_SRGGB10), 
}, .bitsPerPixel = 10, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, .packed = true, .pixelsPerGroup = 25, .planes = {{ { 32, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::BGGR_PISP_COMP1, { .name = "BGGR_PISP_COMP1", .format = formats::BGGR_PISP_COMP1, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_PISP_COMP1_BGGR), }, .bitsPerPixel = 8, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, .packed = true, .pixelsPerGroup = 2, .planes = {{ { 2, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::GBRG_PISP_COMP1, { .name = "GBRG_PISP_COMP1", .format = formats::GBRG_PISP_COMP1, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_PISP_COMP1_GBRG), }, .bitsPerPixel = 8, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, .packed = true, .pixelsPerGroup = 2, .planes = {{ { 2, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::GRBG_PISP_COMP1, { .name = "GRBG_PISP_COMP1", .format = formats::GRBG_PISP_COMP1, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_PISP_COMP1_GRBG), }, .bitsPerPixel = 8, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, .packed = true, .pixelsPerGroup = 2, .planes = {{ { 2, 1 }, { 0, 0 }, { 0, 0 } }}, } }, { formats::RGGB_PISP_COMP1, { .name = "RGGB_PISP_COMP1", .format = formats::RGGB_PISP_COMP1, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_PISP_COMP1_RGGB), }, .bitsPerPixel = 8, .colourEncoding = PixelFormatInfo::ColourEncodingRAW, .packed = true, .pixelsPerGroup = 2, .planes = {{ { 2, 1 }, { 0, 0 }, { 0, 0 } }}, } }, /* Compressed formats. */ { formats::MJPEG, { .name = "MJPEG", .format = formats::MJPEG, .v4l2Formats = { V4L2PixelFormat(V4L2_PIX_FMT_MJPEG), V4L2PixelFormat(V4L2_PIX_FMT_JPEG), }, .bitsPerPixel = 0, .colourEncoding = PixelFormatInfo::ColourEncodingYUV, .packed = false, .pixelsPerGroup = 1, .planes = {{ { 1, 1 }, { 0, 0 }, { 0, 0 } }}, } }, }; } /* namespace */ /** * \fn bool PixelFormatInfo::isValid() const * \brief Check if the pixel format info is valid * \return True if the pixel format info is valid, false otherwise */ /** * \brief Retrieve information about a pixel format * \param[in] format The pixel format * \return The PixelFormatInfo describing the \a format if known, or an invalid * PixelFormatInfo otherwise */ const PixelFormatInfo &PixelFormatInfo::info(const PixelFormat &format) { const auto iter = pixelFormatInfo.find(format); if (iter == pixelFormatInfo.end()) { LOG(Formats, Warning) << "Unsupported pixel format " << utils::hex(format.fourcc()); return pixelFormatInfoInvalid; } return iter->second; } /** * \brief Retrieve information about a V4L2 pixel format * \param[in] format The V4L2 pixel format * \return The PixelFormatInfo describing the V4L2 \a format if known, or an * invalid PixelFormatInfo otherwise */ const PixelFormatInfo &PixelFormatInfo::info(const V4L2PixelFormat &format) { PixelFormat pixelFormat = format.toPixelFormat(false); if (!pixelFormat.isValid()) return pixelFormatInfoInvalid; const auto iter = pixelFormatInfo.find(pixelFormat); if (iter == pixelFormatInfo.end()) return pixelFormatInfoInvalid; return iter->second; } /** * \brief Retrieve information about a pixel format * \param[in] name The name of pixel format * \return The PixelFormatInfo describing the PixelFormat matching the * \a name if known, or an invalid PixelFormatInfo otherwise */ const PixelFormatInfo &PixelFormatInfo::info(const std::string &name) { for (const auto &info : pixelFormatInfo) { if (info.second.name == name) return info.second; } return pixelFormatInfoInvalid; } /** * \brief Compute the stride * \param[in] width The width of the line, in pixels * 
\param[in] plane The index of the plane whose stride is to be computed
 * \param[in] align The stride alignment, in bytes
 *
 * The stride is the number of bytes necessary to store a full line of a frame,
 * including padding at the end of the line. This function takes into account
 * the alignment constraints intrinsic to the format (for instance, the
 * SGRBG12_CSI2P format stores two 12-bit pixels in 3 bytes, and thus has a
 * required stride alignment of 3 bytes). Additional alignment constraints may
 * be specified through the \a align parameter, which will cause the stride to
 * be rounded up to the next multiple of \a align.
 *
 * For multi-planar formats, different planes may have different stride values.
 * The \a plane parameter selects which plane to compute the stride for.
 *
 * \return The number of bytes necessary to store a line, or 0 if the
 * PixelFormatInfo instance or the \a plane is not valid
 */
unsigned int PixelFormatInfo::stride(unsigned int width, unsigned int plane,
				     unsigned int align) const
{
	if (!isValid()) {
		LOG(Formats, Warning) << "Invalid pixel format, stride is zero";
		return 0;
	}

	if (plane >= planes.size() || !planes[plane].bytesPerGroup) {
		LOG(Formats, Warning) << "Invalid plane index, stride is zero";
		return 0;
	}

	/* ceil(width / pixelsPerGroup) * bytesPerGroup */
	unsigned int stride = (width + pixelsPerGroup - 1) / pixelsPerGroup
			    * planes[plane].bytesPerGroup;

	/* ceil(stride / align) * align */
	return (stride + align - 1) / align * align;
}

/**
 * \brief Compute the number of bytes necessary to store a plane of a frame
 * \param[in] size The size of the frame, in pixels
 * \param[in] plane The plane index
 * \param[in] align The stride alignment, in bytes (1 for default alignment)
 *
 * The plane size is computed by multiplying the line stride and the frame
 * height, taking subsampling and other format characteristics into account.
 * Stride alignment constraints may be specified through the \a align
 * parameter.
 *
 * \sa stride()
 *
 * \return The number of bytes necessary to store the plane, or 0 if the
 * PixelFormatInfo instance is not valid or the plane number isn't valid for
 * the format
 */
unsigned int PixelFormatInfo::planeSize(const Size &size, unsigned int plane,
					unsigned int align) const
{
	unsigned int stride = PixelFormatInfo::stride(size.width, plane, align);
	if (!stride)
		return 0;

	return planeSize(size.height, plane, stride);
}

/**
 * \brief Compute the number of bytes necessary to store a plane of a frame
 * \param[in] height The height of the frame, in pixels
 * \param[in] plane The plane index
 * \param[in] stride The plane stride, in bytes
 *
 * The plane size is computed by multiplying the line stride and the frame
 * height, taking subsampling and other format characteristics into account.
 * The plane stride is specified directly through the \a stride parameter.
* * \return The number of bytes necessary to store the plane, or 0 if the * PixelFormatInfo instance is not valid or the plane number isn't valid for the * format */ unsigned int PixelFormatInfo::planeSize(unsigned int height, unsigned int plane, unsigned int stride) const { unsigned int vertSubSample = planes[plane].verticalSubSampling; if (!vertSubSample) return 0; /* stride * ceil(height / verticalSubSampling) */ return stride * ((height + vertSubSample - 1) / vertSubSample); } /** * \brief Compute the number of bytes necessary to store a frame * \param[in] size The size of the frame, in pixels * \param[in] align The stride alignment, in bytes (1 for default alignment) * * The frame size is computed by adding the size of all planes, as computed by * planeSize(), using the specified alignment constraints for all planes. For * more complex stride constraints, use the frameSize() overloaded version that * takes an array of stride values. * * \sa planeSize() * * \return The number of bytes necessary to store the frame, or 0 if the * PixelFormatInfo instance is not valid */ unsigned int PixelFormatInfo::frameSize(const Size &size, unsigned int align) const { unsigned int sum = 0; for (const auto &[i, plane] : utils::enumerate(planes)) { if (plane.bytesPerGroup == 0) break; sum += planeSize(size, i, align); } return sum; } /** * \brief Compute the number of bytes necessary to store a frame * \param[in] size The size of the frame, in pixels * \param[in] strides The strides to use for each plane * * This function is an overloaded version that takes custom strides for each * plane, to be used when the device has custom alignment constraints that * can't be described by just an alignment value. * * \return The number of bytes necessary to store the frame, or 0 if the * PixelFormatInfo instance is not valid */ unsigned int PixelFormatInfo::frameSize(const Size &size, const std::array<unsigned int, 3> &strides) const { /* stride * ceil(height / verticalSubSampling) */ unsigned int sum = 0; for (unsigned int i = 0; i < 3; i++) { unsigned int vertSubSample = planes[i].verticalSubSampling; if (!vertSubSample) continue; sum += strides[i] * ((size.height + vertSubSample - 1) / vertSubSample); } return sum; } /** * \brief Retrieve the number of planes represented by the format * \return The number of planes used by the format */ unsigned int PixelFormatInfo::numPlanes() const { unsigned int count = 0; for (const Plane &p : planes) { if (p.bytesPerGroup == 0) break; count++; } return count; } } /* namespace libcamera */
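The pixel-group arithmetic documented above can be verified with a worked example. This sketch computes the layout of a 640x480 NV12 frame through the public info(), stride(), planeSize() and frameSize() functions shown in this file, passing the alignment of 1 explicitly:

~~~
#include <cassert>

#include <libcamera/formats.h>
#include <libcamera/geometry.h>

#include "libcamera/internal/formats.h"

using namespace libcamera;

int main()
{
	const PixelFormatInfo &info = PixelFormatInfo::info(formats::NV12);

	/*
	 * NV12 groups 2 pixels over 2 bytes in each plane, so for a width of
	 * 640 pixels: stride = ceil(640 / 2) * 2 = 640 bytes in both planes.
	 */
	assert(info.stride(640, 0, 1) == 640);
	assert(info.stride(640, 1, 1) == 640);

	/*
	 * The CbCr plane is vertically subsampled by 2, so a 640x480 frame
	 * needs 640 * 480 = 307200 bytes for Y and 640 * 240 = 153600 bytes
	 * for CbCr, i.e. 460800 bytes in total (12 bits per pixel).
	 */
	assert(info.planeSize(Size(640, 480), 0, 1) == 307200);
	assert(info.planeSize(Size(640, 480), 1, 1) == 153600);
	assert(info.frameSize(Size(640, 480), 1) == 460800);

	return 0;
}
~~~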
0
repos/libcamera/src
repos/libcamera/src/libcamera/formats.yaml
# SPDX-License-Identifier: LGPL-2.1-or-later # # Copyright (C) 2020, Google Inc. # %YAML 1.1 --- formats: - R8: fourcc: DRM_FORMAT_R8 - R10: fourcc: DRM_FORMAT_R10 - R12: fourcc: DRM_FORMAT_R12 - R16: fourcc: DRM_FORMAT_R16 - RGB565: fourcc: DRM_FORMAT_RGB565 - RGB565_BE: fourcc: DRM_FORMAT_RGB565 big_endian: true - RGB888: fourcc: DRM_FORMAT_RGB888 - BGR888: fourcc: DRM_FORMAT_BGR888 - XRGB8888: fourcc: DRM_FORMAT_XRGB8888 - XBGR8888: fourcc: DRM_FORMAT_XBGR8888 - RGBX8888: fourcc: DRM_FORMAT_RGBX8888 - BGRX8888: fourcc: DRM_FORMAT_BGRX8888 - ARGB8888: fourcc: DRM_FORMAT_ARGB8888 - ABGR8888: fourcc: DRM_FORMAT_ABGR8888 - RGBA8888: fourcc: DRM_FORMAT_RGBA8888 - BGRA8888: fourcc: DRM_FORMAT_BGRA8888 - RGB161616: fourcc: DRM_FORMAT_RGB161616 - BGR161616: fourcc: DRM_FORMAT_BGR161616 - YUYV: fourcc: DRM_FORMAT_YUYV - YVYU: fourcc: DRM_FORMAT_YVYU - UYVY: fourcc: DRM_FORMAT_UYVY - VYUY: fourcc: DRM_FORMAT_VYUY - AVUY8888: fourcc: DRM_FORMAT_AVUY8888 - XVUY8888: fourcc: DRM_FORMAT_XVUY8888 - NV12: fourcc: DRM_FORMAT_NV12 - NV21: fourcc: DRM_FORMAT_NV21 - NV16: fourcc: DRM_FORMAT_NV16 - NV61: fourcc: DRM_FORMAT_NV61 - NV24: fourcc: DRM_FORMAT_NV24 - NV42: fourcc: DRM_FORMAT_NV42 - YUV420: fourcc: DRM_FORMAT_YUV420 - YVU420: fourcc: DRM_FORMAT_YVU420 - YUV422: fourcc: DRM_FORMAT_YUV422 - YVU422: fourcc: DRM_FORMAT_YVU422 - YUV444: fourcc: DRM_FORMAT_YUV444 - YVU444: fourcc: DRM_FORMAT_YVU444 - MJPEG: fourcc: DRM_FORMAT_MJPEG - SRGGB8: fourcc: DRM_FORMAT_SRGGB8 - SGRBG8: fourcc: DRM_FORMAT_SGRBG8 - SGBRG8: fourcc: DRM_FORMAT_SGBRG8 - SBGGR8: fourcc: DRM_FORMAT_SBGGR8 - SRGGB10: fourcc: DRM_FORMAT_SRGGB10 - SGRBG10: fourcc: DRM_FORMAT_SGRBG10 - SGBRG10: fourcc: DRM_FORMAT_SGBRG10 - SBGGR10: fourcc: DRM_FORMAT_SBGGR10 - SRGGB12: fourcc: DRM_FORMAT_SRGGB12 - SGRBG12: fourcc: DRM_FORMAT_SGRBG12 - SGBRG12: fourcc: DRM_FORMAT_SGBRG12 - SBGGR12: fourcc: DRM_FORMAT_SBGGR12 - SRGGB14: fourcc: DRM_FORMAT_SRGGB14 - SGRBG14: fourcc: DRM_FORMAT_SGRBG14 - SGBRG14: fourcc: DRM_FORMAT_SGBRG14 - SBGGR14: fourcc: DRM_FORMAT_SBGGR14 - SRGGB16: fourcc: DRM_FORMAT_SRGGB16 - SGRBG16: fourcc: DRM_FORMAT_SGRBG16 - SGBRG16: fourcc: DRM_FORMAT_SGBRG16 - SBGGR16: fourcc: DRM_FORMAT_SBGGR16 - R10_CSI2P: fourcc: DRM_FORMAT_R10 mod: MIPI_FORMAT_MOD_CSI2_PACKED - SRGGB10_CSI2P: fourcc: DRM_FORMAT_SRGGB10 mod: MIPI_FORMAT_MOD_CSI2_PACKED - SGRBG10_CSI2P: fourcc: DRM_FORMAT_SGRBG10 mod: MIPI_FORMAT_MOD_CSI2_PACKED - SGBRG10_CSI2P: fourcc: DRM_FORMAT_SGBRG10 mod: MIPI_FORMAT_MOD_CSI2_PACKED - SBGGR10_CSI2P: fourcc: DRM_FORMAT_SBGGR10 mod: MIPI_FORMAT_MOD_CSI2_PACKED - SRGGB12_CSI2P: fourcc: DRM_FORMAT_SRGGB12 mod: MIPI_FORMAT_MOD_CSI2_PACKED - SGRBG12_CSI2P: fourcc: DRM_FORMAT_SGRBG12 mod: MIPI_FORMAT_MOD_CSI2_PACKED - SGBRG12_CSI2P: fourcc: DRM_FORMAT_SGBRG12 mod: MIPI_FORMAT_MOD_CSI2_PACKED - SBGGR12_CSI2P: fourcc: DRM_FORMAT_SBGGR12 mod: MIPI_FORMAT_MOD_CSI2_PACKED - SRGGB14_CSI2P: fourcc: DRM_FORMAT_SRGGB14 mod: MIPI_FORMAT_MOD_CSI2_PACKED - SGRBG14_CSI2P: fourcc: DRM_FORMAT_SGRBG14 mod: MIPI_FORMAT_MOD_CSI2_PACKED - SGBRG14_CSI2P: fourcc: DRM_FORMAT_SGBRG14 mod: MIPI_FORMAT_MOD_CSI2_PACKED - SBGGR14_CSI2P: fourcc: DRM_FORMAT_SBGGR14 mod: MIPI_FORMAT_MOD_CSI2_PACKED - SRGGB10_IPU3: fourcc: DRM_FORMAT_SRGGB10 mod: IPU3_FORMAT_MOD_PACKED - SGRBG10_IPU3: fourcc: DRM_FORMAT_SGRBG10 mod: IPU3_FORMAT_MOD_PACKED - SGBRG10_IPU3: fourcc: DRM_FORMAT_SGBRG10 mod: IPU3_FORMAT_MOD_PACKED - SBGGR10_IPU3: fourcc: DRM_FORMAT_SBGGR10 mod: IPU3_FORMAT_MOD_PACKED - RGGB_PISP_COMP1: fourcc: DRM_FORMAT_SRGGB16 mod: PISP_FORMAT_MOD_COMPRESS_MODE1 - 
GRBG_PISP_COMP1: fourcc: DRM_FORMAT_SGRBG16 mod: PISP_FORMAT_MOD_COMPRESS_MODE1 - GBRG_PISP_COMP1: fourcc: DRM_FORMAT_SGBRG16 mod: PISP_FORMAT_MOD_COMPRESS_MODE1 - BGGR_PISP_COMP1: fourcc: DRM_FORMAT_SBGGR16 mod: PISP_FORMAT_MOD_COMPRESS_MODE1 - MONO_PISP_COMP1: fourcc: DRM_FORMAT_R16 mod: PISP_FORMAT_MOD_COMPRESS_MODE1 ...
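Each entry in this table pairs a DRM fourcc with an optional modifier, so packed and unpacked variants of a format share a fourcc and are distinguished only by the modifier. A small sketch of that relationship, assuming PixelFormat exposes the fourcc() and modifier() accessors:

~~~
#include <cassert>

#include <libcamera/formats.h>

using namespace libcamera;

int main()
{
	/*
	 * SRGGB10 and SRGGB10_CSI2P share DRM_FORMAT_SRGGB10; only the
	 * MIPI CSI-2 packing modifier tells them apart, so they compare
	 * as distinct PixelFormats.
	 */
	assert(formats::SRGGB10.fourcc() == formats::SRGGB10_CSI2P.fourcc());
	assert(formats::SRGGB10.modifier() != formats::SRGGB10_CSI2P.modifier());
	assert(formats::SRGGB10 != formats::SRGGB10_CSI2P);

	return 0;
}
~~~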
0
repos/libcamera/src
repos/libcamera/src/libcamera/controls.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
 * Copyright (C) 2019, Google Inc.
 *
 * Control handling
 */

#include <libcamera/controls.h>

#include <iomanip>
#include <sstream>
#include <string>
#include <string.h>

#include <libcamera/base/log.h>
#include <libcamera/base/utils.h>

#include "libcamera/internal/control_validator.h"

/**
 * \file controls.h
 * \brief Framework to manage controls related to an object
 *
 * A control is a means to govern or influence the operation of an object, and
 * in particular of a camera. Every control is defined by a unique numerical
 * ID, a name string and the data type of the value it stores. The libcamera
 * API defines a set of standard controls in the libcamera::controls
 * namespace, as a set of instances of the Control class.
 *
 * The main way for applications to interact with controls is through the
 * ControlList stored in the Request class:
 *
 * \code{.cpp}
 * Request *req = ...;
 * ControlList &controls = req->controls();
 * controls.set(controls::AwbEnable, false);
 * controls.set(controls::ManualExposure, 1000);
 *
 * ...
 *
 * int32_t exposure = controls.get(controls::ManualExposure);
 * \endcode
 *
 * The ControlList::get() and ControlList::set() functions automatically deduce
 * the data type based on the control.
 */

namespace libcamera {

LOG_DEFINE_CATEGORY(Controls)

namespace {

static constexpr size_t ControlValueSize[] = {
	[ControlTypeNone]	= 0,
	[ControlTypeBool]	= sizeof(bool),
	[ControlTypeByte]	= sizeof(uint8_t),
	[ControlTypeInteger32]	= sizeof(int32_t),
	[ControlTypeInteger64]	= sizeof(int64_t),
	[ControlTypeFloat]	= sizeof(float),
	[ControlTypeString]	= sizeof(char),
	[ControlTypeRectangle]	= sizeof(Rectangle),
	[ControlTypeSize]	= sizeof(Size),
};

} /* namespace */

/**
 * \enum ControlType
 * \brief Define the data type of a Control
 * \var ControlTypeNone
 * Invalid type, for empty values
 * \var ControlTypeBool
 * The control stores a boolean value
 * \var ControlTypeByte
 * The control stores a byte value as an unsigned 8-bit integer
 * \var ControlTypeInteger32
 * The control stores a 32-bit integer value
 * \var ControlTypeInteger64
 * The control stores a 64-bit integer value
 * \var ControlTypeFloat
 * The control stores a 32-bit floating point value
 * \var ControlTypeString
 * The control stores a string value as an array of char
 * \var ControlTypeRectangle
 * The control stores a Rectangle value
 * \var ControlTypeSize
 * The control stores a Size value
 */

/**
 * \class ControlValue
 * \brief Abstract type representing the value of a control
 */

/** \todo Revisit the ControlValue layout when stabilizing the ABI */
static_assert(sizeof(ControlValue) == 16, "Invalid size of ControlValue class");

/**
 * \brief Construct an empty ControlValue.
 */
ControlValue::ControlValue()
	: type_(ControlTypeNone), isArray_(false), numElements_(0)
{
}

/**
 * \fn template<typename T> T ControlValue::ControlValue(const T &value)
 * \brief Construct a ControlValue of type T
 * \param[in] value Initial value
 *
 * This function constructs a new instance of ControlValue and stores the \a
 * value inside it. If the type \a T is equivalent to Span<R>, the instance
 * stores an array of values of type \a R. Otherwise the instance stores a
 * single value of type \a T. The numElements() and type() are updated to
 * reflect the stored value.
*/ void ControlValue::release() { std::size_t size = numElements_ * ControlValueSize[type_]; if (size > sizeof(value_)) { delete[] reinterpret_cast<uint8_t *>(storage_); storage_ = nullptr; } } ControlValue::~ControlValue() { release(); } /** * \brief Construct a ControlValue with the content of \a other * \param[in] other The ControlValue to copy content from */ ControlValue::ControlValue(const ControlValue &other) : type_(ControlTypeNone), numElements_(0) { *this = other; } /** * \brief Replace the content of the ControlValue with a copy of the content * of \a other * \param[in] other The ControlValue to copy content from * \return The ControlValue with its content replaced with the one of \a other */ ControlValue &ControlValue::operator=(const ControlValue &other) { set(other.type_, other.isArray_, other.data().data(), other.numElements_, ControlValueSize[other.type_]); return *this; } /** * \fn ControlValue::type() * \brief Retrieve the data type of the value * \return The value data type */ /** * \fn ControlValue::isNone() * \brief Determine if the value is not initialised * \return True if the value type is ControlTypeNone, false otherwise */ /** * \fn ControlValue::isArray() * \brief Determine if the value stores an array * \return True if the value stores an array, false otherwise */ /** * \fn ControlValue::numElements() * \brief Retrieve the number of elements stored in the ControlValue * * For instances storing an array, this function returns the number of elements * in the array. For instances storing a string, it returns the length of the * string, not counting the terminating '\0'. Otherwise, it returns 1. * * \return The number of elements stored in the ControlValue */ /** * \brief Retrieve the raw data of a control value * \return The raw data of the control value as a span of uint8_t */ Span<const uint8_t> ControlValue::data() const { std::size_t size = numElements_ * ControlValueSize[type_]; const uint8_t *data = size > sizeof(value_) ? reinterpret_cast<const uint8_t *>(storage_) : reinterpret_cast<const uint8_t *>(&value_); return { data, size }; } /** * \copydoc ControlValue::data() const */ Span<uint8_t> ControlValue::data() { Span<const uint8_t> data = const_cast<const ControlValue *>(this)->data(); return { const_cast<uint8_t *>(data.data()), data.size() }; } /** * \brief Assemble and return a string describing the value * \return A string describing the ControlValue */ std::string ControlValue::toString() const { if (type_ == ControlTypeNone) return "<ValueType Error>"; const uint8_t *data = ControlValue::data().data(); if (type_ == ControlTypeString) return std::string(reinterpret_cast<const char *>(data), numElements_); std::string str(isArray_ ? "[ " : ""); for (unsigned int i = 0; i < numElements_; ++i) { switch (type_) { case ControlTypeBool: { const bool *value = reinterpret_cast<const bool *>(data); str += *value ? 
"true" : "false"; break; } case ControlTypeByte: { const uint8_t *value = reinterpret_cast<const uint8_t *>(data); str += std::to_string(*value); break; } case ControlTypeInteger32: { const int32_t *value = reinterpret_cast<const int32_t *>(data); str += std::to_string(*value); break; } case ControlTypeInteger64: { const int64_t *value = reinterpret_cast<const int64_t *>(data); str += std::to_string(*value); break; } case ControlTypeFloat: { const float *value = reinterpret_cast<const float *>(data); str += std::to_string(*value); break; } case ControlTypeRectangle: { const Rectangle *value = reinterpret_cast<const Rectangle *>(data); str += value->toString(); break; } case ControlTypeSize: { const Size *value = reinterpret_cast<const Size *>(data); str += value->toString(); break; } case ControlTypeNone: case ControlTypeString: break; } if (i + 1 != numElements_) str += ", "; data += ControlValueSize[type_]; } if (isArray_) str += " ]"; return str; } /** * \brief Compare ControlValue instances for equality * \return True if the values have identical types and values, false otherwise */ bool ControlValue::operator==(const ControlValue &other) const { if (type_ != other.type_) return false; if (numElements_ != other.numElements()) return false; if (isArray_ != other.isArray_) return false; return memcmp(data().data(), other.data().data(), data().size()) == 0; } /** * \fn bool ControlValue::operator!=() * \brief Compare ControlValue instances for non equality * \return False if the values have identical types and values, true otherwise */ /** * \fn template<typename T> T ControlValue::get() const * \brief Get the control value * * This function returns the contained value as an instance of \a T. If the * ControlValue instance stores a single value, the type \a T shall match the * stored value type(). If the instance stores an array of values, the type * \a T should be equal to Span<const R>, and the type \a R shall match the * stored value type(). The behaviour is undefined otherwise. * * Note that a ControlValue instance that stores a non-array value is not * equivalent to an instance that stores an array value containing a single * element. The latter shall be accessed through a Span<const R> type, while * the former shall be accessed through a type \a T corresponding to type(). * * \return The control value */ /** * \fn template<typename T> void ControlValue::set(const T &value) * \brief Set the control value to \a value * \param[in] value The control value * * This function stores the \a value in the instance. If the type \a T is * equivalent to Span<R>, the instance stores an array of values of type \a R. * Otherwise the instance stores a single value of type \a T. The numElements() * and type() are updated to reflect the stored value. * * The entire content of \a value is copied to the instance, no reference to \a * value or to the data it references is retained. This may be an expensive * operation for Span<> values that refer to large arrays. 
*/ void ControlValue::set(ControlType type, bool isArray, const void *data, std::size_t numElements, std::size_t elementSize) { ASSERT(elementSize == ControlValueSize[type]); reserve(type, isArray, numElements); Span<uint8_t> storage = ControlValue::data(); memcpy(storage.data(), data, storage.size()); } /** * \brief Set the control type and reserve memory * \param[in] type The control type * \param[in] isArray True to make the value an array * \param[in] numElements The number of elements * * This function sets the type of the control value to \a type, and reserves * memory to store the control value. If \a isArray is true, the instance * becomes an array control and storage for \a numElements is reserved. * Otherwise the instance becomes a simple control, numElements is ignored, and * storage for the single element is reserved. */ void ControlValue::reserve(ControlType type, bool isArray, std::size_t numElements) { if (!isArray) numElements = 1; std::size_t oldSize = numElements_ * ControlValueSize[type_]; std::size_t newSize = numElements * ControlValueSize[type]; if (oldSize != newSize) release(); type_ = type; isArray_ = isArray; numElements_ = numElements; if (oldSize == newSize) return; if (newSize > sizeof(value_)) storage_ = reinterpret_cast<void *>(new uint8_t[newSize]); } /** * \class ControlId * \brief Control static metadata * * The ControlId class stores a control ID, name and data type. It provides * unique identification of a control, but without support for compile-time * type deduction that the derived template Control class supports. See the * Control class for more information. */ /** * \fn ControlId::ControlId(unsigned int id, const std::string &name, ControlType type) * \brief Construct a ControlId instance * \param[in] id The control numerical ID * \param[in] name The control name * \param[in] type The control data type */ /** * \fn unsigned int ControlId::id() const * \brief Retrieve the control numerical ID * \return The control numerical ID */ /** * \fn const char *ControlId::name() const * \brief Retrieve the control name * \return The control name */ /** * \fn ControlType ControlId::type() const * \brief Retrieve the control data type * \return The control data type */ /** * \fn bool operator==(unsigned int lhs, const ControlId &rhs) * \brief Compare a ControlId with a control numerical ID * \param[in] lhs Left-hand side numerical ID * \param[in] rhs Right-hand side ControlId * * \return True if \a lhs is equal to \a rhs.id(), false otherwise */ /** * \fn bool operator==(const ControlId &lhs, unsigned int rhs) * \brief Compare a ControlId with a control numerical ID * \param[in] lhs Left-hand side ControlId * \param[in] rhs Right-hand side numerical ID * * \return True if \a lhs.id() is equal to \a rhs, false otherwise */ /** * \class Control * \brief Describe a control and its intrinsic properties * * The Control class models a control exposed by an object. Its template type * name T refers to the control data type, and allows functions that operate on * control values to be defined as template functions using the same type T for * the control value. See for instance how the ControlList::get() function * returns a value corresponding to the type of the requested control. * * While this class is the main means to refer to a control, the control * identifying information is stored in the non-template base ControlId class. 
 * This allows code that operates on a set of controls of different types to
 * reference those controls through a ControlId instead of a Control. For
 * instance, the list of controls supported by a camera is exposed as ControlId
 * instead of Control.
 *
 * Controls of any type can be defined through template specialisation, but
 * libcamera only supports the bool, uint8_t, int32_t, int64_t and float types
 * natively (this includes types that are equivalent to the supported types,
 * such as int and long int).
 *
 * Control IDs shall be unique. While nothing prevents multiple instances of
 * the Control class from being created with the same ID for the same object,
 * doing so may cause undefined behaviour.
 */

/**
 * \fn Control::Control(unsigned int id, const char *name)
 * \brief Construct a Control instance
 * \param[in] id The control numerical ID
 * \param[in] name The control name
 *
 * The control data type is automatically deduced from the template type T.
 */

/**
 * \typedef Control::type
 * \brief The Control template type T
 */

/**
 * \class ControlInfo
 * \brief Describe the limits of valid values for a Control
 *
 * The ControlInfo expresses the constraints on valid values for a control.
 * The constraints depend on the object the control applies to, and are
 * constant for the lifetime of that object. They are typically constructed by
 * pipeline handlers to describe the controls they support.
 */

/**
 * \brief Construct a ControlInfo with minimum and maximum range parameters
 * \param[in] min The control minimum value
 * \param[in] max The control maximum value
 * \param[in] def The control default value
 */
ControlInfo::ControlInfo(const ControlValue &min,
			 const ControlValue &max,
			 const ControlValue &def)
	: min_(min), max_(max), def_(def)
{
}

/**
 * \brief Construct a ControlInfo from the list of valid values
 * \param[in] values The control valid values
 * \param[in] def The control default value
 *
 * Construct a ControlInfo from a list of valid values. The ControlInfo
 * minimum and maximum values are set to the first and last members of the
 * values list respectively. The default value is set to \a def if provided, or
 * to the minimum value otherwise.
 */
ControlInfo::ControlInfo(Span<const ControlValue> values,
			 const ControlValue &def)
{
	min_ = values.front();
	max_ = values.back();
	def_ = !def.isNone() ? def : values.front();

	values_.reserve(values.size());
	for (const ControlValue &value : values)
		values_.push_back(value);
}

/**
 * \brief Construct a boolean ControlInfo with both boolean values
 * \param[in] values The control valid boolean values (both true and false)
 * \param[in] def The control default boolean value
 *
 * Construct a ControlInfo for a boolean control, where both true and false are
 * valid values. \a values must be { false, true } (the order is irrelevant).
 * The minimum value will always be false, and the maximum always true. The
 * default value is \a def.
 */
ControlInfo::ControlInfo(std::set<bool> values, bool def)
	: min_(false), max_(true), def_(def), values_({ false, true })
{
	ASSERT(values.count(def) && values.size() == 2);
}

/**
 * \brief Construct a boolean ControlInfo with only one valid value
 * \param[in] value The control valid boolean value
 *
 * Construct a ControlInfo for a boolean control, where there is only one valid
 * value. The minimum, maximum, and default values will all be \a value.
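 *
 * For example, a feature that cannot be disabled could be described as
 * follows (a minimal sketch):
 *
 * \code{.cpp}
 * ControlInfo alwaysOn(true);  // min(), max() and def() all report true
 * \endcode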
 */
ControlInfo::ControlInfo(bool value)
	: min_(value), max_(value), def_(value)
{
	values_ = { value };
}

/**
 * \fn ControlInfo::min()
 * \brief Retrieve the minimum value of the control
 *
 * For string controls, this is the minimum length of the string, not counting
 * the terminating '\0'. For all other control types, this is the minimum value
 * of each element.
 *
 * \return A ControlValue with the minimum value for the control
 */

/**
 * \fn ControlInfo::max()
 * \brief Retrieve the maximum value of the control
 *
 * For string controls, this is the maximum length of the string, not counting
 * the terminating '\0'. For all other control types, this is the maximum value
 * of each element.
 *
 * \return A ControlValue with the maximum value for the control
 */

/**
 * \fn ControlInfo::def()
 * \brief Retrieve the default value of the control
 * \return A ControlValue with the default value for the control
 */

/**
 * \fn ControlInfo::values()
 * \brief Retrieve the list of valid values
 *
 * For controls that support a pre-defined number of values, the enumeration of
 * those is reported through a vector of ControlValue instances accessible with
 * this function.
 *
 * \return A vector of ControlValue representing the control valid values
 */

/**
 * \brief Provide a string representation of the ControlInfo
 */
std::string ControlInfo::toString() const
{
	std::stringstream ss;

	ss << "[" << min_.toString() << ".." << max_.toString() << "]";

	return ss.str();
}

/**
 * \fn bool ControlInfo::operator==()
 * \brief Compare ControlInfo instances for equality
 * \return True if the constraints have identical min and max, false otherwise
 */

/**
 * \fn bool ControlInfo::operator!=()
 * \brief Compare ControlInfo instances for non equality
 * \return True if the constraints have different min and max, false otherwise
 */

/**
 * \typedef ControlIdMap
 * \brief A map of numerical control ID to ControlId
 *
 * The map is used by ControlList instances to access controls by numerical
 * IDs. A global map of all libcamera controls is provided by
 * controls::controls.
 */

/**
 * \class ControlInfoMap
 * \brief A map of ControlId to ControlInfo
 *
 * The ControlInfoMap class describes controls supported by an object as an
 * unordered map of ControlId pointers to ControlInfo instances. Unlike the
 * standard std::unordered_map<> class, it is designed to be immutable once
 * constructed, and thus only exposes the read accessors of the
 * std::unordered_map<> base class.
 *
 * The class is constructed with a reference to a ControlIdMap. This allows
 * providing access to the mapped elements using numerical ID keys, in addition
 * to the features of the standard unordered map. All ControlId keys in the map
 * must appear in the ControlIdMap.
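 *
 * For instance, a map describing two libcamera controls could be constructed
 * as follows (a minimal sketch):
 *
 * \code{.cpp}
 * ControlInfoMap infoMap({
 * 	{ &controls::Brightness, ControlInfo(-1.0f, 1.0f) },
 * 	{ &controls::Contrast, ControlInfo(0.0f, 2.0f) },
 * }, controls::controls);
 * \endcode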
 */

/**
 * \typedef ControlInfoMap::Map
 * \brief The base std::unordered_map<> container
 */

/**
 * \fn ControlInfoMap::ControlInfoMap(const ControlInfoMap &other)
 * \brief Copy constructor, construct a ControlInfoMap from a copy of \a other
 * \param[in] other The other ControlInfoMap
 */

/**
 * \brief Construct a ControlInfoMap from an initializer list
 * \param[in] init The initializer list
 * \param[in] idmap The idmap used by the ControlInfoMap
 */
ControlInfoMap::ControlInfoMap(std::initializer_list<Map::value_type> init,
			       const ControlIdMap &idmap)
	: Map(init), idmap_(&idmap)
{
	ASSERT(validate());
}

/**
 * \brief Construct a ControlInfoMap from a plain map
 * \param[in] info The control info plain map
 * \param[in] idmap The idmap used by the ControlInfoMap
 *
 * Construct a new ControlInfoMap and populate its contents with those of
 * \a info using move semantics. Upon return the \a info map will be empty.
 */
ControlInfoMap::ControlInfoMap(Map &&info, const ControlIdMap &idmap)
	: Map(std::move(info)), idmap_(&idmap)
{
	ASSERT(validate());
}

/**
 * \fn ControlInfoMap &ControlInfoMap::operator=(const ControlInfoMap &other)
 * \brief Copy assignment operator, replace the contents with a copy of \a other
 * \param[in] other The other ControlInfoMap
 * \return A reference to the ControlInfoMap
 */

bool ControlInfoMap::validate()
{
	if (!idmap_)
		return false;

	for (const auto &ctrl : *this) {
		const ControlId *id = ctrl.first;
		auto it = idmap_->find(id->id());

		/*
		 * Make sure all control IDs are part of the idmap and verify
		 * the control info matches the expected type.
		 */
		if (it == idmap_->end() || it->second != id) {
			LOG(Controls, Error)
				<< "Control " << utils::hex(id->id())
				<< " not in the idmap";
			return false;
		}

		/*
		 * For string controls, min and max define the valid
		 * range for the string size, not for the individual
		 * values.
		 */
		ControlType rangeType = id->type() == ControlTypeString
				      ? ControlTypeInteger32 : id->type();
		const ControlInfo &info = ctrl.second;

		if (info.min().type() != rangeType) {
			LOG(Controls, Error)
				<< "Control " << utils::hex(id->id())
				<< " type and info type mismatch";
			return false;
		}
	}

	return true;
}

/**
 * \brief Access specified element by numerical ID
 * \param[in] id The numerical ID
 * \return A reference to the element whose ID is equal to \a id
 */
ControlInfoMap::mapped_type &ControlInfoMap::at(unsigned int id)
{
	ASSERT(idmap_);

	return at(idmap_->at(id));
}

/**
 * \brief Access specified element by numerical ID
 * \param[in] id The numerical ID
 * \return A const reference to the element whose ID is equal to \a id
 */
const ControlInfoMap::mapped_type &ControlInfoMap::at(unsigned int id) const
{
	ASSERT(idmap_);

	return at(idmap_->at(id));
}

/**
 * \brief Count the number of elements matching a numerical ID
 * \param[in] id The numerical ID
 * \return The number of elements matching the numerical \a id
 */
ControlInfoMap::size_type ControlInfoMap::count(unsigned int id) const
{
	if (!idmap_)
		return 0;

	/*
	 * The ControlInfoMap and its idmap have a 1:1 mapping between their
	 * entries, we can thus just count the matching entries in idmap to
	 * avoid an additional lookup.
	 */
	return idmap_->count(id);
}

/**
 * \brief Find the element matching a numerical ID
 * \param[in] id The numerical ID
 * \return An iterator pointing to the element matching the numerical \a id, or
 * end() if no such element exists
 */
ControlInfoMap::iterator ControlInfoMap::find(unsigned int id)
{
	if (!idmap_)
		return end();

	auto iter = idmap_->find(id);
	if (iter == idmap_->end())
		return end();

	return find(iter->second);
}

/**
 * \brief Find the element matching a numerical ID
 * \param[in] id The numerical ID
 * \return A const iterator pointing to the element matching the numerical
 * \a id, or end() if no such element exists
 */
ControlInfoMap::const_iterator ControlInfoMap::find(unsigned int id) const
{
	if (!idmap_)
		return end();

	auto iter = idmap_->find(id);
	if (iter == idmap_->end())
		return end();

	return find(iter->second);
}

/**
 * \fn const ControlIdMap &ControlInfoMap::idmap() const
 * \brief Retrieve the ControlId map
 *
 * Constructing ControlList instances for V4L2 controls requires a ControlIdMap
 * for the V4L2 device that the control list targets. This helper function
 * returns a suitable idmap for that purpose.
 *
 * \return The ControlId map
 */

/**
 * \class ControlList
 * \brief Associate a list of ControlId with their values for an object
 *
 * The ControlList class stores values of controls exposed by an object. The
 * lists returned by the Request::controls() and Request::metadata() functions
 * refer to the camera that the request belongs to.
 *
 * Control lists are constructed with a map of all the controls supported by
 * their object, and an optional ControlValidator to further validate the
 * controls.
 */

/**
 * \brief Construct a ControlList not associated with any object
 *
 * This constructor is meant to support ControlList serialization and shall not
 * be used directly by applications.
 */
ControlList::ControlList()
	: validator_(nullptr), idmap_(nullptr), infoMap_(nullptr)
{
}

/**
 * \brief Construct a ControlList with an optional control validator
 * \param[in] idmap The ControlId map for the control list target object
 * \param[in] validator The validator (may be null)
 *
 * For ControlList containing libcamera controls, a global map of all libcamera
 * controls is provided by controls::controls and can be used as the \a idmap
 * argument.
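 *
 * For example (a minimal sketch):
 *
 * \code{.cpp}
 * ControlList list(controls::controls);
 * list.set(controls::Brightness, 0.5f);
 * \endcode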
*/ ControlList::ControlList(const ControlIdMap &idmap, const ControlValidator *validator) : validator_(validator), idmap_(&idmap), infoMap_(nullptr) { } /** * \brief Construct a ControlList with the idmap of a control info map * \param[in] infoMap The ControlInfoMap for the control list target object * \param[in] validator The validator (may be null) */ ControlList::ControlList(const ControlInfoMap &infoMap, const ControlValidator *validator) : validator_(validator), idmap_(&infoMap.idmap()), infoMap_(&infoMap) { } /** * \typedef ControlList::iterator * \brief Iterator for the controls contained within the list */ /** * \typedef ControlList::const_iterator * \brief Const iterator for the controls contained within the list */ /** * \fn iterator ControlList::begin() * \brief Retrieve an iterator to the first Control in the list * \return An iterator to the first Control in the list */ /** * \fn const_iterator ControlList::begin() const * \brief Retrieve a const_iterator to the first Control in the list * \return A const_iterator to the first Control in the list */ /** * \fn iterator ControlList::end() * \brief Retrieve an iterator pointing to the past-the-end control in the list * \return An iterator to the element following the last control in the list */ /** * \fn const_iterator ControlList::end() const * \brief Retrieve a const iterator pointing to the past-the-end control in the * list * \return A const iterator to the element following the last control in the * list */ /** * \fn ControlList::empty() * \brief Identify if the list is empty * \return True if the list does not contain any control, false otherwise */ /** * \fn ControlList::size() * \brief Retrieve the number of controls in the list * \return The number of Control entries stored in the list */ /** * \fn ControlList::clear() * \brief Removes all controls from the list */ /** * \enum ControlList::MergePolicy * \brief The policy used by the merge function * * \var ControlList::MergePolicy::KeepExisting * \brief Existing controls in the target list are kept * * \var ControlList::MergePolicy::OverwriteExisting * \brief Existing controls in the target list are updated */ /** * \brief Merge the \a source into the ControlList * \param[in] source The ControlList to merge into this object * \param[in] policy Controls if existing elements in *this shall be * overwritten * * Merging two control lists copies elements from the \a source and inserts * them in *this. If the \a source contains elements whose key is already * present in *this, then those elements are only overwritten if * \a policy is MergePolicy::OverwriteExisting. * * Only control lists created from the same ControlIdMap or ControlInfoMap may * be merged. Attempting to do otherwise results in undefined behaviour. * * \todo Reimplement or implement an overloaded version which internally uses * std::unordered_map::merge() and accepts a non-const argument. */ void ControlList::merge(const ControlList &source, MergePolicy policy) { /** * \todo ASSERT that the current and source ControlList are derived * from a compatible ControlIdMap, to prevent undefined behaviour due to * id collisions. * * This can not currently be a direct pointer comparison due to the * duplication of the ControlIdMaps in the isolated IPA use cases. * Furthermore, manually checking each entry of the id map is identical * is expensive. 
* See https://bugs.libcamera.org/show_bug.cgi?id=31 for further details */ for (const auto &ctrl : source) { if (policy == MergePolicy::KeepExisting && contains(ctrl.first)) { const ControlId *id = idmap_->at(ctrl.first); LOG(Controls, Warning) << "Control " << id->name() << " not overwritten"; continue; } set(ctrl.first, ctrl.second); } } /** * \brief Check if the list contains a control with the specified \a id * \param[in] id The control numerical ID * * \return True if the list contains a matching control, false otherwise */ bool ControlList::contains(unsigned int id) const { return controls_.find(id) != controls_.end(); } /** * \fn ControlList::get(const Control<T> &ctrl) const * \brief Get the value of control \a ctrl * \param[in] ctrl The control * * Beside getting the value of a control, this function can also be used to * check if a control is present in the ControlList by converting the returned * std::optional<T> to bool (or calling its has_value() function). * * \return A std::optional<T> containing the control value, or std::nullopt if * the control \a ctrl is not present in the list */ /** * \fn ControlList::set(const Control<T> &ctrl, const V &value) * \brief Set the control \a ctrl value to \a value * \param[in] ctrl The control * \param[in] value The control value * * This function sets the value of a control in the control list. If the control * is already present in the list, its value is updated, otherwise it is added * to the list. * * The behaviour is undefined if the control \a ctrl is not supported by the * object that the list refers to. */ /** * \fn ControlList::set(const Control<Span<T, Size>> &ctrl, const std::initializer_list<V> &value) * \copydoc ControlList::set(const Control<T> &ctrl, const V &value) */ /** * \brief Get the value of control \a id * \param[in] id The control numerical ID * * The behaviour is undefined if the control \a id is not present in the list. * Use ControlList::contains() to test for the presence of a control in the * list before retrieving its value. * * \return The control value */ const ControlValue &ControlList::get(unsigned int id) const { static const ControlValue zero; const ControlValue *val = find(id); if (!val) return zero; return *val; } /** * \brief Set the value of control \a id to \a value * \param[in] id The control ID * \param[in] value The control value * * This function sets the value of a control in the control list. If the control * is already present in the list, its value is updated, otherwise it is added * to the list. * * The behaviour is undefined if the control \a id is not supported by the * object that the list refers to. */ void ControlList::set(unsigned int id, const ControlValue &value) { ControlValue *val = find(id); if (!val) return; *val = value; } /** * \fn ControlList::infoMap() * \brief Retrieve the ControlInfoMap used to construct the ControlList * * \return The ControlInfoMap used to construct the ControlList. ControlList * instances constructed with ControlList() or * ControlList(const ControlIdMap &idmap, ControlValidator *validator) have no * associated ControlInfoMap, nullptr is returned in that case. */ /** * \fn ControlList::idMap() * \brief Retrieve the ControlId map used to construct the ControlList * \return The ControlId map used to construct the ControlList. ControlList * instances constructed with the default contructor have no associated idmap, * nullptr is returned in that case. 
*/ const ControlValue *ControlList::find(unsigned int id) const { const auto iter = controls_.find(id); if (iter == controls_.end()) { LOG(Controls, Error) << "Control " << utils::hex(id) << " not found"; return nullptr; } return &iter->second; } ControlValue *ControlList::find(unsigned int id) { if (validator_ && !validator_->validate(id)) { LOG(Controls, Error) << "Control " << utils::hex(id) << " is not valid for " << validator_->name(); return nullptr; } return &controls_[id]; } } /* namespace libcamera */
0
repos/libcamera/src
repos/libcamera/src/libcamera/shared_mem_object.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2023 Raspberry Pi Ltd * Copyright (C) 2024 Andrei Konovalov * Copyright (C) 2024 Dennis Bonke * Copyright (C) 2024 Ideas on Board Oy * * Helpers for shared memory allocations */ #include "libcamera/internal/shared_mem_object.h" #include <stddef.h> #include <stdint.h> #include <sys/mman.h> #include <sys/syscall.h> #include <sys/types.h> #include <unistd.h> /** * \file shared_mem_object.cpp * \brief Helpers for shared memory allocations */ namespace libcamera { /** * \class SharedMem * \brief Helper class to allocate and manage memory shareable between processes * * SharedMem manages memory suitable for sharing between processes. When an * instance is constructed, it allocates a memory buffer of the requested size * backed by an anonymous file, using the memfd API. * * The allocated memory is exposed by the mem() function. If memory allocation * fails, the function returns an empty Span. This can be also checked using the * bool() operator. * * The file descriptor for the backing file is exposed as a SharedFD by the fd() * function. It can be shared with other processes across IPC boundaries, which * can then map the memory with mmap(). * * A single memfd is created for every SharedMem. If there is a need to allocate * a large number of objects in shared memory, these objects should be grouped * together and use the shared memory allocated by a single SharedMem object if * possible. This will help to minimize the number of created memfd's. */ SharedMem::SharedMem() = default; /** * \brief Construct a SharedMem with memory of the given \a size * \param[in] name Name of the SharedMem * \param[in] size Size of the shared memory to allocate and map * * The \a name is used for debugging purpose only. Multiple SharedMem instances * can have the same name. */ SharedMem::SharedMem(const std::string &name, std::size_t size) { #if HAVE_MEMFD_CREATE int fd = memfd_create(name.c_str(), MFD_CLOEXEC); #else int fd = syscall(SYS_memfd_create, name.c_str(), MFD_CLOEXEC); #endif if (fd < 0) return; fd_ = SharedFD(std::move(fd)); if (!fd_.isValid()) return; if (ftruncate(fd_.get(), size) < 0) { fd_ = SharedFD(); return; } void *mem = mmap(nullptr, size, PROT_READ | PROT_WRITE, MAP_SHARED, fd_.get(), 0); if (mem == MAP_FAILED) { fd_ = SharedFD(); return; } mem_ = { static_cast<uint8_t *>(mem), size }; } /** * \brief Move constructor for SharedMem * \param[in] rhs The object to move */ SharedMem::SharedMem(SharedMem &&rhs) { this->fd_ = std::move(rhs.fd_); this->mem_ = rhs.mem_; rhs.mem_ = {}; } /** * \brief Destroy the SharedMem instance * * Destroying an instance invalidates the memory mapping exposed with mem(). * Other mappings of the backing file, created in this or other processes with * mmap(), remain valid. * * Similarly, other references to the backing file descriptor created by copying * the SharedFD returned by fd() remain valid. The underlying memory will be * freed only when all file descriptors that reference the anonymous file get * closed. 
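 *
 * For illustration, a copy of the SharedFD keeps the backing file, and thus
 * the memory, alive after the SharedMem is destroyed (a minimal sketch):
 *
 * \code{.cpp}
 * SharedFD fd;
 * {
 * 	SharedMem shm("example", 4096);
 * 	fd = shm.fd();
 * }
 * // The mapping exposed by shm.mem() is gone, but fd still references
 * // the anonymous file and can be shared or mapped again with mmap().
 * \endcode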
*/ SharedMem::~SharedMem() { if (!mem_.empty()) munmap(mem_.data(), mem_.size_bytes()); } /** * \brief Move assignment operator for SharedMem * \param[in] rhs The object to move */ SharedMem &SharedMem::operator=(SharedMem &&rhs) { this->fd_ = std::move(rhs.fd_); this->mem_ = rhs.mem_; rhs.mem_ = {}; return *this; } /** * \fn const SharedFD &SharedMem::fd() const * \brief Retrieve the file descriptor for the underlying shared memory * \return The file descriptor, or an invalid SharedFD if allocation failed */ /** * \fn Span<uint8_t> SharedMem::mem() const * \brief Retrieve the underlying shared memory * \return The memory buffer, or an empty Span if allocation failed */ /** * \fn SharedMem::operator bool() * \brief Check if the shared memory allocation succeeded * \return True if allocation of the shared memory succeeded, false otherwise */ /** * \class SharedMemObject * \brief Helper class to allocate an object in shareable memory * \tparam The object type * * The SharedMemObject class is a specialization of the SharedMem class that * wraps an object of type \a T and constructs it in shareable memory. It uses * the same underlying memory allocation and sharing mechanism as the SharedMem * class. * * The wrapped object is constructed at the same time as the SharedMemObject * instance, by forwarding the arguments passed to the SharedMemObject * constructor. The underlying memory allocation is sized to the object \a T * size. The bool() operator should be used to check the allocation was * successful. The object can be accessed using the dereference operators * operator*() and operator->(). * * While no restriction on the type \a T is enforced, not all types are suitable * for sharing between multiple processes. Most notably, any object type that * contains pointer or reference members will likely cause issues. Even if those * members refer to other members of the same object, the shared memory will be * mapped at different addresses in different processes, and the pointers will * not be valid. * * A new anonymous file is created for every SharedMemObject instance. If there * is a need to share a large number of small objects, these objects should be * grouped into a single larger object to limit the number of file descriptors. * * To share the object with other processes, see the SharedMem documentation. */ /** * \var SharedMemObject::kSize * \brief The size of the object stored in shared memory */ /** * \fn SharedMemObject::SharedMemObject(const std::string &name, Args &&...args) * \brief Construct a SharedMemObject * \param[in] name Name of the SharedMemObject * \param[in] args Arguments to pass to the constructor of the object T * * The \a name is used for debugging purpose only. Multiple SharedMem instances * can have the same name. */ /** * \fn SharedMemObject::SharedMemObject(SharedMemObject<T> &&rhs) * \brief Move constructor for SharedMemObject * \param[in] rhs The object to move */ /** * \fn SharedMemObject::~SharedMemObject() * \brief Destroy the SharedMemObject instance * * Destroying a SharedMemObject calls the wrapped T object's destructor. While * the underlying memory may not be freed immediately if other mappings have * been created manually (see SharedMem::~SharedMem() for more information), the * stored object may be modified. Depending on the ~T() destructor, accessing * the object after destruction of the SharedMemObject causes undefined * behaviour. It is the responsibility of the user of this class to synchronize * with other users who have access to the shared object. 
*/ /** * \fn SharedMemObject::operator=(SharedMemObject<T> &&rhs) * \brief Move assignment operator for SharedMemObject * \param[in] rhs The SharedMemObject object to take the data from * * Moving a SharedMemObject does not affect the stored object. */ /** * \fn SharedMemObject::operator->() * \brief Dereference the stored object * \return Pointer to the stored object */ /** * \fn const T *SharedMemObject::operator->() const * \copydoc SharedMemObject::operator-> */ /** * \fn SharedMemObject::operator*() * \brief Dereference the stored object * \return Reference to the stored object */ /** * \fn const T &SharedMemObject::operator*() const * \copydoc SharedMemObject::operator* */ } /* namespace libcamera */
0
repos/libcamera/src
repos/libcamera/src/libcamera/control_serializer.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2019, Google Inc. * * Control (de)serializer */ #include "libcamera/internal/control_serializer.h" #include <algorithm> #include <memory> #include <vector> #include <libcamera/base/log.h> #include <libcamera/base/span.h> #include <libcamera/control_ids.h> #include <libcamera/controls.h> #include <libcamera/property_ids.h> #include <libcamera/ipa/ipa_controls.h> #include "libcamera/internal/byte_stream_buffer.h" /** * \file control_serializer.h * \brief Serialization and deserialization helpers for controls */ namespace libcamera { LOG_DEFINE_CATEGORY(Serializer) /** * \class ControlSerializer * \brief Serializer and deserializer for control-related classes * * The control serializer is a helper to serialize and deserialize * ControlInfoMap and ControlValue instances for the purpose of communication * with IPA modules. * * Neither the ControlInfoMap nor the ControlList are self-contained data * container. ControlInfoMap references an external ControlId in each of its * entries, and ControlList references a ControlInfoMap for the purpose of * validation. Serializing and deserializing those objects thus requires a * context that maintains the associations between them. The control serializer * fulfils this task. * * ControlInfoMap instances can be serialized on their own, but require * ControlId instances to be provided at deserialization time. The serializer * recreates those ControlId instances and stores them in an internal cache, * from which the ControlInfoMap is populated. * * ControlList instances need to be associated with a ControlInfoMap when * deserialized. To make this possible, the control lists are serialized with a * handle to their ControlInfoMap, and the map is looked up from the handle at * deserialization time. To make this possible, the serializer assigns a * numerical handle to ControlInfoMap instances when they are serialized, and * stores the mapping between handle and ControlInfoMap both when serializing * (for the pipeline handler side) and deserializing (for the IPA side) them. * This mapping is used when serializing a ControlList to include the * corresponding ControlInfoMap handle in the binary data, and when * deserializing to retrieve the corresponding ControlInfoMap. * * As independent ControlSerializer instances are used on both sides of the IPC * boundary, and the two instances operate without a shared point of control, * there is a potential risk of collision of the numerical handles assigned to * each serialized ControlInfoMap. For this reason the control serializer is * initialized with a seed and the handle is incremented by 2, so that instances * initialized with a different seed operate on a separate numerical space, * avoiding any collision risk. * * In order to perform those tasks, the serializer keeps an internal state that * needs to be properly populated. This mechanism requires the ControlInfoMap * corresponding to a ControlList to have been serialized or deserialized * before the ControlList is serialized or deserialized. Failure to comply with * that constraint results in serialization or deserialization failure of the * ControlList. * * The serializer can be reset() to clear its internal state. This may be * performed when reconfiguring an IPA to avoid constant growth of the internal * state, especially if the contents of the ControlInfoMap instances change at * that time. 
A reset of the serializer invalidates all ControlList and
 * ControlInfoMap that have been previously deserialized. The caller shall thus
 * proceed with care to avoid stale references.
 */

/**
 * \enum ControlSerializer::Role
 * \brief Define the role of the IPC component using the control serializer
 *
 * The role of the component that creates the serializer is used to initialize
 * the handles numerical space.
 *
 * \var ControlSerializer::Role::Proxy
 * \brief The control serializer is used by the IPC Proxy classes
 *
 * \var ControlSerializer::Role::Worker
 * \brief The control serializer is used by the IPC ProxyWorker classes
 */

/**
 * \brief Construct a new ControlSerializer
 * \param[in] role The role of the IPC component using the serializer
 */
ControlSerializer::ControlSerializer(Role role)
{
	/*
	 * Initialize the handle numerical space using the role of the
	 * component that created the instance.
	 *
	 * Instances initialized for a different role will use a different
	 * numerical handle space, avoiding any collision risk when, for
	 * example, two instances of the ControlSerializer class are used at
	 * the IPC boundaries.
	 *
	 * Start counting handles from '1' as '0' is a special value used as
	 * placeholder when serializing lists that do not have a ControlInfoMap
	 * associated (for example a list of libcamera controls::controls).
	 *
	 * \todo This is a temporary hack and should probably be better
	 * engineered, but for the time being it avoids collisions on the handle
	 * value when using IPC.
	 */
	serialSeed_ = role == Role::Proxy ? 1 : 2;
	serial_ = serialSeed_;
}

/**
 * \brief Reset the serializer
 *
 * Reset the internal state of the serializer. This invalidates all the
 * ControlList and ControlInfoMap that have been previously deserialized.
 */
void ControlSerializer::reset()
{
	serial_ = serialSeed_;

	infoMapHandles_.clear();
	infoMaps_.clear();
	controlIds_.clear();
	controlIdMaps_.clear();
}

size_t ControlSerializer::binarySize(const ControlValue &value)
{
	return sizeof(ControlType) + value.data().size_bytes();
}

size_t ControlSerializer::binarySize(const ControlInfo &info)
{
	return binarySize(info.min()) + binarySize(info.max()) +
	       binarySize(info.def());
}

/**
 * \brief Retrieve the size in bytes required to serialize a ControlInfoMap
 * \param[in] infoMap The control info map
 *
 * Compute and return the size in bytes required to store the serialized
 * ControlInfoMap.
 *
 * \return The size in bytes required to store the serialized ControlInfoMap
 */
size_t ControlSerializer::binarySize(const ControlInfoMap &infoMap)
{
	size_t size = sizeof(struct ipa_controls_header)
		    + infoMap.size() * sizeof(struct ipa_control_info_entry);

	for (const auto &ctrl : infoMap)
		size += binarySize(ctrl.second);

	return size;
}

/**
 * \brief Retrieve the size in bytes required to serialize a ControlList
 * \param[in] list The control list
 *
 * Compute and return the size in bytes required to store the serialized
 * ControlList.
* * \return The size in bytes required to store the serialized ControlList */ size_t ControlSerializer::binarySize(const ControlList &list) { size_t size = sizeof(struct ipa_controls_header) + list.size() * sizeof(struct ipa_control_value_entry); for (const auto &ctrl : list) size += binarySize(ctrl.second); return size; } void ControlSerializer::store(const ControlValue &value, ByteStreamBuffer &buffer) { const ControlType type = value.type(); buffer.write(&type); buffer.write(value.data()); } void ControlSerializer::store(const ControlInfo &info, ByteStreamBuffer &buffer) { store(info.min(), buffer); store(info.max(), buffer); store(info.def(), buffer); } /** * \brief Serialize a ControlInfoMap in a buffer * \param[in] infoMap The control info map to serialize * \param[in] buffer The memory buffer where to serialize the ControlInfoMap * * Serialize the \a infoMap into the \a buffer using the serialization format * defined by the IPA context interface in ipa_controls.h. * * The serializer stores a reference to the \a infoMap internally. The caller * shall ensure that \a infoMap stays valid until the serializer is reset(). * * \return 0 on success, a negative error code otherwise * \retval -ENOSPC Not enough space is available in the buffer */ int ControlSerializer::serialize(const ControlInfoMap &infoMap, ByteStreamBuffer &buffer) { if (isCached(infoMap)) { LOG(Serializer, Debug) << "Skipping already serialized ControlInfoMap"; return 0; } /* Compute entries and data required sizes. */ size_t entriesSize = infoMap.size() * sizeof(struct ipa_control_info_entry); size_t valuesSize = 0; for (const auto &ctrl : infoMap) valuesSize += binarySize(ctrl.second); const ControlIdMap *idmap = &infoMap.idmap(); enum ipa_controls_id_map_type idMapType; if (idmap == &controls::controls) idMapType = IPA_CONTROL_ID_MAP_CONTROLS; else if (idmap == &properties::properties) idMapType = IPA_CONTROL_ID_MAP_PROPERTIES; else idMapType = IPA_CONTROL_ID_MAP_V4L2; /* Prepare the packet header. */ struct ipa_controls_header hdr; hdr.version = IPA_CONTROLS_FORMAT_VERSION; hdr.handle = serial_; hdr.entries = infoMap.size(); hdr.size = sizeof(hdr) + entriesSize + valuesSize; hdr.data_offset = sizeof(hdr) + entriesSize; hdr.id_map_type = idMapType; buffer.write(&hdr); /* * Increment the handle for the ControlInfoMap by 2 to keep the handles * numerical space partitioned between instances initialized for a * different role. * * \sa ControlSerializer::Role */ serial_ += 2; /* * Serialize all entries. * \todo Serialize the control name too */ ByteStreamBuffer entries = buffer.carveOut(entriesSize); ByteStreamBuffer values = buffer.carveOut(valuesSize); for (const auto &ctrl : infoMap) { const ControlId *id = ctrl.first; const ControlInfo &info = ctrl.second; struct ipa_control_info_entry entry; entry.id = id->id(); entry.type = id->type(); entry.offset = values.offset(); entries.write(&entry); store(info, values); } if (buffer.overflow()) return -ENOSPC; /* * Store the map to handle association, to be used to serialize and * deserialize control lists. */ infoMapHandles_[&infoMap] = hdr.handle; return 0; } /** * \brief Serialize a ControlList in a buffer * \param[in] list The control list to serialize * \param[in] buffer The memory buffer where to serialize the ControlList * * Serialize the \a list into the \a buffer using the serialization format * defined by the IPA context interface in ipa_controls.h. 
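 *
 * A possible call sequence sizes the buffer with binarySize() first (a
 * hedged sketch, assuming a \a serializer instance, a populated \a list and
 * a writable ByteStreamBuffer wrapping the storage):
 *
 * \code{.cpp}
 * std::vector<uint8_t> data(serializer.binarySize(list));
 * ByteStreamBuffer buffer(data.data(), data.size());
 * int ret = serializer.serialize(list, buffer);
 * \endcode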
* * \return 0 on success, a negative error code otherwise * \retval -ENOENT The ControlList is related to an unknown ControlInfoMap * \retval -ENOSPC Not enough space is available in the buffer */ int ControlSerializer::serialize(const ControlList &list, ByteStreamBuffer &buffer) { /* * Find the ControlInfoMap handle for the ControlList if it has one, or * use 0 for ControlList without a ControlInfoMap. */ unsigned int infoMapHandle; if (list.infoMap()) { auto iter = infoMapHandles_.find(list.infoMap()); if (iter == infoMapHandles_.end()) { LOG(Serializer, Error) << "Can't serialize ControlList: unknown ControlInfoMap"; return -ENOENT; } infoMapHandle = iter->second; } else { infoMapHandle = 0; } const ControlIdMap *idmap = list.idMap(); enum ipa_controls_id_map_type idMapType; if (idmap == &controls::controls) idMapType = IPA_CONTROL_ID_MAP_CONTROLS; else if (idmap == &properties::properties) idMapType = IPA_CONTROL_ID_MAP_PROPERTIES; else idMapType = IPA_CONTROL_ID_MAP_V4L2; size_t entriesSize = list.size() * sizeof(struct ipa_control_value_entry); size_t valuesSize = 0; for (const auto &ctrl : list) valuesSize += binarySize(ctrl.second); /* Prepare the packet header. */ struct ipa_controls_header hdr; hdr.version = IPA_CONTROLS_FORMAT_VERSION; hdr.handle = infoMapHandle; hdr.entries = list.size(); hdr.size = sizeof(hdr) + entriesSize + valuesSize; hdr.data_offset = sizeof(hdr) + entriesSize; hdr.id_map_type = idMapType; buffer.write(&hdr); ByteStreamBuffer entries = buffer.carveOut(entriesSize); ByteStreamBuffer values = buffer.carveOut(valuesSize); /* Serialize all entries. */ for (const auto &ctrl : list) { unsigned int id = ctrl.first; const ControlValue &value = ctrl.second; struct ipa_control_value_entry entry; entry.id = id; entry.type = value.type(); entry.is_array = value.isArray(); entry.count = value.numElements(); entry.offset = values.offset(); entries.write(&entry); store(value, values); } if (buffer.overflow()) return -ENOSPC; return 0; } ControlValue ControlSerializer::loadControlValue(ByteStreamBuffer &buffer, bool isArray, unsigned int count) { ControlType type; buffer.read(&type); ControlValue value; value.reserve(type, isArray, count); buffer.read(value.data()); return value; } ControlInfo ControlSerializer::loadControlInfo(ByteStreamBuffer &b) { ControlValue min = loadControlValue(b); ControlValue max = loadControlValue(b); ControlValue def = loadControlValue(b); return ControlInfo(min, max, def); } /** * \fn template<typename T> T ControlSerializer::deserialize(ByteStreamBuffer &buffer) * \brief Deserialize an object from a binary buffer * \param[in] buffer The memory buffer that contains the object * * This function is only valid when specialized for ControlInfoMap or * ControlList. Any other typename \a T is not supported. */ /** * \brief Deserialize a ControlInfoMap from a binary buffer * \param[in] buffer The memory buffer that contains the serialized map * * Re-construct a ControlInfoMap from a binary \a buffer containing data * serialized using the serialize() function. 
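 *
 * A minimal sketch of the receiving side (assuming \a buffer wraps the data
 * written by the peer's serializer):
 *
 * \code{.cpp}
 * ControlInfoMap infoMap = serializer.deserialize<ControlInfoMap>(buffer);
 * \endcode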
* * \return The deserialized ControlInfoMap */ template<> ControlInfoMap ControlSerializer::deserialize<ControlInfoMap>(ByteStreamBuffer &buffer) { const struct ipa_controls_header *hdr = buffer.read<decltype(*hdr)>(); if (!hdr) { LOG(Serializer, Error) << "Out of data"; return {}; } auto iter = infoMaps_.find(hdr->handle); if (iter != infoMaps_.end()) { LOG(Serializer, Debug) << "Use cached ControlInfoMap"; return iter->second; } if (hdr->version != IPA_CONTROLS_FORMAT_VERSION) { LOG(Serializer, Error) << "Unsupported controls format version " << hdr->version; return {}; } /* * Use the ControlIdMap corresponding to the id map type. If the type * references a globally defined id map (such as controls::controls * or properties::properties), use it. Otherwise, create a local id map * that will be populated with dynamically created ControlId instances * when deserializing individual ControlInfoMap entries. */ const ControlIdMap *idMap = nullptr; ControlIdMap *localIdMap = nullptr; switch (hdr->id_map_type) { case IPA_CONTROL_ID_MAP_CONTROLS: idMap = &controls::controls; break; case IPA_CONTROL_ID_MAP_PROPERTIES: idMap = &properties::properties; break; case IPA_CONTROL_ID_MAP_V4L2: controlIdMaps_.emplace_back(std::make_unique<ControlIdMap>()); localIdMap = controlIdMaps_.back().get(); idMap = localIdMap; break; default: LOG(Serializer, Error) << "Unknown id map type: " << hdr->id_map_type; return {}; } ByteStreamBuffer entries = buffer.carveOut(hdr->data_offset - sizeof(*hdr)); ByteStreamBuffer values = buffer.carveOut(hdr->size - hdr->data_offset); if (buffer.overflow()) { LOG(Serializer, Error) << "Out of data"; return {}; } ControlInfoMap::Map ctrls; for (unsigned int i = 0; i < hdr->entries; ++i) { const struct ipa_control_info_entry *entry = entries.read<decltype(*entry)>(); if (!entry) { LOG(Serializer, Error) << "Out of data"; return {}; } ControlType type = static_cast<ControlType>(entry->type); /* If we're using a local id map, populate it. */ if (localIdMap) { /** * \todo Find a way to preserve the control name for * debugging purpose. */ controlIds_.emplace_back(std::make_unique<ControlId>(entry->id, "", type)); (*localIdMap)[entry->id] = controlIds_.back().get(); } const ControlId *controlId = idMap->at(entry->id); ASSERT(controlId); if (entry->offset != values.offset()) { LOG(Serializer, Error) << "Bad data, entry offset mismatch (entry " << i << ")"; return {}; } /* Create and store the ControlInfo. */ ctrls.emplace(controlId, loadControlInfo(values)); } /* * Create the ControlInfoMap in the cache, and store the map to handle * association. */ infoMaps_[hdr->handle] = ControlInfoMap(std::move(ctrls), *idMap); ControlInfoMap &map = infoMaps_[hdr->handle]; infoMapHandles_[&map] = hdr->handle; return map; } /** * \brief Deserialize a ControlList from a binary buffer * \param[in] buffer The memory buffer that contains the serialized list * * Re-construct a ControlList from a binary \a buffer containing data * serialized using the serialize() function. 
* * \return The deserialized ControlList */ template<> ControlList ControlSerializer::deserialize<ControlList>(ByteStreamBuffer &buffer) { const struct ipa_controls_header *hdr = buffer.read<decltype(*hdr)>(); if (!hdr) { LOG(Serializer, Error) << "Out of data"; return {}; } if (hdr->version != IPA_CONTROLS_FORMAT_VERSION) { LOG(Serializer, Error) << "Unsupported controls format version " << hdr->version; return {}; } ByteStreamBuffer entries = buffer.carveOut(hdr->data_offset - sizeof(*hdr)); ByteStreamBuffer values = buffer.carveOut(hdr->size - hdr->data_offset); if (buffer.overflow()) { LOG(Serializer, Error) << "Out of data"; return {}; } /* * Retrieve the ControlIdMap associated with the ControlList. * * The idmap is either retrieved from the list's ControlInfoMap when * a valid handle has been initialized at serialization time, or by * using the header's id_map_type field for lists that refer to the * globally defined libcamera controls and properties, for which no * ControlInfoMap is available. */ const ControlIdMap *idMap; if (hdr->handle) { auto iter = std::find_if(infoMapHandles_.begin(), infoMapHandles_.end(), [&](decltype(infoMapHandles_)::value_type &entry) { return entry.second == hdr->handle; }); if (iter == infoMapHandles_.end()) { LOG(Serializer, Error) << "Can't deserialize ControlList: unknown ControlInfoMap"; return {}; } const ControlInfoMap *infoMap = iter->first; idMap = &infoMap->idmap(); } else { switch (hdr->id_map_type) { case IPA_CONTROL_ID_MAP_CONTROLS: idMap = &controls::controls; break; case IPA_CONTROL_ID_MAP_PROPERTIES: idMap = &properties::properties; break; case IPA_CONTROL_ID_MAP_V4L2: default: LOG(Serializer, Fatal) << "A list of V4L2 controls requires an ControlInfoMap"; return {}; } } /* * \todo When available, initialize the list with the ControlInfoMap * so that controls can be validated against their limits. * Currently no validation is performed, so it's fine relying on the * idmap only. */ ControlList ctrls(*idMap); for (unsigned int i = 0; i < hdr->entries; ++i) { const struct ipa_control_value_entry *entry = entries.read<decltype(*entry)>(); if (!entry) { LOG(Serializer, Error) << "Out of data"; return {}; } if (entry->offset != values.offset()) { LOG(Serializer, Error) << "Bad data, entry offset mismatch (entry " << i << ")"; return {}; } ctrls.set(entry->id, loadControlValue(values, entry->is_array, entry->count)); } return ctrls; } /** * \brief Check if a ControlInfoMap is cached * \param[in] infoMap The ControlInfoMap to check * * The ControlSerializer caches all ControlInfoMaps that it has (de)serialized. * This function checks if \a infoMap is in the cache. * * \return True if \a infoMap is in the cache or false otherwise */ bool ControlSerializer::isCached(const ControlInfoMap &infoMap) { return infoMapHandles_.count(&infoMap); } } /* namespace libcamera */
0
repos/libcamera/src
repos/libcamera/src/libcamera/framebuffer.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2019, Google Inc. * * Frame buffer handling */ #include <libcamera/framebuffer.h> #include "libcamera/internal/framebuffer.h" #include <sys/stat.h> #include <libcamera/base/log.h> #include <libcamera/base/shared_fd.h> /** * \file libcamera/framebuffer.h * \brief Frame buffer handling * * \file libcamera/internal/framebuffer.h * \brief Internal frame buffer handling support */ namespace libcamera { LOG_DEFINE_CATEGORY(Buffer) /** * \struct FrameMetadata * \brief Metadata related to a captured frame * * The FrameMetadata structure stores all metadata related to a captured frame, * as stored in a FrameBuffer, such as capture status, timestamp and bytes used. */ /** * \enum FrameMetadata::Status * \brief Define the frame completion status * \var FrameMetadata::FrameSuccess * The frame has been captured with success and contains valid data. All fields * of the FrameMetadata structure are valid. * \var FrameMetadata::FrameError * An error occurred during capture of the frame. The frame data may be partly * or fully invalid. The sequence and timestamp fields of the FrameMetadata * structure is valid, the other fields may be invalid. * \var FrameMetadata::FrameCancelled * Capture stopped before the frame completed. The frame data is not valid. All * fields of the FrameMetadata structure but the status field are invalid. */ /** * \struct FrameMetadata::Plane * \brief Per-plane frame metadata * * Frames are stored in memory in one or multiple planes. The * FrameMetadata::Plane structure stores per-plane metadata. */ /** * \var FrameMetadata::Plane::bytesused * \brief Number of bytes occupied by the data in the plane, including line * padding * * This value may vary per frame for compressed formats. For uncompressed * formats it will be constant for all frames, but may be smaller than the * FrameBuffer size. */ /** * \var FrameMetadata::status * \brief Status of the frame * * The validity of other fields of the FrameMetadata structure depends on the * status value. */ /** * \var FrameMetadata::sequence * \brief Frame sequence number * * The sequence number is a monotonically increasing number assigned to the * frames captured by the stream. The value is increased by one for each frame. * Gaps in the sequence numbers indicate dropped frames. */ /** * \var FrameMetadata::timestamp * \brief Time when the frame was captured * * The timestamp is expressed as a number of nanoseconds relative to the system * clock since an unspecified time point. * * \todo Be more precise on what timestamps refer to. */ /** * \fn FrameMetadata::planes() * \copydoc FrameMetadata::planes() const */ /** * \fn FrameMetadata::planes() const * \brief Retrieve the array of per-plane metadata * \return The array of per-plane metadata */ /** * \class FrameBuffer::Private * \brief Base class for FrameBuffer private data * * The FrameBuffer::Private class stores all private data associated with a * framebuffer. It implements the d-pointer design pattern to hide core * FrameBuffer data from the public API, and exposes utility functions to * pipeline handlers. 
 */

/**
 * \brief Construct a FrameBuffer::Private instance
 * \param[in] planes The frame memory planes
 * \param[in] cookie Cookie
 */
FrameBuffer::Private::Private(const std::vector<Plane> &planes, uint64_t cookie)
	: planes_(planes), cookie_(cookie), request_(nullptr),
	  isContiguous_(true)
{
	metadata_.planes_.resize(planes_.size());
}

/**
 * \brief FrameBuffer::Private destructor
 */
FrameBuffer::Private::~Private()
{
}

/**
 * \fn FrameBuffer::Private::setRequest()
 * \brief Set the request this buffer belongs to
 * \param[in] request Request to set
 *
 * For buffers added to requests by applications, this function is called by
 * Request::addBuffer() or Request::reuse(). For buffers internal to pipeline
 * handlers, it is called by the pipeline handlers themselves.
 */

/**
 * \fn FrameBuffer::Private::isContiguous()
 * \brief Check if the frame buffer stores planes contiguously in memory
 *
 * Multi-planar frame buffers can store their planes contiguously in memory, or
 * split them into discontiguous memory areas. This function checks in which of
 * these two categories the frame buffer belongs.
 *
 * \return True if the planes are stored contiguously in memory, false otherwise
 */

/**
 * \fn FrameBuffer::Private::fence()
 * \brief Retrieve a const pointer to the Fence
 *
 * This function only returns a reference to the fence and does not change its
 * ownership. The fence is stored in the FrameBuffer and can only be reset with
 * FrameBuffer::releaseFence() in case the buffer has completed with error due
 * to a Fence wait failure.
 *
 * If a buffer with a Fence completes with errors due to a failure in handling
 * the fence, applications are responsible for releasing the Fence before
 * calling Request::addBuffer() again.
 *
 * \sa Request::addBuffer()
 *
 * \return A const pointer to the Fence if any, nullptr otherwise
 */

/**
 * \fn FrameBuffer::Private::setFence()
 * \brief Move a \a fence in this buffer
 * \param[in] fence The Fence
 *
 * This function associates a Fence with this FrameBuffer. The intended caller
 * is the Request::addBuffer() function.
 *
 * Once a FrameBuffer is associated with a Fence, the FrameBuffer will only be
 * made available to the hardware device once the Fence has been correctly
 * signalled.
 *
 * \sa Request::prepare()
 *
 * If the FrameBuffer completes successfully the core releases the Fence and
 * the Buffer can be reused immediately. If handling of the Fence fails during
 * the request preparation, the Fence is not released and is left in the
 * FrameBuffer. It is the application's responsibility to correctly release the
 * fence and handle it appropriately before using the buffer again.
 */

/**
 * \fn FrameBuffer::Private::cancel()
 * \brief Mark the buffer as cancelled
 *
 * If a buffer is not used by a request, it shall be marked as cancelled to
 * indicate that the metadata is invalid.
 */

/**
 * \fn FrameBuffer::Private::metadata()
 * \brief Retrieve the dynamic metadata
 * \return Dynamic metadata for the frame contained in the buffer
 */

/**
 * \class FrameBuffer
 * \brief Frame buffer data and its associated dynamic metadata
 *
 * The FrameBuffer class is the primary interface for applications, IPAs and
 * pipeline handlers to interact with frame memory. It contains all the static
 * and dynamic information to manage the whole life cycle of a frame capture,
 * from buffer creation to consumption.
 *
 * The static information describes the memory planes that make a frame. The
 * planes are specified when creating the FrameBuffer and are expressed as a
 * set of dmabuf file descriptors, offset and length.
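 *
 * For illustration, a buffer with a single plane could be created as follows
 * (a hypothetical sketch, \a dmabufFd and \a length are placeholders):
 *
 * \code{.cpp}
 * std::vector<FrameBuffer::Plane> planes(1);
 * planes[0].fd = SharedFD(std::move(dmabufFd));
 * planes[0].offset = 0;
 * planes[0].length = length;
 * auto buffer = std::make_unique<FrameBuffer>(planes);
 * \endcode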
* * The dynamic information is grouped in a FrameMetadata instance. It is updated * during the processing of a queued capture request, and is valid from the * completion of the buffer as signaled by Camera::bufferComplete() until the * FrameBuffer is either reused in a new request or deleted. * * The creator of a FrameBuffer (application, IPA or pipeline handler) may * associate an integer cookie with it for any private purpose. The cookie may * be set when creating the FrameBuffer, and updated at any time with * setCookie(). The cookie is transparent to the libcamera core and shall only * be set by the creator of the FrameBuffer. This mechanism supplements the * Request cookie. */ /** * \struct FrameBuffer::Plane * \brief A memory region to store a single plane of a frame * * Planar pixel formats use multiple memory regions to store the different * colour components of a frame. The Plane structure describes such a memory * region by a dmabuf file descriptor, an offset within the dmabuf and a length. * A FrameBuffer then contains one or multiple planes, depending on the pixel * format of the frames it is meant to store. * * The offset identifies the location of the plane data from the start of the * memory referenced by the dmabuf file descriptor. Multiple planes may be * stored in the same dmabuf, in which case they will reference the same dmabuf * with different offsets. No two planes may overlap, as specified by their * offset and length. * * To support DMA access, planes are associated with dmabuf objects represented * by SharedFD handles. The Plane class doesn't handle mapping of the memory to * the CPU, but applications and IPAs may use the dmabuf file descriptors to map * the plane memory with mmap() and access its contents. * * \todo Specify how an application shall decide whether to use a single or * multiple dmabufs, based on the camera requirements. */ /** * \var FrameBuffer::Plane::kInvalidOffset * \brief Invalid offset value, to identify uninitialized planes */ /** * \var FrameBuffer::Plane::fd * \brief The dmabuf file descriptor */ /** * \var FrameBuffer::Plane::offset * \brief The plane offset in bytes */ /** * \var FrameBuffer::Plane::length * \brief The plane length in bytes */ namespace { ino_t fileDescriptorInode(const SharedFD &fd) { if (!fd.isValid()) return 0; struct stat st; int ret = fstat(fd.get(), &st); if (ret < 0) { ret = -errno; LOG(Buffer, Fatal) << "Failed to fstat() fd: " << strerror(-ret); return 0; } return st.st_ino; } } /* namespace */ /** * \brief Construct a FrameBuffer with an array of planes * \param[in] planes The frame memory planes * \param[in] cookie Cookie */ FrameBuffer::FrameBuffer(const std::vector<Plane> &planes, unsigned int cookie) : FrameBuffer(std::make_unique<Private>(planes, cookie)) { } /** * \brief Construct a FrameBuffer with an extensible private class * \param[in] d The extensible private class */ FrameBuffer::FrameBuffer(std::unique_ptr<Private> d) : Extensible(std::move(d)) { unsigned int offset = 0; bool isContiguous = true; ino_t inode = 0; for (const auto &plane : _d()->planes_) { ASSERT(plane.offset != Plane::kInvalidOffset); if (plane.offset != offset) { isContiguous = false; break; } /* * Two different dmabuf file descriptors may still refer to the * same dmabuf instance. Check this using inodes.
*/ if (plane.fd != _d()->planes_[0].fd) { if (!inode) inode = fileDescriptorInode(_d()->planes_[0].fd); if (fileDescriptorInode(plane.fd) != inode) { isContiguous = false; break; } } offset += plane.length; } LOG(Buffer, Debug) << "Buffer is " << (isContiguous ? "" : "not ") << "contiguous"; _d()->isContiguous_ = isContiguous; } /** * \brief Retrieve the static plane descriptors * \return Array of plane descriptors */ const std::vector<FrameBuffer::Plane> &FrameBuffer::planes() const { return _d()->planes_; } /** * \brief Retrieve the request this buffer belongs to * * The intended callers of this function are buffer completion handlers that * need to associate a buffer with the request it belongs to. * * A FrameBuffer is associated with a request by Request::addBuffer() and the * association is valid until the buffer completes. The returned request * pointer is valid only during that interval. * * \return The Request the FrameBuffer belongs to, or nullptr if the buffer is * not associated with a request */ Request *FrameBuffer::request() const { return _d()->request_; } /** * \brief Retrieve the dynamic metadata * \return Dynamic metadata for the frame contained in the buffer */ const FrameMetadata &FrameBuffer::metadata() const { return _d()->metadata_; } /** * \brief Retrieve the cookie * * The cookie belongs to the creator of the FrameBuffer, which controls its * lifetime and value. * * \sa setCookie() * * \return The cookie */ uint64_t FrameBuffer::cookie() const { return _d()->cookie_; } /** * \brief Set the cookie * \param[in] cookie Cookie to set * * The cookie belongs to the creator of the FrameBuffer. Its value may be * modified at any time with this function. Applications and IPAs shall not * modify the cookie value of buffers they haven't created themselves. The * libcamera core never modifies the buffer cookie. */ void FrameBuffer::setCookie(uint64_t cookie) { _d()->cookie_ = cookie; } /** * \brief Extract the Fence associated with this FrameBuffer * * This function moves the buffer's fence ownership to the caller. * After the fence has been released, calling this function always returns * nullptr. * * If a buffer with a Fence completes with errors due to a failure in handling * the fence, applications are responsible for releasing the Fence before * calling Request::addBuffer() again. * * \return A unique pointer to the Fence if set, or nullptr if the fence has * been released already */ std::unique_ptr<Fence> FrameBuffer::releaseFence() { return std::move(_d()->fence_); } } /* namespace libcamera */
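A brief usage sketch (not part of the upstream file): wrapping an externally allocated dmabuf in a FrameBuffer through the public API documented above. The file descriptor, plane length and cookie value are placeholders; a real descriptor would come from a dmabuf exporter such as a V4L2 device or a DMA heap.

#include <memory>
#include <vector>

#include <libcamera/base/shared_fd.h>
#include <libcamera/framebuffer.h>

/*
 * Wrap a single-plane dmabuf in a FrameBuffer. Passing the fd as an
 * lvalue makes SharedFD duplicate it, so the caller keeps ownership of
 * the original descriptor.
 */
std::unique_ptr<libcamera::FrameBuffer> wrapDmabuf(int fd, unsigned int length)
{
	libcamera::FrameBuffer::Plane plane;
	plane.fd = libcamera::SharedFD(fd);
	plane.offset = 0;
	plane.length = length;

	std::vector<libcamera::FrameBuffer::Plane> planes{ plane };

	/* The cookie value 42 is arbitrary, for the creator's private use. */
	return std::make_unique<libcamera::FrameBuffer>(planes, 42);
}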
0
repos/libcamera/src
repos/libcamera/src/libcamera/media_object.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2018, Google Inc. * * Media device objects: entities, pads and links */ #include "libcamera/internal/media_object.h" #include <errno.h> #include <string> #include <string.h> #include <unistd.h> #include <vector> #include <linux/media.h> #include <libcamera/base/log.h> #include "libcamera/internal/media_device.h" /** * \file media_object.h * \brief Provides a class hierarchy that represents the media objects exposed * by the Linux kernel Media Controller APIs. * * The abstract MediaObject class represents any Media Controller graph object * identified by an id unique in the media device context. It is subclassed by * the MediaEntity, MediaPad and MediaLink classes that represent the entities, * pads and links respectively. They are populated based on the media graph * information exposed by the Linux kernel through the MEDIA_IOC_G_TOPOLOGY * ioctl. * * As the media objects represent their kernel counterpart, information about * the properties they expose can be found in the Linux kernel documentation. * * All media objects are meant to be created and destroyed solely by the * MediaDevice and thus have private constructors and destructors. */ namespace libcamera { LOG_DECLARE_CATEGORY(MediaDevice) /** * \class MediaObject * \brief Base class for all media objects * * MediaObject is an abstract base class for all media objects in the * media graph. Each object is identified by a reference to the media * device it belongs to and a unique id within that media device. * This base class provides helpers to media objects to keep track of * these identifiers. * * \sa MediaEntity, MediaPad, MediaLink */ /** * \fn MediaObject::MediaObject() * \brief Construct a MediaObject part of the MediaDevice \a dev, * identified by the \a id unique within the device * \param[in] dev The media device this object belongs to * \param[in] id The media object id * * The caller shall ensure the uniqueness of the object id in the media device * context. This constraint is neither enforced nor checked by the MediaObject. */ /** * \fn MediaObject::device() * \copydoc MediaObject::device() const */ /** * \fn MediaObject::device() const * \brief Retrieve the media device the media object belongs to * \return The MediaDevice */ /** * \fn MediaObject::id() * \brief Retrieve the media object id * \return The media object id */ /** * \var MediaObject::dev_ * \brief The media device the media object belongs to */ /** * \var MediaObject::id_ * \brief The media object id */ /** * \class MediaLink * \brief The MediaLink represents a link between two pads in the media graph * * Links are created from the information provided by the Media Controller API * in the media_v2_link structure. They reference the source() and sink() pads * they connect and track the link status through link flags(). * * Each link is referenced in the link array of both of the pads it connects. */ /** * \brief Enable or disable a link * \param[in] enable True to enable the link, false to disable it * * Set the status of a link according to the value of \a enable. * Links between two pads can be set to the enabled or disabled state freely, * unless they're immutable links, whose status cannot be changed. * Enabling an immutable link is not considered an error, while trying to * disable it is. * * Enabling a link establishes a data connection between two pads, while * disabling it interrupts that connection.
* * \return 0 on success or a negative error code otherwise */ int MediaLink::setEnabled(bool enable) { unsigned int flags = (flags_ & ~MEDIA_LNK_FL_ENABLED) | (enable ? MEDIA_LNK_FL_ENABLED : 0); int ret = dev_->setupLink(this, flags); if (ret) return ret; flags_ = flags; return 0; } /** * \brief Construct a MediaLink * \param[in] link The media link kernel data * \param[in] source The source pad at the origin of the link * \param[in] sink The sink pad at the destination of the link */ MediaLink::MediaLink(const struct media_v2_link *link, MediaPad *source, MediaPad *sink) : MediaObject(source->device(), link->id), source_(source), sink_(sink), flags_(link->flags) { } /** * \fn MediaLink::source() * \brief Retrieve the link's source pad * \return The source pad at the origin of the link */ /** * \fn MediaLink::sink() * \brief Retrieve the link's sink pad * \return The sink pad at the destination of the link */ /** * \fn MediaLink::flags() * \brief Retrieve the link's flags * * Link flags are a bitmask of flags defined by the Media Controller API * MEDIA_LNK_FL_* macros. * * \return The link flags */ /** * \class MediaPad * \brief The MediaPad represents a pad of an entity in the media graph * * Pads are created from the information provided by the Media Controller API * in the media_v2_pad structure. They reference the entity() they belong to. * * In addition to their graph id, media graph pads are identified by an index * unique in the context of the entity the pad belongs to. * * A pad can be either a 'source' pad or a 'sink' pad. This information is * captured in the pad flags(). * * Pads are connected through links. Links originating from a source pad are * outbound links, and links arriving at a sink pad are inbound links. Pads * reference all the links() that are connected to them. */ /** * \brief Construct a MediaPad * \param[in] pad The media pad kernel data * \param[in] entity The entity the pad belongs to */ MediaPad::MediaPad(const struct media_v2_pad *pad, MediaEntity *entity) : MediaObject(entity->device(), pad->id), index_(pad->index), entity_(entity), flags_(pad->flags) { } /** * \fn MediaPad::index() * \brief Retrieve the pad index * \return The 0-based pad index identifying the pad in the context of the * entity it belongs to */ /** * \fn MediaPad::entity() * \brief Retrieve the entity the pad belongs to * \return The MediaEntity the pad belongs to */ /** * \fn MediaPad::flags() * \brief Retrieve the pad flags * * Pad flags are a bitmask of flags defined by the Media Controller API * MEDIA_PAD_FL_* macros. * * \return The pad flags */ /** * \fn MediaPad::links() * \brief Retrieve all links in the pad * \return A list of links connected to the pad */ /** * \brief Add a new link to this pad * \param[in] link The MediaLink to add */ void MediaPad::addLink(MediaLink *link) { links_.push_back(link); } /** * \class MediaEntity * \brief The MediaEntity represents an entity in the media graph * * Entities are created from the information provided by the Media Controller * API in the media_v2_entity structure. They reference the pads() they contain. * * In addition to their graph id, media graph entities are identified by a * name() unique in the media device context. They implement a function() and * may expose a deviceNode(). 
*/ /** * \enum MediaEntity::Type * \brief The type of the interface exposed by the entity to userspace * * \var MediaEntity::Type::Invalid * \brief Invalid or unsupported entity type * * \var MediaEntity::Type::MediaEntity * \brief Plain media entity with no userspace interface * * \var MediaEntity::Type::V4L2VideoDevice * \brief V4L2 video device with a V4L2 video device node * * \var MediaEntity::Type::V4L2Subdevice * \brief V4L2 subdevice with a V4L2 subdev device node */ /** * \fn MediaEntity::name() * \brief Retrieve the entity name * \return The entity name */ /** * \fn MediaEntity::function() * \brief Retrieve the entity's main function * * Media entity functions are expressed using the MEDIA_ENT_F_* macros * defined by the Media Controller API. * * \return The entity's function */ /** * \fn MediaEntity::flags() * \brief Retrieve the entity's flags * * Media entity flags are expressed using the MEDIA_ENT_FL_* macros * defined by the Media Controller API. * * \return The entity's flags */ /** * \fn MediaEntity::type() * \brief Retrieve the entity's type * * The entity type identifies the type of interface exposed to userspace. * * \return The entity's type */ /** * \fn MediaEntity::deviceNode() * \brief Retrieve the entity's device node path, if any * \return The entity's device node path, or an empty string if it is not set * \sa setDeviceNode() */ /** * \fn MediaEntity::deviceMajor() * \brief Retrieve the major number of the interface associated with the entity * \return The interface major number, or 0 if the entity isn't associated with * an interface */ /** * \fn MediaEntity::deviceMinor() * \brief Retrieve the minor number of the interface associated with the entity * \return The interface minor number, or 0 if the entity isn't associated with * an interface */ /** * \fn MediaEntity::pads() * \brief Retrieve all pads of the entity * \return The list of the entity's pads */ /** * \brief Get a pad in this entity by its index * \param[in] index The 0-based pad index * \return The pad identified by \a index, or nullptr if no such pad exists */ const MediaPad *MediaEntity::getPadByIndex(unsigned int index) const { for (MediaPad *p : pads_) { if (p->index() == index) return p; } return nullptr; } /** * \brief Get a pad in this entity by its object id * \param[in] id The pad id * \return The pad identified by \a id, or nullptr if no such pad exists */ const MediaPad *MediaEntity::getPadById(unsigned int id) const { for (MediaPad *p : pads_) { if (p->id() == id) return p; } return nullptr; } /** * \brief Set the path to the device node for the associated interface * \param[in] deviceNode The interface device node path associated with this entity * \return 0 on success or a negative error code otherwise */ int MediaEntity::setDeviceNode(const std::string &deviceNode) { /* Make sure the device node can be accessed.
*/ int ret = ::access(deviceNode.c_str(), R_OK | W_OK); if (ret < 0) { ret = -errno; LOG(MediaDevice, Error) << "Device node " << deviceNode << " can't be accessed: " << strerror(-ret); return ret; } deviceNode_ = deviceNode; return 0; } /** * \brief Construct a MediaEntity * \param[in] dev The media device this entity belongs to * \param[in] entity The media entity kernel data * \param[in] iface The entity interface data (may be null) */ MediaEntity::MediaEntity(MediaDevice *dev, const struct media_v2_entity *entity, const struct media_v2_interface *iface) : MediaObject(dev, entity->id), name_(entity->name), function_(entity->function), flags_(entity->flags), type_(Type::MediaEntity), major_(0), minor_(0) { if (!iface) return; switch (iface->intf_type) { case MEDIA_INTF_T_V4L_VIDEO: type_ = Type::V4L2VideoDevice; break; case MEDIA_INTF_T_V4L_SUBDEV: type_ = Type::V4L2Subdevice; break; default: type_ = Type::Invalid; return; } major_ = iface->devnode.major; minor_ = iface->devnode.minor; } /** * \brief Add \a pad to the entity's list of pads * \param[in] pad The pad to add to the list * * This function is meant to add pads to the entity during parsing of the media * graph, after the MediaPad objects are constructed and before the MediaDevice * is made available externally. */ void MediaEntity::addPad(MediaPad *pad) { pads_.push_back(pad); } /** * \brief Add a MediaEntity to the list of ancillary entities * \param[in] ancillaryEntity The instance of MediaEntity to add */ void MediaEntity::addAncillaryEntity(MediaEntity *ancillaryEntity) { ancillaryEntities_.push_back(ancillaryEntity); } /** * \fn MediaEntity::ancillaryEntities() * \brief Retrieve all ancillary entities of the entity * \return The list of the entity's ancillary entities */ } /* namespace libcamera */
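A hedged sketch of how a pipeline handler might walk the objects documented above, using only accessors shown in this file (entities(), name(), pads(), flags(), links() and MediaLink::setEnabled()): enable every link departing from the source pads of a named entity. The entity name is a placeholder supplied by the caller.

#include <string>

#include <linux/media.h>

#include "libcamera/internal/media_device.h"
#include "libcamera/internal/media_object.h"

using namespace libcamera;

/* Enable all outbound links of the entity called entityName. */
int enableOutboundLinks(MediaDevice *media, const std::string &entityName)
{
	for (MediaEntity *entity : media->entities()) {
		if (entity->name() != entityName)
			continue;

		for (MediaPad *pad : entity->pads()) {
			/* Only source pads originate links. */
			if (!(pad->flags() & MEDIA_PAD_FL_SOURCE))
				continue;

			for (MediaLink *link : pad->links()) {
				int ret = link->setEnabled(true);
				if (ret)
					return ret;
			}
		}
	}

	return 0;
}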
0
repos/libcamera/src
repos/libcamera/src/libcamera/control_ids_rpi.yaml
# SPDX-License-Identifier: LGPL-2.1-or-later # # Copyright (C) 2023, Raspberry Pi Ltd # %YAML 1.1 --- # Raspberry Pi (VC4 and PiSP) specific vendor controls vendor: rpi controls: - StatsOutputEnable: type: bool description: | When set to true, the Raspberry Pi IPA outputs a binary dump of the hardware generated statistics through the Request metadata in the Bcm2835StatsOutput control. \sa Bcm2835StatsOutput - Bcm2835StatsOutput: type: uint8_t size: [n] description: | Span of the BCM2835 ISP generated statistics for the current frame. This is sent in the Request metadata if StatsOutputEnable is set to true. The statistics struct definition can be found in include/linux/bcm2835-isp.h. \sa StatsOutputEnable ...
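A hedged application-side sketch of using these controls. It assumes the YAML above generates libcamera::controls::rpi::StatsOutputEnable and libcamera::controls::rpi::Bcm2835StatsOutput in <libcamera/control_ids.h>, following libcamera's vendor control generation.

#include <libcamera/control_ids.h>
#include <libcamera/request.h>

using namespace libcamera;

/* Ask the IPA to attach the statistics dump to this request. */
void enableStats(Request *request)
{
	request->controls().set(controls::rpi::StatsOutputEnable, true);
}

/* On request completion, read the raw statistics back, if present. */
void readStats(Request *request)
{
	const auto &stats =
		request->metadata().get(controls::rpi::Bcm2835StatsOutput);
	if (stats)
		/* stats->size() bytes laid out as struct bcm2835_isp_stats. */
		(void)stats->size();
}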
0
repos/libcamera/src
repos/libcamera/src/libcamera/property_ids.cpp.in
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2019, Google Inc. * * property_ids.cpp : Property ID list * * This file is auto-generated. Do not edit. */ #include <libcamera/property_ids.h> /** * \file property_ids.h * \brief Camera property identifiers */ namespace libcamera { /** * \brief Namespace for libcamera properties */ namespace properties { ${controls_doc} ${vendor_controls_doc} #ifndef __DOXYGEN__ /* * Keep the properties definitions hidden from doxygen as it incorrectly parses * them as functions. */ ${controls_def} ${vendor_controls_def} #endif /** * \brief List of all supported libcamera properties */ extern const ControlIdMap properties { ${controls_map} }; } /* namespace properties */ } /* namespace libcamera */
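Since the identifiers generated from this template are consumed through camera properties, here is a short hedged sketch of reading one. It assumes a valid Camera obtained from a CameraManager, and that ControlList::get() returns a std::optional as in current libcamera.

#include <iostream>
#include <memory>

#include <libcamera/camera.h>
#include <libcamera/property_ids.h>

/* Print the camera model property, if the pipeline registered it. */
void printModel(const std::shared_ptr<libcamera::Camera> &camera)
{
	const libcamera::ControlList &props = camera->properties();

	const auto &model = props.get(libcamera::properties::Model);
	if (model)
		std::cout << "Model: " << *model << std::endl;
}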
0
repos/libcamera/src
repos/libcamera/src/libcamera/pixel_format.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2019, Google Inc. * * libcamera Pixel Format */ #include <ctype.h> #include <libcamera/formats.h> #include <libcamera/pixel_format.h> #include "libcamera/internal/formats.h" /** * \file pixel_format.h * \brief libcamera pixel format */ namespace libcamera { /** * \class PixelFormat * \brief libcamera image pixel format * * The PixelFormat type describes the format of images in the public libcamera * API. It stores a FourCC value as a 32-bit unsigned integer and a modifier. * The FourCC and modifier values are defined in the Linux kernel DRM/KMS API * (see linux/drm_fourcc.h). Constant expressions for all pixel formats * supported by libcamera are available in libcamera/formats.h. */ /** * \fn PixelFormat::PixelFormat() * \brief Construct a PixelFormat with an invalid format * * PixelFormat instances constructed with the default constructor are * invalid; calling the isValid() function returns false. */ /** * \fn PixelFormat::PixelFormat(uint32_t fourcc, uint64_t modifier) * \brief Construct a PixelFormat from a DRM FourCC and a modifier * \param[in] fourcc A DRM FourCC * \param[in] modifier A DRM FourCC modifier */ /** * \brief Compare pixel formats for equality * \return True if the two pixel formats are equal, false otherwise */ bool PixelFormat::operator==(const PixelFormat &other) const { return fourcc_ == other.fourcc() && modifier_ == other.modifier_; } /** * \fn bool PixelFormat::operator!=(const PixelFormat &other) const * \brief Compare pixel formats for inequality * \return True if the two pixel formats are not equal, false otherwise */ /** * \brief Compare pixel formats for smaller than order * \return True if \a this is smaller than \a other, false otherwise */ bool PixelFormat::operator<(const PixelFormat &other) const { if (fourcc_ < other.fourcc_) return true; if (fourcc_ > other.fourcc_) return false; return modifier_ < other.modifier_; } /** * \fn bool PixelFormat::isValid() const * \brief Check if the pixel format is valid * * PixelFormat instances constructed with the default constructor are * invalid. Instances constructed with a FourCC defined in the DRM API * are valid. The behaviour is undefined otherwise. * * \return True if the pixel format is valid, false otherwise */ /** * \fn PixelFormat::operator uint32_t() const * \brief Convert to the pixel format numerical value * \return The pixel format numerical value */ /** * \fn PixelFormat::fourcc() const * \brief Retrieve the pixel format FourCC * \return DRM FourCC */ /** * \fn PixelFormat::modifier() const * \brief Retrieve the pixel format modifier * \return DRM modifier */ /** * \brief Assemble and return a string describing the pixel format * \return A string describing the pixel format */ std::string PixelFormat::toString() const { const PixelFormatInfo &info = PixelFormatInfo::info(*this); if (!info.isValid()) { if (*this == PixelFormat()) return "<INVALID>"; char fourcc[7] = { '<', static_cast<char>(fourcc_), static_cast<char>(fourcc_ >> 8), static_cast<char>(fourcc_ >> 16), static_cast<char>(fourcc_ >> 24), '>' }; for (unsigned int i = 1; i < 5; i++) { if (!isprint(fourcc[i])) fourcc[i] = '.'; } return fourcc; } return info.name; } /** * \brief Create a PixelFormat from a string * \return The PixelFormat represented by the \a name if known, or an * invalid pixel format otherwise.
*/ PixelFormat PixelFormat::fromString(const std::string &name) { return PixelFormatInfo::info(name).format; } /** * \brief Insert a text representation of a PixelFormat into an output stream * \param[in] out The output stream * \param[in] f The PixelFormat * \return The output stream \a out */ std::ostream &operator<<(std::ostream &out, const PixelFormat &f) { out << f.toString(); return out; } } /* namespace libcamera */
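A hedged round-trip sketch of the API above, using the formats::NV12 constant from libcamera/formats.h. The printed name comes from the PixelFormatInfo table, so "NV12" is the expected but not guaranteed output.

#include <iostream>

#include <libcamera/formats.h>
#include <libcamera/pixel_format.h>

int main()
{
	libcamera::PixelFormat fmt = libcamera::formats::NV12;

	/* operator<< uses toString(), typically printing "NV12". */
	std::cout << fmt << std::endl;

	/* fromString() performs the reverse lookup. */
	libcamera::PixelFormat parsed = libcamera::PixelFormat::fromString("NV12");
	if (parsed.isValid() && parsed == fmt)
		std::cout << "Round-trip successful" << std::endl;

	return 0;
}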
0
repos/libcamera/src
repos/libcamera/src/libcamera/ipa_interface.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2019, Google Inc. * * Image Processing Algorithm interface */ #include <libcamera/ipa/ipa_interface.h> /** * \file ipa_interface.h * \brief Image Processing Algorithm interface * * Every pipeline handler in libcamera may attach some or all of its cameras to * an Image Processing Algorithm (IPA) module. An IPA module is developed for a * specific pipeline handler and each pipeline handler may be compatible with * multiple IPA implementations, both open and closed source. To support this, * libcamera communicates with IPA modules through a per-pipeline C++ interface. * * IPA modules shall provide an ipaCreate() function exported as a public C * symbol with the following prototype: * * \code{.c} * IPAInterface *ipaCreate(); * \endcode * * The ipaCreate() function creates an instance of an IPA interface, which models * a context of execution for the IPA. IPA modules shall support creating one * context per camera, as required by their associated pipeline handler. * * The IPA module interface operations are defined in the mojom file * corresponding to the pipeline handler, in * include/libcamera/ipa/{pipeline_name}.mojom. * * The IPA interface is specific to each pipeline handler. The pipeline handlers * define a set of operations used to communicate with their IPA modules. The * operations, along with the data structures they use, are collectively * referred to as the IPA protocol. * * The IPA protocol is defined using the * <a href="https://chromium.googlesource.com/chromium/src/+/master/mojo/public/tools/bindings/README.md">Mojo interface definition language</a>, * in a Mojo module file stored in include/libcamera/ipa/{pipeline_name}.mojom. * The Mojo module contains two Mojo interfaces: IPAInterface defines the * operations exposed by the IPA and called by the pipeline handler, and * IPAEventInterface defines the events generated by the IPA and received by the * pipeline handler. * * \todo Add reference to how pipelines shall document their protocol. * * IPAs can be isolated in a separate process. This implies that arguments to * the IPA interface functions may need to be transferred over IPC. An IPA * proxy is auto-generated based on the mojom file, which abstracts away the * (de)serialization from the pipeline handler and the IPA implementation. Thus * any C++ structure that is defined in the mojom file, or the C++ libcamera * objects that are listed in core.mojom, can be used directly. * * Due to IPC, synchronous communication between pipeline handlers and IPAs can * be costly. For that reason, functions that cannot afford the high cost * should be marked as [async] in the mojom file, and they will operate * asynchronously. This implies that these functions don't return a status, and * that all functions may copy their arguments. Synchronous functions are still * allowed, but should be used with caution. */ /** * \fn ipaCreate() * \brief Entry point to the IPA modules * * This function is the entry point to the IPA modules. It is implemented by * every IPA module, and called by libcamera to create a new IPA interface * instance. * * \return A newly created IPA interface instance */ namespace libcamera { /** * \class IPAInterface * \brief C++ Interface for IPA implementation * * This pure virtual class defines a skeletal C++ API for IPA modules. * Specializations of this class must be defined in a mojom file in * include/libcamera/ipa/ (see the IPA Writers Guide for details * on how to do so). 
* * Due to process isolation all arguments to the IPAInterface member functions * and signals may need to be transferred over IPC. The class thus uses * serializable data types only. The IPA C++ interface defines custom data * structures that mirror core libcamera structures when the latter are not * suitable, such as IPAStream to carry StreamConfiguration data. * * Custom data structures may also be defined in the mojom file, in which case * the (de)serialization will automatically be generated. If any other libcamera * structures are to be used as parameters, then a (de)serializer for them must * be implemented in IPADataSerializer. * * The pipeline handlers shall use the IPAManager to locate a compatible * IPAInterface. The interface may then be used to interact with the IPA module. * * \todo Figure out how to generate IPAInterface documentation. */ } /* namespace libcamera */
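A hedged sketch of the module entry point described above. VendorIPA is a hypothetical class; a real module would derive from the pipeline-specific interface generated from its mojom file rather than from IPAInterface directly, and would implement the generated operations.

#include <libcamera/ipa/ipa_interface.h>

namespace {

/* Hypothetical IPA context; real operations come from the generated
 * pipeline-specific interface. */
class VendorIPA : public libcamera::IPAInterface
{
};

} /* namespace */

/* Entry point, exported as a public C symbol as required above. */
extern "C" libcamera::IPAInterface *ipaCreate()
{
	return new VendorIPA();
}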
0
repos/libcamera/src
repos/libcamera/src/libcamera/device_enumerator_sysfs.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2019, Google Inc. * * sysfs-based device enumerator */ #include "libcamera/internal/device_enumerator_sysfs.h" #include <dirent.h> #include <fcntl.h> #include <fstream> #include <stdlib.h> #include <string.h> #include <sys/ioctl.h> #include <sys/stat.h> #include <sys/types.h> #include <unistd.h> #include <libcamera/base/log.h> #include "libcamera/internal/media_device.h" namespace libcamera { LOG_DECLARE_CATEGORY(DeviceEnumerator) int DeviceEnumeratorSysfs::init() { return 0; } int DeviceEnumeratorSysfs::enumerate() { struct dirent *ent; DIR *dir; static const char * const sysfs_dirs[] = { "/sys/subsystem/media/devices", "/sys/bus/media/devices", "/sys/class/media/devices", }; for (const char *dirname : sysfs_dirs) { dir = opendir(dirname); if (dir) break; } if (!dir) { LOG(DeviceEnumerator, Error) << "No valid sysfs media device directory"; return -ENODEV; } while ((ent = readdir(dir)) != nullptr) { if (strncmp(ent->d_name, "media", 5)) continue; char *end; unsigned int idx = strtoul(ent->d_name + 5, &end, 10); if (*end != '\0') continue; std::string devnode = "/dev/media" + std::to_string(idx); /* Verify that the device node exists. */ struct stat devstat; if (stat(devnode.c_str(), &devstat) < 0) { LOG(DeviceEnumerator, Warning) << "Device node /dev/media" << idx << " should exist but doesn't"; continue; } std::unique_ptr<MediaDevice> media = createDevice(devnode); if (!media) continue; if (populateMediaDevice(media.get()) < 0) { LOG(DeviceEnumerator, Warning) << "Failed to populate media device " << media->deviceNode() << " (" << media->driver() << "), skipping"; continue; } addDevice(std::move(media)); } closedir(dir); return 0; } int DeviceEnumeratorSysfs::populateMediaDevice(MediaDevice *media) { /* Associate entities to device node paths. */ for (MediaEntity *entity : media->entities()) { if (entity->deviceMajor() == 0 && entity->deviceMinor() == 0) continue; std::string deviceNode = lookupDeviceNode(entity->deviceMajor(), entity->deviceMinor()); if (deviceNode.empty()) return -EINVAL; int ret = entity->setDeviceNode(deviceNode); if (ret) return ret; } return 0; } /** * \brief Lookup device node path from device number * \param[in] major The device major number * \param[in] minor The device minor number * * Translate a device number given as \a major and \a minor to a device node * path. * * \return The device node path on success, or an empty string if the lookup * fails */ std::string DeviceEnumeratorSysfs::lookupDeviceNode(int major, int minor) { std::string deviceNode; std::string line; std::ifstream ueventFile; ueventFile.open("/sys/dev/char/" + std::to_string(major) + ":" + std::to_string(minor) + "/uevent"); if (!ueventFile) return std::string(); while (ueventFile >> line) { if (line.find("DEVNAME=") == 0) { deviceNode = "/dev/" + line.substr(strlen("DEVNAME=")); break; } } ueventFile.close(); return deviceNode; } } /* namespace libcamera */
0
repos/libcamera/src
repos/libcamera/src/libcamera/delayed_controls.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2020, Raspberry Pi Ltd * * Helper to deal with controls that take effect with a delay */ #include "libcamera/internal/delayed_controls.h" #include <libcamera/base/log.h> #include <libcamera/controls.h> #include "libcamera/internal/v4l2_device.h" /** * \file delayed_controls.h * \brief Helper to deal with controls that take effect with a delay */ namespace libcamera { LOG_DEFINE_CATEGORY(DelayedControls) /** * \class DelayedControls * \brief Helper to deal with controls that take effect with a delay * * Some sensor controls take effect with a delay as the sensor needs time to * adjust, for example exposure and analog gain. This is a helper class to deal * with such controls and the intended users are pipeline handlers. * * The idea is to extend the concept of the buffer depth that the application * needs to maintain for a pipeline to also cover controls. Just as with buffer * depth, if the application keeps the number of requests queued above the * control depth, the controls are guaranteed to take effect for the correct * request. The control depth is determined by the control with the greatest * delay. */ /** * \struct DelayedControls::ControlParams * \brief Parameters associated with controls handled by the \a DelayedControls * helper class * * \var ControlParams::delay * \brief Frame delay from setting the control on a sensor device to when it is * consumed during framing. * * \var ControlParams::priorityWrite * \brief Flag to indicate that this control must be applied ahead of, and * separately from the other controls. * * Typically set for the \a V4L2_CID_VBLANK control so that the device driver * does not reject \a V4L2_CID_EXPOSURE control values that may be outside of * the existing vertical blanking specified bounds, but are within the new * blanking bounds. */ /** * \brief Construct a DelayedControls instance * \param[in] device The V4L2 device the controls have to be applied to * \param[in] controlParams Map of the numerical V4L2 control ids to their * associated control parameters. * * The control parameters comprise delays (in frames) and a priority write * flag. If this flag is set, the relevant control is written separately from, * and ahead of the rest of the batched controls. * * Only controls specified in \a controlParams are handled. If it is desired to * mix delayed controls and controls that take effect immediately, the * immediate controls must be listed in the \a controlParams map with a delay * value of 0. */ DelayedControls::DelayedControls(V4L2Device *device, const std::unordered_map<uint32_t, ControlParams> &controlParams) : device_(device), maxDelay_(0) { const ControlInfoMap &controls = device_->controls(); /* * Create a map of control ids to delays for controls exposed by the * device. */ for (auto const &param : controlParams) { auto it = controls.find(param.first); if (it == controls.end()) { LOG(DelayedControls, Error) << "Delay request for control id " << utils::hex(param.first) << " but control is not exposed by device " << device_->deviceNode(); continue; } const ControlId *id = it->first; controlParams_[id] = param.second; LOG(DelayedControls, Debug) << "Set a delay of " << controlParams_[id].delay << " and priority write flag " << controlParams_[id].priorityWrite << " for " << id->name(); maxDelay_ = std::max(maxDelay_, controlParams_[id].delay); } reset(); } /** * \brief Reset state machine * * Resets the state machine to a starting position based on control values * retrieved from the device.
*/ void DelayedControls::reset() { queueCount_ = 1; writeCount_ = 0; /* Retrieve control as reported by the device. */ std::vector<uint32_t> ids; for (auto const &param : controlParams_) ids.push_back(param.first->id()); ControlList controls = device_->getControls(ids); /* Seed the control queue with the controls reported by the device. */ values_.clear(); for (const auto &ctrl : controls) { const ControlId *id = device_->controls().idmap().at(ctrl.first); /* * Do not mark this control value as updated, it does not need * to be written to the device on startup. */ values_[id][0] = Info(ctrl.second, false); } } /** * \brief Push a set of controls on the queue * \param[in] controls List of controls to add to the device queue * * Push a set of controls to the control queue. This increases the control queue * depth by one. * * \return True if \a controls are accepted, or false otherwise */ bool DelayedControls::push(const ControlList &controls) { /* Copy state from previous frame. */ for (auto &ctrl : values_) { Info &info = ctrl.second[queueCount_]; info = values_[ctrl.first][queueCount_ - 1]; info.updated = false; } /* Update with new controls. */ const ControlIdMap &idmap = device_->controls().idmap(); for (const auto &control : controls) { const auto &it = idmap.find(control.first); if (it == idmap.end()) { LOG(DelayedControls, Warning) << "Unknown control " << control.first; return false; } const ControlId *id = it->second; if (controlParams_.find(id) == controlParams_.end()) return false; Info &info = values_[id][queueCount_]; info = Info(control.second); LOG(DelayedControls, Debug) << "Queuing " << id->name() << " to " << info.toString() << " at index " << queueCount_; } queueCount_++; return true; } /** * \brief Read back controls in effect at a sequence number * \param[in] sequence The sequence number to get controls for * * Read back which controls were in effect at a specific sequence number. The * history is a ring buffer of 16 entries where new and old values coexist. It * is the caller's responsibility not to read sequence numbers so old that they * have been pushed out of the history. * * Historic values are evicted by pushing new values onto the queue using * push(). The maximum history depth from the current sequence number that * yields valid values is thus 16 minus the number of controls pushed. * * \return The controls at \a sequence number */ ControlList DelayedControls::get(uint32_t sequence) { unsigned int index = std::max<int>(0, sequence - maxDelay_); ControlList out(device_->controls()); for (const auto &ctrl : values_) { const ControlId *id = ctrl.first; const Info &info = ctrl.second[index]; out.set(id->id(), info); LOG(DelayedControls, Debug) << "Reading " << id->name() << " to " << info.toString() << " at index " << index; } return out; } /** * \brief Inform DelayedControls of the start of a new frame * \param[in] sequence Sequence number of the frame that started * * Inform the state machine that a new frame has started and of its sequence * number. Any user of these helpers is responsible for informing the helper * about the start of any frame. This can easily be connected to the start of * exposure (SOE) V4L2 event. */ void DelayedControls::applyControls(uint32_t sequence) { LOG(DelayedControls, Debug) << "frame " << sequence << " started"; /* * Create control list peeking ahead in the value queue to ensure * values are set in time to satisfy the sensor delay.
*/ ControlList out(device_->controls()); for (auto &ctrl : values_) { const ControlId *id = ctrl.first; unsigned int delayDiff = maxDelay_ - controlParams_[id].delay; unsigned int index = std::max<int>(0, writeCount_ - delayDiff); Info &info = ctrl.second[index]; if (info.updated) { if (controlParams_[id].priorityWrite) { /* * This control must be written now, it could * affect validity of the other controls. */ ControlList priority(device_->controls()); priority.set(id->id(), info); device_->setControls(&priority); } else { /* * Batch up the list of controls and write them * at the end of the function. */ out.set(id->id(), info); } LOG(DelayedControls, Debug) << "Setting " << id->name() << " to " << info.toString() << " at index " << index; /* Done with this update, so mark as completed. */ info.updated = false; } } writeCount_ = sequence + 1; while (writeCount_ > queueCount_) { LOG(DelayedControls, Debug) << "Queue is empty, auto queue no-op."; push({}); } device_->setControls(&out); } } /* namespace libcamera */
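A hedged pipeline-handler sketch of driving this helper end to end with the API above. The control ids and delay values are illustrative; real delays must match the sensor's measured behaviour.

#include <memory>
#include <unordered_map>

#include <linux/v4l2-controls.h>

#include <libcamera/controls.h>

#include "libcamera/internal/delayed_controls.h"
#include "libcamera/internal/v4l2_device.h"

using namespace libcamera;

std::unique_ptr<DelayedControls> setupDelayedControls(V4L2Device *sensor)
{
	/*
	 * Assume exposure takes two frames to land and gain one; VBLANK is
	 * written with priority so new exposure bounds are accepted first.
	 */
	std::unordered_map<uint32_t, DelayedControls::ControlParams> params = {
		{ V4L2_CID_ANALOGUE_GAIN, { 1, false } },
		{ V4L2_CID_EXPOSURE, { 2, false } },
		{ V4L2_CID_VBLANK, { 1, true } },
	};

	return std::make_unique<DelayedControls>(sensor, params);
}

/* On every start-of-exposure event: */
void onFrameStart(DelayedControls *delayed, uint32_t sequence)
{
	delayed->applyControls(sequence);
}

/* When completing a request, report the controls that were in effect. */
ControlList effectiveControls(DelayedControls *delayed, uint32_t sequence)
{
	return delayed->get(sequence);
}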
0
repos/libcamera/src
repos/libcamera/src/libcamera/version.cpp.in
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2019, Google Inc. * * libcamera version * * This file is auto-generated. Do not edit. */ #include <libcamera/camera_manager.h> namespace libcamera { const std::string CameraManager::version_("v@VCS_TAG@"); } /* namespace libcamera */
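A trivial hedged usage sketch, assuming the version string is exposed through a static CameraManager::version() accessor matching the static version_ member defined above.

#include <iostream>

#include <libcamera/camera_manager.h>

int main()
{
	/* Prints e.g. "libcamera v0.3.0", depending on the VCS tag. */
	std::cout << "libcamera " << libcamera::CameraManager::version()
		  << std::endl;

	return 0;
}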
0
repos/libcamera/src
repos/libcamera/src/libcamera/camera_lens.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2021, Google Inc. * * A camera lens */ #include "libcamera/internal/camera_lens.h" #include <libcamera/base/utils.h> #include "libcamera/internal/v4l2_subdevice.h" /** * \file camera_lens.h * \brief A camera lens controller */ namespace libcamera { LOG_DEFINE_CATEGORY(CameraLens) /** * \class CameraLens * \brief A camera lens based on V4L2 subdevices * * The CameraLens class eases handling of lenses for pipeline handlers by * hiding the details of the V4L2 subdevice kernel API and caching lens * information. */ /** * \brief Construct a CameraLens * \param[in] entity The media entity backing the camera lens controller * * Once constructed, the instance must be initialized with init(). */ CameraLens::CameraLens(const MediaEntity *entity) : entity_(entity) { } /** * \brief Destroy a CameraLens */ CameraLens::~CameraLens() = default; /** * \brief Initialize the camera lens instance * * This function performs the initialisation steps of the CameraLens that may * fail. It shall be called once and only once after constructing the instance. * * \return 0 on success or a negative error code otherwise */ int CameraLens::init() { if (entity_->function() != MEDIA_ENT_F_LENS) { LOG(CameraLens, Error) << "Invalid lens function " << utils::hex(entity_->function()); return -EINVAL; } /* Create and open the subdev. */ subdev_ = std::make_unique<V4L2Subdevice>(entity_); int ret = subdev_->open(); if (ret < 0) return ret; ret = validateLensDriver(); if (ret) return ret; model_ = subdev_->model(); return 0; } /** * \brief Set the focus position of the lens * \param[in] position The focus position to apply * * This function sets the focus position of the lens to \a position. * * \return 0 on success or -EINVAL otherwise */ int CameraLens::setFocusPosition(int32_t position) { ControlList lensCtrls(subdev_->controls()); lensCtrls.set(V4L2_CID_FOCUS_ABSOLUTE, static_cast<int32_t>(position)); if (subdev_->setControls(&lensCtrls)) return -EINVAL; return 0; } int CameraLens::validateLensDriver() { int ret = 0; static constexpr uint32_t mandatoryControls[] = { V4L2_CID_FOCUS_ABSOLUTE, }; const ControlInfoMap &controls = subdev_->controls(); for (uint32_t ctrl : mandatoryControls) { if (!controls.count(ctrl)) { LOG(CameraLens, Error) << "Mandatory V4L2 control " << utils::hex(ctrl) << " not available"; ret = -EINVAL; } } if (ret) { LOG(CameraLens, Error) << "The lens kernel driver needs to be fixed"; LOG(CameraLens, Error) << "See Documentation/lens_driver_requirements.rst in" << " the libcamera sources for more information"; return ret; } return ret; } /** * \fn CameraLens::model() * \brief Retrieve the lens model name * * The lens model name is a free-formed string that uniquely identifies the * lens model. * * \return The lens model name */ std::string CameraLens::logPrefix() const { return "'" + entity_->name() + "'"; } /** * \fn CameraLens::controls() * \brief Retrieve the V4L2 controls of the lens' subdev * * \return A map of the V4L2 controls supported by the lens' driver */ const ControlInfoMap &CameraLens::controls() const { return subdev_->controls(); } } /* namespace libcamera */
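A hedged bring-up sketch using the API above, as a pipeline handler might do after locating a MEDIA_ENT_F_LENS entity in the media graph. The focus value 512 is an arbitrary, driver-specific placeholder.

#include <memory>

#include "libcamera/internal/camera_lens.h"
#include "libcamera/internal/media_object.h"

using namespace libcamera;

std::unique_ptr<CameraLens> bringUpLens(const MediaEntity *entity)
{
	auto lens = std::make_unique<CameraLens>(entity);

	/* init() validates the entity function and the kernel driver. */
	if (lens->init())
		return nullptr;

	/* Move to an arbitrary absolute focus position. */
	if (lens->setFocusPosition(512))
		return nullptr;

	return lens;
}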
0
repos/libcamera/src
repos/libcamera/src/libcamera/source_paths.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2021, Google Inc. * * Identify libcamera source and build paths */ #include "libcamera/internal/source_paths.h" #include <dlfcn.h> #include <elf.h> #include <link.h> #include <stdlib.h> #include <sys/stat.h> #include <sys/types.h> #include <libcamera/base/utils.h> /** * \file source_paths.h * \brief Identify the build and source path of a not-yet-installed library */ /* musl doesn't declare _DYNAMIC in link.h, declare it manually. */ extern ElfW(Dyn) _DYNAMIC[]; namespace libcamera { namespace { /** * \brief Check if libcamera is installed or not * * Utilise the build_rpath dynamic tag which is stripped out by meson at * install time to determine at runtime if the library currently executing * has been installed or not. * * \return True if libcamera is installed, false otherwise */ bool isLibcameraInstalled() { /* * DT_RUNPATH (DT_RPATH when the linker uses old dtags) is removed on * install. */ for (const ElfW(Dyn) *dyn = _DYNAMIC; dyn->d_tag != DT_NULL; ++dyn) { if (dyn->d_tag == DT_RUNPATH || dyn->d_tag == DT_RPATH) return false; } return true; } } /* namespace */ namespace utils { /** * \brief Retrieve the path to the build directory * * During development, it is useful to run libcamera binaries directly from the * build directory without installing them. This function helps components that * need to locate resources in the build tree, such as IPA modules or IPA proxy * workers, by providing them with the path to the root of the build directory. * Callers can then use it to complement or override searches in system-wide * directories. * * If libcamera has been installed, the build directory path is not available * and this function returns an empty string. * * \return The path to the build directory if running from a build, or an empty * string otherwise */ std::string libcameraBuildPath() { if (isLibcameraInstalled()) return std::string(); Dl_info info; /* Look up our own symbol. */ int ret = dladdr(reinterpret_cast<void *>(libcameraBuildPath), &info); if (ret == 0) return std::string(); std::string path = dirname(info.dli_fname) + "/../../"; char *real = realpath(path.c_str(), nullptr); if (!real) return std::string(); path = real; free(real); return path + "/"; } /** * \brief Retrieve the path to the source directory * * During development, it is useful to run libcamera binaries directly from the * build directory without installing them. This function helps components that * need to locate resources in the source tree, such as IPA configuration * files, by providing them with the path to the root of the source directory. * Callers can then use it to complement or override searches in system-wide * directories. * * If libcamera has been installed, the source directory path is not available * and this function returns an empty string. * * \return The path to the source directory if running from a build directory, * or an empty string otherwise */ std::string libcameraSourcePath() { std::string path = libcameraBuildPath(); if (path.empty()) return std::string(); path += "source"; char *real = realpath(path.c_str(), nullptr); if (!real) return std::string(); path = real; free(real); struct stat statbuf; int ret = stat(path.c_str(), &statbuf); if (ret < 0 || (statbuf.st_mode & S_IFMT) != S_IFDIR) return std::string(); return path + "/"; } } /* namespace utils */ } /* namespace libcamera */
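A hedged usage sketch: prefer resources from the build tree when running uninstalled, otherwise fall back to an installed location. The "src/ipa/" suffix and the system directory are placeholders, not paths mandated by this file.

#include <string>

#include "libcamera/internal/source_paths.h"

/* Locate a directory to search for IPA modules. */
std::string ipaModuleDir()
{
	std::string buildPath = libcamera::utils::libcameraBuildPath();

	/* Non-empty only when running from an uninstalled build. */
	if (!buildPath.empty())
		return buildPath + "src/ipa/";

	return "/usr/lib/libcamera/"; /* hypothetical install location */
}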
0
repos/libcamera/src
repos/libcamera/src/libcamera/v4l2_videodevice.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2019, Google Inc. * * V4L2 Video Device */ #include "libcamera/internal/v4l2_videodevice.h" #include <algorithm> #include <array> #include <fcntl.h> #include <iomanip> #include <sstream> #include <string.h> #include <sys/ioctl.h> #include <sys/syscall.h> #include <sys/time.h> #include <unistd.h> #include <vector> #include <linux/version.h> #include <libcamera/base/event_notifier.h> #include <libcamera/base/log.h> #include <libcamera/base/shared_fd.h> #include <libcamera/base/unique_fd.h> #include <libcamera/base/utils.h> #include "libcamera/internal/formats.h" #include "libcamera/internal/framebuffer.h" #include "libcamera/internal/media_device.h" #include "libcamera/internal/media_object.h" /** * \file v4l2_videodevice.h * \brief V4L2 Video Device */ namespace libcamera { LOG_DECLARE_CATEGORY(V4L2) /** * \struct V4L2Capability * \brief struct v4l2_capability object wrapper and helpers * * The V4L2Capability structure manages the information returned by the * VIDIOC_QUERYCAP ioctl. */ /** * \fn V4L2Capability::driver() * \brief Retrieve the driver module name * \return The string containing the name of the driver module */ /** * \fn V4L2Capability::card() * \brief Retrieve the video device card name * \return The string containing the video device name */ /** * \fn V4L2Capability::bus_info() * \brief Retrieve the location of the video device in the system * \return The string containing the video device location */ /** * \fn V4L2Capability::device_caps() * \brief Retrieve the capabilities of the video device * \return The video device specific capabilities if V4L2_CAP_DEVICE_CAPS is * set or driver capabilities otherwise */ /** * \fn V4L2Capability::isMultiplanar() * \brief Identify if the video device implements the V4L2 multiplanar APIs * \return True if the video device supports multiplanar APIs */ /** * \fn V4L2Capability::isCapture() * \brief Identify if the video device captures data * \return True if the video device can capture data */ /** * \fn V4L2Capability::isOutput() * \brief Identify if the video device outputs data * \return True if the video device can output data */ /** * \fn V4L2Capability::isVideo() * \brief Identify if the video device captures or outputs images * \return True if the video device can capture or output images */ /** * \fn V4L2Capability::isM2M() * \brief Identify if the device is a Memory-to-Memory device * \return True if the device can capture and output images using the M2M API */ /** * \fn V4L2Capability::isMeta() * \brief Identify if the video device captures or outputs image meta-data * \return True if the video device can capture or output image meta-data */ /** * \fn V4L2Capability::isVideoCapture() * \brief Identify if the video device captures images * \return True if the video device can capture images */ /** * \fn V4L2Capability::isVideoOutput() * \brief Identify if the video device outputs images * \return True if the video device can output images */ /** * \fn V4L2Capability::isMetaCapture() * \brief Identify if the video device captures image meta-data * \return True if the video device can capture image meta-data */ /** * \fn V4L2Capability::isMetaOutput() * \brief Identify if the video device outputs image meta-data * \return True if the video device can output image meta-data */ /** * \fn V4L2Capability::hasStreaming() * \brief Determine if the video device can perform Streaming I/O * \return True if the video device provides Streaming I/O IOCTLs */ /** * \fn 
V4L2Capability::hasMediaController() * \brief Determine if the video device uses Media Controller to configure I/O * \return True if the video device is controlled by a Media Controller device */ /** * \class V4L2BufferCache * \brief Hot cache of associations between V4L2 buffer indexes and FrameBuffer * * When importing buffers, V4L2 performs lazy mapping of dmabuf instances at * VIDIOC_QBUF (or VIDIOC_PREPARE_BUF) time and keeps the mapping associated * with the V4L2 buffer, as identified by its index. If the same V4L2 buffer is * then reused and queued with different dmabufs, the old dmabufs will be * unmapped and the new ones mapped. To keep this process efficient, it is * crucial to consistently use the same V4L2 buffer for given dmabufs through * the whole duration of a capture cycle. * * The V4L2BufferCache class keeps a map of previous dmabufs to V4L2 buffer * index associations to help select V4L2 buffers. It tracks, for every * entry, whether the V4L2 buffer is in use, and offers lookup of the best free * V4L2 buffer for a set of dmabufs. */ /** * \brief Create an empty cache with \a numEntries entries * \param[in] numEntries Number of entries to reserve in the cache * * Create a cache with \a numEntries entries all marked as unused. The entries * will be populated as the cache is used. This is typically used to implement * buffer import, with buffers added to the cache as they are queued. */ V4L2BufferCache::V4L2BufferCache(unsigned int numEntries) : lastUsedCounter_(1), missCounter_(0) { cache_.resize(numEntries); } /** * \brief Create a pre-populated cache * \param[in] buffers Array of buffers to pre-populate the cache with * * Create a cache pre-populated with \a buffers. This is typically used to * implement buffer export, with all buffers added to the cache when they are * allocated. */ V4L2BufferCache::V4L2BufferCache(const std::vector<std::unique_ptr<FrameBuffer>> &buffers) : lastUsedCounter_(1), missCounter_(0) { for (const std::unique_ptr<FrameBuffer> &buffer : buffers) cache_.emplace_back(true, lastUsedCounter_.fetch_add(1, std::memory_order_acq_rel), *buffer.get()); } V4L2BufferCache::~V4L2BufferCache() { if (missCounter_ > cache_.size()) LOG(V4L2, Debug) << "Cache misses: " << missCounter_; } /** * \brief Check if all the entries in the cache are unused * \return True if all entries are unused, false otherwise */ bool V4L2BufferCache::isEmpty() const { for (auto const &entry : cache_) { if (!entry.free_) return false; } return true; } /** * \brief Find the best V4L2 buffer for a FrameBuffer * \param[in] buffer The FrameBuffer * * Find the best V4L2 buffer index to be used for the FrameBuffer \a buffer * based on previous mappings of frame buffers to V4L2 buffers. If a free V4L2 * buffer previously used with the same dmabufs as \a buffer is found in the * cache, return its index. Otherwise return the index of the first free V4L2 * buffer and record its association with the dmabufs of \a buffer. * * \return The index of the best V4L2 buffer, or -ENOENT if no free V4L2 buffer * is available */ int V4L2BufferCache::get(const FrameBuffer &buffer) { bool hit = false; int use = -1; uint64_t oldest = UINT64_MAX; for (unsigned int index = 0; index < cache_.size(); index++) { const Entry &entry = cache_[index]; if (!entry.free_) continue; /* Try to find a cache hit by comparing the planes.
*/ if (entry == buffer) { hit = true; use = index; break; } if (entry.lastUsed_ < oldest) { use = index; oldest = entry.lastUsed_; } } if (!hit) missCounter_++; if (use < 0) return -ENOENT; cache_[use] = Entry(false, lastUsedCounter_.fetch_add(1, std::memory_order_acq_rel), buffer); return use; } /** * \brief Mark buffer \a index as free in the cache * \param[in] index The V4L2 buffer index */ void V4L2BufferCache::put(unsigned int index) { ASSERT(index < cache_.size()); cache_[index].free_ = true; } V4L2BufferCache::Entry::Entry() : free_(true), lastUsed_(0) { } V4L2BufferCache::Entry::Entry(bool free, uint64_t lastUsed, const FrameBuffer &buffer) : free_(free), lastUsed_(lastUsed) { for (const FrameBuffer::Plane &plane : buffer.planes()) planes_.emplace_back(plane); } bool V4L2BufferCache::Entry::operator==(const FrameBuffer &buffer) const { const std::vector<FrameBuffer::Plane> &planes = buffer.planes(); if (planes_.size() != planes.size()) return false; for (unsigned int i = 0; i < planes.size(); i++) if (planes_[i].fd != planes[i].fd.get() || planes_[i].length != planes[i].length) return false; return true; } /** * \class V4L2DeviceFormat * \brief The V4L2 video device image format and sizes * * This class describes the image format and resolution to be programmed on a * V4L2 video device. The image format is defined by a fourcc code (as specified * by the V4L2 API with the V4L2_PIX_FMT_* macros), a resolution (width and * height) and one to three planes with configurable line stride and a total * per-plane size in bytes. * * Image formats, as defined by the V4L2 APIs, are categorised as packed, * semi-planar and planar, and describe the layout of the image pixel components * stored in memory. * * Packed image formats store pixel components one after the other, in a * contiguous memory area. Examples of packed image formats are YUYV * permutations, RGB with different pixel sub-sampling ratios such as RGB565 or * RGB666 or Raw-Bayer formats such as SRGGB8 or SGRBG12. * * Semi-planar and planar image formats store the pixel components in separate * and possibly non-contiguous memory areas, named planes, whose sizes depend on * the pixel components sub-sampling ratios, which are defined by the format. * Semi-planar formats use two planes to store pixel components and notable * examples of such formats are the NV12 and NV16 formats, while planar formats * use three planes to store pixel components and notable examples are YUV422 * and YUV420. * * Image formats supported by the V4L2 API are defined and described in Section * number 2 of the "Part I - Video for Linux API" chapter of the "Linux Media * Infrastructure userspace API", part of the Linux kernel documentation. * * In the context of this document, packed image formats are referred to as * "packed formats" and semi-planar and planar image formats are referred to as * "planar formats". * * V4L2 also defines two different sets of APIs to work with devices that store * planes in contiguous or separate memory areas. They are named "Single-plane * APIs" and "Multi-plane APIs" respectively and are documented in Section 2.1 * and Section 2.2 of the above mentioned "Part I - Video for Linux API" * documentation. * * The single-plane API allows, among other parameters, the configuration of the * image resolution, the pixel format and the stride length. In that case the * stride applies to all planes (possibly sub-sampled). 
The multi-plane API * allows configuring the resolution, the pixel format and a per-plane stride * length and total size. * * Packed image formats, which occupy a single memory area, are easily described * through the single-plane API. When used on a video device that implements the * multi-plane API, only the size and stride information contained in the first * plane are taken into account. * * Planar image formats, which occupy distinct memory areas, are easily * described through the multi-plane APIs. When used on a video device that * implements the single-plane API, all planes are stored one after the other * in a contiguous memory area, and it is not possible to configure per-plane * stride length and size, but only a global stride length which is applied to * all planes. * * The V4L2DeviceFormat class describes both packed and planar image formats, * regardless of the API type (single or multi plane) implemented by the video * device the format has to be applied to. The total size and bytes per line * of images represented with packed formats are configured using the first * entry of the V4L2DeviceFormat::planes array, while the per-plane size and * per-plane stride length of images represented with planar image formats are * configured using the appropriate number of entries of the * V4L2DeviceFormat::planes array, as prescribed by the image format * definition (semi-planar formats use 2 entries, while planar formats use the * whole 3 entries). The number of valid entries of the * V4L2DeviceFormat::planes array is defined by the * V4L2DeviceFormat::planesCount value. */ /** * \struct V4L2DeviceFormat::Plane * \brief Per-plane memory size information * \var V4L2DeviceFormat::Plane::size * \brief The plane total memory size (in bytes) * \var V4L2DeviceFormat::Plane::bpl * \brief The plane line stride (in bytes) */ /** * \var V4L2DeviceFormat::size * \brief The image size in pixels */ /** * \var V4L2DeviceFormat::fourcc * \brief The fourcc code describing the pixel encoding scheme * * The fourcc code, as defined by the V4L2 API with the V4L2_PIX_FMT_* macros, * that identifies the image format pixel encoding scheme. */ /** * \var V4L2DeviceFormat::colorSpace * \brief The color space of the pixels * * The color space of the image. When setting the format, this may be * unset, in which case the driver gets to use its default color space. * After being set, this value should contain the color space that * was actually used. If this value is unset, then the color space chosen * by the driver could not be represented by the ColorSpace class (and * should probably be added). * * It is up to the pipeline handler or application to check if the * resulting color space is acceptable. */ /** * \var V4L2DeviceFormat::planes * \brief The per-plane memory size information * * Images are stored in memory in one or more data planes. Each data plane has a * specific line stride and memory size, which could differ from the image * visible sizes to accommodate padding at the end of lines and end of planes. * Only the first \ref planesCount entries are considered valid.
*/ /** * \var V4L2DeviceFormat::planesCount * \brief The number of valid data planes */ /** * \brief Assemble and return a string describing the format * \return A string describing the V4L2DeviceFormat */ const std::string V4L2DeviceFormat::toString() const { std::stringstream ss; ss << *this; return ss.str(); } /** * \brief Insert a text representation of a V4L2DeviceFormat into an output * stream * \param[in] out The output stream * \param[in] f The V4L2DeviceFormat * \return The output stream \a out */ std::ostream &operator<<(std::ostream &out, const V4L2DeviceFormat &f) { out << f.size << "-" << f.fourcc; return out; } /** * \class V4L2VideoDevice * \brief V4L2VideoDevice object and API * * The V4L2VideoDevice class models an instance of a V4L2 video device. * It is constructed with the path to a V4L2 video device node. The device node * is only opened upon a call to open() which must be checked for success. * * The video device capabilities are validated when the device is opened and the * device is rejected if it is not a suitable V4L2 capture or output video * device, or if the video device does not support streaming I/O. * * No API call other than open(), isOpen() and close() shall be called on an * unopened device instance. * * The V4L2VideoDevice class supports the V4L2 MMAP and DMABUF memory types: * * - The allocateBuffers() function wraps buffer allocation with the V4L2 MMAP * memory type. It requests buffers from the driver, allocating the * corresponding memory, and exports them as a set of FrameBuffer objects. * Upon successful return the driver's internal buffer management is * initialized in MMAP mode, and the video device is ready to accept * queueBuffer() calls. * * This is the most traditional V4L2 buffer management, and is mostly useful * to support internal buffer pools in pipeline handlers, either for CPU * consumption (such as statistics or parameters pools), or for internal * image buffers shared between devices. * * - The exportBuffers() function operates similarly to allocateBuffers(), but * leaves the driver's internal buffer management uninitialized. It uses the * V4L2 buffer orphaning support to allocate buffers with the MMAP method, * export them as a set of FrameBuffer objects, and reset the driver's * internal buffer management. The video device shall be initialized with * importBuffers() or allocateBuffers() before it can accept queueBuffer() * calls. The exported buffers are directly usable with any V4L2 video device * in DMABUF mode, or with other dmabuf importers. * * This method is mostly useful to implement buffer allocation helpers or to * allocate ancillary buffers, when a V4L2 video device is used in DMABUF * mode but no other source of buffers is available. An example use case * would be allocation of scratch buffers to be used in case of buffer * underruns on a video device that is otherwise supplied with external * buffers. * * - The importBuffers() function initializes the driver's buffer management to * import buffers in DMABUF mode. It requests buffers from the driver, but * doesn't allocate memory. Upon successful return, the video device is ready * to accept queueBuffer() calls. The buffers to be imported are provided to * queueBuffer(), and may be supplied externally, or come from a previous * exportBuffers() call. * * This is the usual buffers initialization method for video devices whose * buffers are exposed outside of libcamera. 
It is also typically used on one * of the two video device that participate in buffer sharing inside * pipelines, the other video device typically using allocateBuffers(). * * - The releaseBuffers() function resets the driver's internal buffer * management that was initialized by a previous call to allocateBuffers() or * importBuffers(). Any memory allocated by allocateBuffers() is freed. * Buffer exported by exportBuffers() are not affected by this function. * * The V4L2VideoDevice class tracks queued buffers and handles buffer events. It * automatically dequeues completed buffers and emits the \ref bufferReady * signal. * * Upon destruction any device left open will be closed, and any resources * released. * * \context This class is \threadbound. */ /** * \typedef V4L2VideoDevice::Formats * \brief A map of supported V4L2 pixel formats to frame sizes */ /** * \brief Construct a V4L2VideoDevice * \param[in] deviceNode The file-system path to the video device node */ V4L2VideoDevice::V4L2VideoDevice(const std::string &deviceNode) : V4L2Device(deviceNode), formatInfo_(nullptr), cache_(nullptr), fdBufferNotifier_(nullptr), state_(State::Stopped), watchdogDuration_(0.0) { /* * We default to an MMAP based CAPTURE video device, however this will * be updated based upon the device capabilities. */ bufferType_ = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; memoryType_ = V4L2_MEMORY_MMAP; } /** * \brief Construct a V4L2VideoDevice from a MediaEntity * \param[in] entity The MediaEntity to build the video device from * * Construct a V4L2VideoDevice from a MediaEntity's device node path. */ V4L2VideoDevice::V4L2VideoDevice(const MediaEntity *entity) : V4L2VideoDevice(entity->deviceNode()) { watchdog_.timeout.connect(this, &V4L2VideoDevice::watchdogExpired); } V4L2VideoDevice::~V4L2VideoDevice() { close(); } /** * \brief Open the V4L2 video device node and query its capabilities * * \return 0 on success or a negative error code otherwise */ int V4L2VideoDevice::open() { int ret; ret = V4L2Device::open(O_RDWR | O_NONBLOCK); if (ret < 0) return ret; ret = ioctl(VIDIOC_QUERYCAP, &caps_); if (ret < 0) { LOG(V4L2, Error) << "Failed to query device capabilities: " << strerror(-ret); return ret; } if (caps_.version < KERNEL_VERSION(5, 0, 0)) { LOG(V4L2, Error) << "V4L2 API v" << (caps_.version >> 16) << "." << ((caps_.version >> 8) & 0xff) << "." << (caps_.version & 0xff) << " too old, v5.0.0 or later is required"; return -EINVAL; } if (!caps_.hasStreaming()) { LOG(V4L2, Error) << "Device does not support streaming I/O"; return -EINVAL; } /* * Set buffer type and wait for read notifications on CAPTURE video * devices (POLLIN), and write notifications for OUTPUT video devices * (POLLOUT). */ EventNotifier::Type notifierType; if (caps_.isVideoCapture()) { notifierType = EventNotifier::Read; bufferType_ = caps_.isMultiplanar() ? V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE : V4L2_BUF_TYPE_VIDEO_CAPTURE; } else if (caps_.isVideoOutput()) { notifierType = EventNotifier::Write; bufferType_ = caps_.isMultiplanar() ? 
V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE : V4L2_BUF_TYPE_VIDEO_OUTPUT; } else if (caps_.isMetaCapture()) { notifierType = EventNotifier::Read; bufferType_ = V4L2_BUF_TYPE_META_CAPTURE; } else if (caps_.isMetaOutput()) { notifierType = EventNotifier::Write; bufferType_ = V4L2_BUF_TYPE_META_OUTPUT; } else { LOG(V4L2, Error) << "Device is not a supported type"; return -EINVAL; } fdBufferNotifier_ = new EventNotifier(fd(), notifierType); fdBufferNotifier_->activated.connect(this, &V4L2VideoDevice::bufferAvailable); fdBufferNotifier_->setEnabled(false); LOG(V4L2, Debug) << "Opened device " << caps_.bus_info() << ": " << caps_.driver() << ": " << caps_.card(); ret = initFormats(); if (ret) return ret; return 0; } /** * \brief Open a V4L2 video device from an opened file handle and query its * capabilities * \param[in] handle The file descriptor to set * \param[in] type The device type to operate on * * This function opens a video device from the existing file descriptor \a * handle. Like open(), this function queries the capabilities of the device, * but handles it according to the given device \a type instead of determining * its type from the capabilities. This can be used to force a given device type * for memory-to-memory devices. * * The file descriptor \a handle is duplicated, no reference to the original * handle is kept. * * \return 0 on success or a negative error code otherwise */ int V4L2VideoDevice::open(SharedFD handle, enum v4l2_buf_type type) { int ret; UniqueFD newFd = handle.dup(); if (!newFd.isValid()) { ret = -errno; LOG(V4L2, Error) << "Failed to duplicate file handle: " << strerror(-ret); return ret; } ret = V4L2Device::setFd(std::move(newFd)); if (ret < 0) { LOG(V4L2, Error) << "Failed to set file handle: " << strerror(-ret); return ret; } ret = ioctl(VIDIOC_QUERYCAP, &caps_); if (ret < 0) { LOG(V4L2, Error) << "Failed to query device capabilities: " << strerror(-ret); return ret; } if (!caps_.hasStreaming()) { LOG(V4L2, Error) << "Device does not support streaming I/O"; return -EINVAL; } /* * Set buffer type and wait for read notifications on CAPTURE video * devices (POLLIN), and write notifications for OUTPUT video devices * (POLLOUT). */ EventNotifier::Type notifierType; switch (type) { case V4L2_BUF_TYPE_VIDEO_OUTPUT: notifierType = EventNotifier::Write; bufferType_ = caps_.isMultiplanar() ? V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE : V4L2_BUF_TYPE_VIDEO_OUTPUT; break; case V4L2_BUF_TYPE_VIDEO_CAPTURE: notifierType = EventNotifier::Read; bufferType_ = caps_.isMultiplanar() ? 
V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE : V4L2_BUF_TYPE_VIDEO_CAPTURE; break; default: LOG(V4L2, Error) << "Unsupported buffer type"; return -EINVAL; } fdBufferNotifier_ = new EventNotifier(fd(), notifierType); fdBufferNotifier_->activated.connect(this, &V4L2VideoDevice::bufferAvailable); fdBufferNotifier_->setEnabled(false); LOG(V4L2, Debug) << "Opened device " << caps_.bus_info() << ": " << caps_.driver() << ": " << caps_.card(); ret = initFormats(); if (ret) return ret; return 0; } int V4L2VideoDevice::initFormats() { const std::vector<V4L2PixelFormat> &deviceFormats = enumPixelformats(0); if (deviceFormats.empty()) { LOG(V4L2, Error) << "Failed to initialize device formats"; return -EINVAL; } pixelFormats_ = { deviceFormats.begin(), deviceFormats.end() }; int ret = getFormat(&format_); if (ret) { LOG(V4L2, Error) << "Failed to get format"; return ret; } formatInfo_ = &PixelFormatInfo::info(format_.fourcc); return 0; } /** * \brief Close the video device, releasing any resources acquired by open() */ void V4L2VideoDevice::close() { if (!isOpen()) return; releaseBuffers(); delete fdBufferNotifier_; formatInfo_ = nullptr; V4L2Device::close(); } /** * \fn V4L2VideoDevice::driverName() * \brief Retrieve the name of the V4L2 device driver * \return The string containing the driver name */ /** * \fn V4L2VideoDevice::deviceName() * \brief Retrieve the name of the V4L2 video device * \return The string containing the device name */ /** * \fn V4L2VideoDevice::busName() * \brief Retrieve the location of the device in the system * \return The string containing the device location */ /** * \fn V4L2VideoDevice::caps() * \brief Retrieve the device V4L2 capabilities * \return The device V4L2 capabilities */ std::string V4L2VideoDevice::logPrefix() const { return deviceNode() + "[" + std::to_string(fd()) + (V4L2_TYPE_IS_OUTPUT(bufferType_) ? ":out]" : ":cap]"); } /** * \brief Retrieve the image format set on the V4L2 video device * \param[out] format The image format applied on the video device * \return 0 on success or a negative error code otherwise */ int V4L2VideoDevice::getFormat(V4L2DeviceFormat *format) { if (caps_.isMeta()) return getFormatMeta(format); else if (caps_.isMultiplanar()) return getFormatMultiplane(format); else return getFormatSingleplane(format); } /** * \brief Try an image format on the V4L2 video device * \param[inout] format The image format to test applicability to the video device * * Try the supplied \a format on the video device without applying it, returning * the format that would be applied. This is equivalent to setFormat(), except * that the device configuration is not changed. * * \return 0 on success or a negative error code otherwise */ int V4L2VideoDevice::tryFormat(V4L2DeviceFormat *format) { if (caps_.isMeta()) return trySetFormatMeta(format, false); else if (caps_.isMultiplanar()) return trySetFormatMultiplane(format, false); else return trySetFormatSingleplane(format, false); } /** * \brief Configure an image format on the V4L2 video device * \param[inout] format The image format to apply to the video device * * Apply the supplied \a format to the video device, and return the actually * applied format parameters, as \ref V4L2VideoDevice::getFormat would do. 
* * \return 0 on success or a negative error code otherwise */ int V4L2VideoDevice::setFormat(V4L2DeviceFormat *format) { int ret = 0; if (caps_.isMeta()) ret = trySetFormatMeta(format, true); else if (caps_.isMultiplanar()) ret = trySetFormatMultiplane(format, true); else ret = trySetFormatSingleplane(format, true); /* Cache the set format on success. */ if (ret) return ret; format_ = *format; formatInfo_ = &PixelFormatInfo::info(format_.fourcc); return 0; } int V4L2VideoDevice::getFormatMeta(V4L2DeviceFormat *format) { struct v4l2_format v4l2Format = {}; struct v4l2_meta_format *pix = &v4l2Format.fmt.meta; int ret; v4l2Format.type = bufferType_; ret = ioctl(VIDIOC_G_FMT, &v4l2Format); if (ret) { LOG(V4L2, Error) << "Unable to get format: " << strerror(-ret); return ret; } format->size.width = 0; format->size.height = 0; format->fourcc = V4L2PixelFormat(pix->dataformat); format->planesCount = 1; format->planes[0].bpl = pix->buffersize; format->planes[0].size = pix->buffersize; return 0; } int V4L2VideoDevice::trySetFormatMeta(V4L2DeviceFormat *format, bool set) { struct v4l2_format v4l2Format = {}; struct v4l2_meta_format *pix = &v4l2Format.fmt.meta; int ret; v4l2Format.type = bufferType_; pix->dataformat = format->fourcc; pix->buffersize = format->planes[0].size; ret = ioctl(set ? VIDIOC_S_FMT : VIDIOC_TRY_FMT, &v4l2Format); if (ret) { LOG(V4L2, Error) << "Unable to " << (set ? "set" : "try") << " format: " << strerror(-ret); return ret; } /* * Return to caller the format actually applied on the video device, * which might differ from the requested one. */ format->size.width = 0; format->size.height = 0; format->fourcc = V4L2PixelFormat(pix->dataformat); format->planesCount = 1; format->planes[0].bpl = pix->buffersize; format->planes[0].size = pix->buffersize; return 0; } template<typename T> std::optional<ColorSpace> V4L2VideoDevice::toColorSpace(const T &v4l2Format) { V4L2PixelFormat fourcc{ v4l2Format.pixelformat }; return V4L2Device::toColorSpace(v4l2Format, PixelFormatInfo::info(fourcc).colourEncoding); } int V4L2VideoDevice::getFormatMultiplane(V4L2DeviceFormat *format) { struct v4l2_format v4l2Format = {}; struct v4l2_pix_format_mplane *pix = &v4l2Format.fmt.pix_mp; int ret; v4l2Format.type = bufferType_; ret = ioctl(VIDIOC_G_FMT, &v4l2Format); if (ret) { LOG(V4L2, Error) << "Unable to get format: " << strerror(-ret); return ret; } format->size.width = pix->width; format->size.height = pix->height; format->fourcc = V4L2PixelFormat(pix->pixelformat); format->planesCount = pix->num_planes; format->colorSpace = toColorSpace(*pix); for (unsigned int i = 0; i < format->planesCount; ++i) { format->planes[i].bpl = pix->plane_fmt[i].bytesperline; format->planes[i].size = pix->plane_fmt[i].sizeimage; } return 0; } int V4L2VideoDevice::trySetFormatMultiplane(V4L2DeviceFormat *format, bool set) { struct v4l2_format v4l2Format = {}; struct v4l2_pix_format_mplane *pix = &v4l2Format.fmt.pix_mp; int ret; v4l2Format.type = bufferType_; pix->width = format->size.width; pix->height = format->size.height; pix->pixelformat = format->fourcc; pix->num_planes = format->planesCount; pix->field = V4L2_FIELD_NONE; if (format->colorSpace) { fromColorSpace(format->colorSpace, *pix); if (caps_.isVideoCapture()) pix->flags |= V4L2_PIX_FMT_FLAG_SET_CSC; } ASSERT(pix->num_planes <= std::size(pix->plane_fmt)); for (unsigned int i = 0; i < pix->num_planes; ++i) { pix->plane_fmt[i].bytesperline = format->planes[i].bpl; pix->plane_fmt[i].sizeimage = format->planes[i].size; } ret = ioctl(set ? 
VIDIOC_S_FMT : VIDIOC_TRY_FMT, &v4l2Format); if (ret) { LOG(V4L2, Error) << "Unable to " << (set ? "set" : "try") << " format: " << strerror(-ret); return ret; } /* * Return to caller the format actually applied on the video device, * which might differ from the requested one. */ format->size.width = pix->width; format->size.height = pix->height; format->fourcc = V4L2PixelFormat(pix->pixelformat); format->planesCount = pix->num_planes; for (unsigned int i = 0; i < format->planesCount; ++i) { format->planes[i].bpl = pix->plane_fmt[i].bytesperline; format->planes[i].size = pix->plane_fmt[i].sizeimage; } format->colorSpace = toColorSpace(*pix); return 0; } int V4L2VideoDevice::getFormatSingleplane(V4L2DeviceFormat *format) { struct v4l2_format v4l2Format = {}; struct v4l2_pix_format *pix = &v4l2Format.fmt.pix; int ret; v4l2Format.type = bufferType_; ret = ioctl(VIDIOC_G_FMT, &v4l2Format); if (ret) { LOG(V4L2, Error) << "Unable to get format: " << strerror(-ret); return ret; } format->size.width = pix->width; format->size.height = pix->height; format->fourcc = V4L2PixelFormat(pix->pixelformat); format->planesCount = 1; format->planes[0].bpl = pix->bytesperline; format->planes[0].size = pix->sizeimage; format->colorSpace = toColorSpace(*pix); return 0; } int V4L2VideoDevice::trySetFormatSingleplane(V4L2DeviceFormat *format, bool set) { struct v4l2_format v4l2Format = {}; struct v4l2_pix_format *pix = &v4l2Format.fmt.pix; int ret; v4l2Format.type = bufferType_; pix->width = format->size.width; pix->height = format->size.height; pix->pixelformat = format->fourcc; pix->bytesperline = format->planes[0].bpl; pix->field = V4L2_FIELD_NONE; if (format->colorSpace) { fromColorSpace(format->colorSpace, *pix); if (caps_.isVideoCapture()) pix->flags |= V4L2_PIX_FMT_FLAG_SET_CSC; } ret = ioctl(set ? VIDIOC_S_FMT : VIDIOC_TRY_FMT, &v4l2Format); if (ret) { LOG(V4L2, Error) << "Unable to " << (set ? "set" : "try") << " format: " << strerror(-ret); return ret; } /* * Return to caller the format actually applied on the device, * which might differ from the requested one. */ format->size.width = pix->width; format->size.height = pix->height; format->fourcc = V4L2PixelFormat(pix->pixelformat); format->planesCount = 1; format->planes[0].bpl = pix->bytesperline; format->planes[0].size = pix->sizeimage; format->colorSpace = toColorSpace(*pix); return 0; } /** * \brief Enumerate all pixel formats and frame sizes * \param[in] code Restrict formats to this media bus code. * * Enumerate all pixel formats and frame sizes supported by the video device. * If the \a code argument is not zero, only formats compatible with that media * bus code will be enumerated. 
* * \return A list of the supported video device formats */ V4L2VideoDevice::Formats V4L2VideoDevice::formats(uint32_t code) { Formats formats; for (V4L2PixelFormat pixelFormat : enumPixelformats(code)) { std::vector<SizeRange> sizes = enumSizes(pixelFormat); if (sizes.empty()) return {}; if (formats.find(pixelFormat) != formats.end()) { LOG(V4L2, Error) << "Could not add sizes for pixel format " << pixelFormat; return {}; } formats.emplace(pixelFormat, sizes); } return formats; } std::vector<V4L2PixelFormat> V4L2VideoDevice::enumPixelformats(uint32_t code) { std::vector<V4L2PixelFormat> formats; int ret; if (code && !caps_.hasMediaController()) { LOG(V4L2, Error) << "Media bus code filtering not supported by the device"; return {}; } for (unsigned int index = 0; ; index++) { struct v4l2_fmtdesc pixelformatEnum = {}; pixelformatEnum.index = index; pixelformatEnum.type = bufferType_; pixelformatEnum.mbus_code = code; ret = ioctl(VIDIOC_ENUM_FMT, &pixelformatEnum); if (ret) break; formats.push_back(V4L2PixelFormat(pixelformatEnum.pixelformat)); } if (ret && ret != -EINVAL) { LOG(V4L2, Error) << "Unable to enumerate pixel formats: " << strerror(-ret); return {}; } return formats; } std::vector<SizeRange> V4L2VideoDevice::enumSizes(V4L2PixelFormat pixelFormat) { std::vector<SizeRange> sizes; int ret; for (unsigned int index = 0;; index++) { struct v4l2_frmsizeenum frameSize = {}; frameSize.index = index; frameSize.pixel_format = pixelFormat; ret = ioctl(VIDIOC_ENUM_FRAMESIZES, &frameSize); if (ret) break; if (index != 0 && frameSize.type != V4L2_FRMSIZE_TYPE_DISCRETE) { LOG(V4L2, Error) << "Non-zero index for non discrete type"; return {}; } switch (frameSize.type) { case V4L2_FRMSIZE_TYPE_DISCRETE: sizes.emplace_back(Size{ frameSize.discrete.width, frameSize.discrete.height }); break; case V4L2_FRMSIZE_TYPE_CONTINUOUS: sizes.emplace_back(Size{ frameSize.stepwise.min_width, frameSize.stepwise.min_height }, Size{ frameSize.stepwise.max_width, frameSize.stepwise.max_height }); break; case V4L2_FRMSIZE_TYPE_STEPWISE: sizes.emplace_back(Size{ frameSize.stepwise.min_width, frameSize.stepwise.min_height }, Size{ frameSize.stepwise.max_width, frameSize.stepwise.max_height }, frameSize.stepwise.step_width, frameSize.stepwise.step_height); break; default: LOG(V4L2, Error) << "Unknown VIDIOC_ENUM_FRAMESIZES type " << frameSize.type; return {}; } } if (ret && ret != -EINVAL) { LOG(V4L2, Error) << "Unable to enumerate frame sizes: " << strerror(-ret); return {}; } return sizes; } /** * \brief Set a selection rectangle \a rect for \a target * \param[in] target The selection target defined by the V4L2_SEL_TGT_* flags * \param[inout] rect The selection rectangle to be applied * * \todo Define a V4L2SelectionTarget enum for the selection target * * \return 0 on success or a negative error code otherwise */ int V4L2VideoDevice::setSelection(unsigned int target, Rectangle *rect) { struct v4l2_selection sel = {}; sel.type = bufferType_; sel.target = target; sel.flags = 0; sel.r.left = rect->x; sel.r.top = rect->y; sel.r.width = rect->width; sel.r.height = rect->height; int ret = ioctl(VIDIOC_S_SELECTION, &sel); if (ret < 0) { LOG(V4L2, Error) << "Unable to set rectangle " << target << ": " << strerror(-ret); return ret; } rect->x = sel.r.left; rect->y = sel.r.top; rect->width = sel.r.width; rect->height = sel.r.height; return 0; } int V4L2VideoDevice::requestBuffers(unsigned int count, enum v4l2_memory memoryType) { struct v4l2_requestbuffers rb = {}; int ret; rb.count = count; rb.type = bufferType_; rb.memory = 
memoryType; ret = ioctl(VIDIOC_REQBUFS, &rb); if (ret < 0) { LOG(V4L2, Error) << "Unable to request " << count << " buffers: " << strerror(-ret); return ret; } if (rb.count < count) { LOG(V4L2, Error) << "Not enough buffers provided by V4L2VideoDevice"; requestBuffers(0, memoryType); return -ENOMEM; } LOG(V4L2, Debug) << rb.count << " buffers requested."; return 0; } /** * \brief Allocate and export buffers from the video device * \param[in] count Number of buffers to allocate * \param[out] buffers Vector to store allocated buffers * * This function wraps buffer allocation with the V4L2 MMAP memory type. It * requests \a count buffers from the driver, allocating the corresponding * memory, and exports them as a set of FrameBuffer objects in \a buffers. Upon * successful return the driver's internal buffer management is initialized in * MMAP mode, and the video device is ready to accept queueBuffer() calls. * * The number of planes and their offsets and sizes are determined by the * currently active format on the device as set by setFormat(). They do not map * to the V4L2 buffer planes, but to colour planes of the pixel format. For * instance, if the active format is formats::NV12, the allocated FrameBuffer * instances will have two planes, for the luma and chroma components, * regardless of whether the device uses V4L2_PIX_FMT_NV12 or * V4L2_PIX_FMT_NV12M. * * Buffers allocated with this function shall later be free with * releaseBuffers(). If buffers have already been allocated with * allocateBuffers() or imported with importBuffers(), this function returns * -EBUSY. * * \return The number of allocated buffers on success or a negative error code * otherwise * \retval -EBUSY buffers have already been allocated or imported */ int V4L2VideoDevice::allocateBuffers(unsigned int count, std::vector<std::unique_ptr<FrameBuffer>> *buffers) { int ret = createBuffers(count, buffers); if (ret < 0) return ret; cache_ = new V4L2BufferCache(*buffers); memoryType_ = V4L2_MEMORY_MMAP; return ret; } /** * \brief Export buffers from the video device * \param[in] count Number of buffers to allocate * \param[out] buffers Vector to store allocated buffers * * This function allocates \a count buffer from the video device and exports * them as dmabuf objects, stored in \a buffers. Unlike allocateBuffers(), this * function leaves the driver's internal buffer management uninitialized. The * video device shall be initialized with importBuffers() or allocateBuffers() * before it can accept queueBuffer() calls. The exported buffers are directly * usable with any V4L2 video device in DMABUF mode, or with other dmabuf * importers. * * The number of planes and their offsets and sizes are determined by the * currently active format on the device as set by setFormat(). They do not map * to the V4L2 buffer planes, but to colour planes of the pixel format. For * instance, if the active format is formats::NV12, the allocated FrameBuffer * instances will have two planes, for the luma and chroma components, * regardless of whether the device uses V4L2_PIX_FMT_NV12 or * V4L2_PIX_FMT_NV12M. * * Multiple independent sets of buffers can be allocated with multiple calls to * this function. Device-specific limitations may apply regarding the minimum * and maximum number of buffers per set, or to total amount of allocated * memory. The exported dmabuf lifetime is tied to the returned \a buffers. To * free a buffer, the caller shall delete the corresponding FrameBuffer * instance. 
No bookkeeping and automatic free is performed by the * V4L2VideoDevice class. * * If buffers have already been allocated with allocateBuffers() or imported * with importBuffers(), this function returns -EBUSY. * * \return The number of allocated buffers on success or a negative error code * otherwise * \retval -EBUSY buffers have already been allocated or imported */ int V4L2VideoDevice::exportBuffers(unsigned int count, std::vector<std::unique_ptr<FrameBuffer>> *buffers) { int ret = createBuffers(count, buffers); if (ret < 0) return ret; requestBuffers(0, V4L2_MEMORY_MMAP); return ret; } int V4L2VideoDevice::createBuffers(unsigned int count, std::vector<std::unique_ptr<FrameBuffer>> *buffers) { if (cache_) { LOG(V4L2, Error) << "Buffers already allocated"; return -EINVAL; } int ret = requestBuffers(count, V4L2_MEMORY_MMAP); if (ret < 0) return ret; for (unsigned i = 0; i < count; ++i) { std::unique_ptr<FrameBuffer> buffer = createBuffer(i); if (!buffer) { LOG(V4L2, Error) << "Unable to create buffer"; requestBuffers(0, V4L2_MEMORY_MMAP); buffers->clear(); return -EINVAL; } buffers->push_back(std::move(buffer)); } return count; } std::unique_ptr<FrameBuffer> V4L2VideoDevice::createBuffer(unsigned int index) { struct v4l2_plane v4l2Planes[VIDEO_MAX_PLANES] = {}; struct v4l2_buffer buf = {}; buf.index = index; buf.type = bufferType_; buf.length = std::size(v4l2Planes); buf.m.planes = v4l2Planes; int ret = ioctl(VIDIOC_QUERYBUF, &buf); if (ret < 0) { LOG(V4L2, Error) << "Unable to query buffer " << index << ": " << strerror(-ret); return nullptr; } const bool multiPlanar = V4L2_TYPE_IS_MULTIPLANAR(buf.type); const unsigned int numPlanes = multiPlanar ? buf.length : 1; if (numPlanes == 0 || numPlanes > VIDEO_MAX_PLANES) { LOG(V4L2, Error) << "Invalid number of planes"; return nullptr; } std::vector<FrameBuffer::Plane> planes; for (unsigned int nplane = 0; nplane < numPlanes; nplane++) { UniqueFD fd = exportDmabufFd(buf.index, nplane); if (!fd.isValid()) return nullptr; FrameBuffer::Plane plane; plane.fd = SharedFD(std::move(fd)); /* * V4L2 API doesn't provide dmabuf offset information of plane. * Set 0 as a placeholder offset. * \todo Set the right offset once V4L2 API provides a way. */ plane.offset = 0; plane.length = multiPlanar ? buf.m.planes[nplane].length : buf.length; planes.push_back(std::move(plane)); } /* * If we have a multi-planar format with a V4L2 single-planar buffer, * split the single V4L2 plane into multiple FrameBuffer planes by * computing the offsets manually. * * The format info is not guaranteed to be valid, as there are no * PixelFormatInfo for metadata formats, so check it first. */ if (formatInfo_->isValid() && formatInfo_->numPlanes() != numPlanes) { /* * There's no valid situation where the number of colour planes * differs from the number of V4L2 planes and the V4L2 buffer * has more than one plane. */ ASSERT(numPlanes == 1u); planes.resize(formatInfo_->numPlanes()); const SharedFD &fd = planes[0].fd; size_t offset = 0; for (auto [i, plane] : utils::enumerate(planes)) { /* * The stride is reported by V4L2 for the first plane * only. Compute the stride of the other planes by * taking the horizontal subsampling factor into * account, which is equal to the bytesPerGroup ratio of * the planes. 
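* * For NV12, for example, the luma and chroma planes have equal bytesPerGroup values, so both planes end up sharing the stride reported for the first plane.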
*/ unsigned int stride = format_.planes[0].bpl * formatInfo_->planes[i].bytesPerGroup / formatInfo_->planes[0].bytesPerGroup; plane.fd = fd; plane.offset = offset; plane.length = formatInfo_->planeSize(format_.size.height, i, stride); offset += plane.length; } } return std::make_unique<FrameBuffer>(planes); } UniqueFD V4L2VideoDevice::exportDmabufFd(unsigned int index, unsigned int plane) { struct v4l2_exportbuffer expbuf = {}; int ret; expbuf.type = bufferType_; expbuf.index = index; expbuf.plane = plane; expbuf.flags = O_CLOEXEC | O_RDWR; ret = ioctl(VIDIOC_EXPBUF, &expbuf); if (ret < 0) { LOG(V4L2, Error) << "Failed to export buffer: " << strerror(-ret); return {}; } return UniqueFD(expbuf.fd); } /** * \brief Prepare the device to import \a count buffers * \param[in] count Number of buffers to prepare to import * * This function initializes the driver's buffer management to import buffers * in DMABUF mode. It requests buffers from the driver, but doesn't allocate * memory. * * Upon successful return, the video device is ready to accept queueBuffer() * calls. The buffers to be imported are provided to queueBuffer(), and may be * supplied externally, or come from a previous exportBuffers() call. * * Device initialization performed by this function shall later be cleaned up * with releaseBuffers(). If buffers have already been allocated with * allocateBuffers() or imported with importBuffers(), this function returns * -EBUSY. * * \return 0 on success or a negative error code otherwise * \retval -EBUSY buffers have already been allocated or imported */ int V4L2VideoDevice::importBuffers(unsigned int count) { if (cache_) { LOG(V4L2, Error) << "Buffers already allocated"; return -EINVAL; } memoryType_ = V4L2_MEMORY_DMABUF; int ret = requestBuffers(count, V4L2_MEMORY_DMABUF); if (ret) return ret; cache_ = new V4L2BufferCache(count); LOG(V4L2, Debug) << "Prepared to import " << count << " buffers"; return 0; } /** * \brief Release resources allocated by allocateBuffers() or importBuffers() * * This function resets the driver's internal buffer management that was * initialized by a previous call to allocateBuffers() or importBuffers(). Any * memory allocated by allocateBuffers() is freed. Buffer exported by * exportBuffers(), if any, are not affected. */ int V4L2VideoDevice::releaseBuffers() { if (!cache_) return 0; LOG(V4L2, Debug) << "Releasing buffers"; delete cache_; cache_ = nullptr; return requestBuffers(0, memoryType_); } /** * \brief Queue a buffer to the video device * \param[in] buffer The buffer to be queued * * For capture video devices the \a buffer will be filled with data by the * device. For output video devices the \a buffer shall contain valid data and * will be processed by the device. Once the device has finished processing the * buffer, it will be available for dequeue. * * The best available V4L2 buffer is picked for \a buffer using the V4L2 buffer * cache. * * Note that queueBuffer() will fail if the device is in the process of being * stopped from a streaming state through streamOff(). * * \return 0 on success or a negative error code otherwise */ int V4L2VideoDevice::queueBuffer(FrameBuffer *buffer) { struct v4l2_plane v4l2Planes[VIDEO_MAX_PLANES] = {}; struct v4l2_buffer buf = {}; int ret; if (state_ == State::Stopping) { LOG(V4L2, Error) << "Device is in a stopping state."; return -ESHUTDOWN; } /* * Pipeline handlers should not requeue buffers after releasing the * buffers on the device. Any occurence of this error should be fixed * in the pipeline handler directly. 
*/ if (!cache_) { LOG(V4L2, Fatal) << "No BufferCache available to queue."; return -ENOENT; } ret = cache_->get(*buffer); if (ret < 0) return ret; buf.index = ret; buf.type = bufferType_; buf.memory = memoryType_; buf.field = V4L2_FIELD_NONE; bool multiPlanar = V4L2_TYPE_IS_MULTIPLANAR(buf.type); const std::vector<FrameBuffer::Plane> &planes = buffer->planes(); const unsigned int numV4l2Planes = format_.planesCount; /* * Ensure that the frame buffer has enough planes, and that they're * contiguous if the V4L2 format requires them to be. */ if (planes.size() < numV4l2Planes) { LOG(V4L2, Error) << "Frame buffer has too few planes"; return -EINVAL; } if (planes.size() != numV4l2Planes && !buffer->_d()->isContiguous()) { LOG(V4L2, Error) << "Device format requires contiguous buffer"; return -EINVAL; } if (buf.memory == V4L2_MEMORY_DMABUF) { if (multiPlanar) { for (unsigned int p = 0; p < numV4l2Planes; ++p) v4l2Planes[p].m.fd = planes[p].fd.get(); } else { buf.m.fd = planes[0].fd.get(); } } if (multiPlanar) { buf.length = numV4l2Planes; buf.m.planes = v4l2Planes; } if (V4L2_TYPE_IS_OUTPUT(buf.type)) { const FrameMetadata &metadata = buffer->metadata(); for (const auto &plane : metadata.planes()) { if (!plane.bytesused) LOG(V4L2, Warning) << "byteused == 0 is deprecated"; } if (numV4l2Planes != planes.size()) { /* * If we have a multi-planar buffer with a V4L2 * single-planar format, coalesce all planes. The length * and number of bytes used may only differ in the last * plane as any other situation can't be represented. */ unsigned int bytesused = 0; unsigned int length = 0; for (auto [i, plane] : utils::enumerate(planes)) { bytesused += metadata.planes()[i].bytesused; length += plane.length; if (i != planes.size() - 1 && bytesused != length) { LOG(V4L2, Error) << "Holes in multi-planar buffer not supported"; return -EINVAL; } } if (multiPlanar) { v4l2Planes[0].bytesused = bytesused; v4l2Planes[0].length = length; } else { buf.bytesused = bytesused; buf.length = length; } } else if (multiPlanar) { /* * If we use the multi-planar API, fill in the planes. * The number of planes in the frame buffer and in the * V4L2 buffer is guaranteed to be equal at this point. */ for (auto [i, plane] : utils::enumerate(planes)) { v4l2Planes[i].bytesused = metadata.planes()[i].bytesused; v4l2Planes[i].length = plane.length; } } else { /* * Single-planar API with a single plane in the buffer * is trivial to handle. */ buf.bytesused = metadata.planes()[0].bytesused; buf.length = planes[0].length; } /* * Timestamps are to be supplied if the device is a mem-to-mem * device. The drivers will have V4L2_BUF_FLAG_TIMESTAMP_COPY * set hence these timestamps will be copied from the output * buffers to capture buffers. If the device is not mem-to-mem, * there is no harm in setting the timestamps as they will be * ignored (and over-written). 
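* * The 64-bit nanosecond timestamp from FrameMetadata is converted below to the struct timeval representation expected by V4L2, split into seconds and microseconds.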
*/ buf.timestamp.tv_sec = metadata.timestamp / 1000000000; buf.timestamp.tv_usec = (metadata.timestamp / 1000) % 1000000; } LOG(V4L2, Debug) << "Queueing buffer " << buf.index; ret = ioctl(VIDIOC_QBUF, &buf); if (ret < 0) { LOG(V4L2, Error) << "Failed to queue buffer " << buf.index << ": " << strerror(-ret); return ret; } if (queuedBuffers_.empty()) { fdBufferNotifier_->setEnabled(true); if (watchdogDuration_) watchdog_.start(std::chrono::duration_cast<std::chrono::milliseconds>(watchdogDuration_)); } queuedBuffers_[buf.index] = buffer; return 0; } /** * \brief Slot to handle completed buffer events from the V4L2 video device * * When this slot is called, a Buffer has become available from the device, and * will be emitted through the bufferReady Signal. * * For Capture video devices the FrameBuffer will contain valid data. * For Output video devices the FrameBuffer can be considered empty. */ void V4L2VideoDevice::bufferAvailable() { FrameBuffer *buffer = dequeueBuffer(); if (!buffer) return; /* Notify anyone listening to the device. */ bufferReady.emit(buffer); } /** * \brief Dequeue the next available buffer from the video device * * This function dequeues the next available buffer from the device. If no * buffer is available to be dequeued it will return nullptr immediately. * * \return A pointer to the dequeued buffer on success, or nullptr otherwise */ FrameBuffer *V4L2VideoDevice::dequeueBuffer() { struct v4l2_buffer buf = {}; struct v4l2_plane planes[VIDEO_MAX_PLANES] = {}; int ret; buf.type = bufferType_; buf.memory = memoryType_; bool multiPlanar = V4L2_TYPE_IS_MULTIPLANAR(buf.type); if (multiPlanar) { buf.length = VIDEO_MAX_PLANES; buf.m.planes = planes; } ret = ioctl(VIDIOC_DQBUF, &buf); if (ret < 0) { LOG(V4L2, Error) << "Failed to dequeue buffer: " << strerror(-ret); return nullptr; } LOG(V4L2, Debug) << "Dequeuing buffer " << buf.index; /* * If the video node fails to stream-on successfully (which can occur * when queuing a buffer), a vb2 kernel bug can lead to the buffer which * returns a failure upon queuing being mistakenly kept in the kernel. * This leads to the kernel notifying us that a buffer is available to * dequeue, which we have no awareness of being queued, and thus we will * not find it in the queuedBuffers_ list. * * Whilst this kernel bug has been fixed in mainline, ensure that we * safely ignore buffers which are unexpected to prevent crashes on * older kernels. */ auto it = queuedBuffers_.find(buf.index); if (it == queuedBuffers_.end()) { LOG(V4L2, Error) << "Dequeued unexpected buffer index " << buf.index; return nullptr; } cache_->put(buf.index); FrameBuffer *buffer = it->second; queuedBuffers_.erase(it); if (queuedBuffers_.empty()) { fdBufferNotifier_->setEnabled(false); watchdog_.stop(); } else if (watchdogDuration_) { /* * Restart the watchdog timer if there are buffers still queued * in the device. */ watchdog_.start(std::chrono::duration_cast<std::chrono::milliseconds>(watchdogDuration_)); } FrameMetadata &metadata = buffer->_d()->metadata(); metadata.status = buf.flags & V4L2_BUF_FLAG_ERROR ? FrameMetadata::FrameError : FrameMetadata::FrameSuccess; metadata.sequence = buf.sequence; metadata.timestamp = buf.timestamp.tv_sec * 1000000000ULL + buf.timestamp.tv_usec * 1000ULL; if (V4L2_TYPE_IS_OUTPUT(buf.type)) return buffer; /* * Detect kernel drivers which do not reset the sequence number to zero * on stream start. 
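* * The sequence number of the first dequeued frame is latched and subtracted from all subsequent frames, so that the sequence exposed to users always starts at zero.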
*/ if (!firstFrame_) { if (buf.sequence) LOG(V4L2, Info) << "Zero sequence expected for first frame (got " << buf.sequence << ")"; firstFrame_ = buf.sequence; } metadata.sequence -= firstFrame_.value(); unsigned int numV4l2Planes = multiPlanar ? buf.length : 1; if (numV4l2Planes != buffer->planes().size()) { /* * If we have a multi-planar buffer with a V4L2 * single-planar format, split the V4L2 buffer across * the buffer planes. Only the last plane may have less * bytes used than its length. */ if (numV4l2Planes != 1) { LOG(V4L2, Error) << "Invalid number of planes (" << numV4l2Planes << " != " << buffer->planes().size() << ")"; metadata.status = FrameMetadata::FrameError; return buffer; } /* * With a V4L2 single-planar format, all the data is stored in * a single memory plane. The number of bytes used is conveyed * through that plane when using the V4L2 multi-planar API, or * set directly in the buffer when using the V4L2 single-planar * API. */ unsigned int bytesused = multiPlanar ? planes[0].bytesused : buf.bytesused; unsigned int remaining = bytesused; for (auto [i, plane] : utils::enumerate(buffer->planes())) { if (!remaining) { LOG(V4L2, Error) << "Dequeued buffer (" << bytesused << " bytes) too small for plane lengths " << utils::join(buffer->planes(), "/", [](const FrameBuffer::Plane &p) { return p.length; }); metadata.status = FrameMetadata::FrameError; return buffer; } metadata.planes()[i].bytesused = std::min(plane.length, remaining); remaining -= metadata.planes()[i].bytesused; } } else if (multiPlanar) { /* * If we use the multi-planar API, fill in the planes. * The number of planes in the frame buffer and in the * V4L2 buffer is guaranteed to be equal at this point. */ for (unsigned int i = 0; i < numV4l2Planes; ++i) metadata.planes()[i].bytesused = planes[i].bytesused; } else { metadata.planes()[0].bytesused = buf.bytesused; } return buffer; } /** * \var V4L2VideoDevice::bufferReady * \brief A Signal emitted when a framebuffer completes */ /** * \brief Start the video stream * \return 0 on success or a negative error code otherwise */ int V4L2VideoDevice::streamOn() { int ret; firstFrame_.reset(); ret = ioctl(VIDIOC_STREAMON, &bufferType_); if (ret < 0) { LOG(V4L2, Error) << "Failed to start streaming: " << strerror(-ret); return ret; } state_ = State::Streaming; if (watchdogDuration_ && !queuedBuffers_.empty()) watchdog_.start(std::chrono::duration_cast<std::chrono::milliseconds>(watchdogDuration_)); return 0; } /** * \brief Stop the video stream * * Buffers that are still queued when the video stream is stopped are * immediately dequeued with their status set to FrameMetadata::FrameCancelled, * and the bufferReady signal is emitted for them. The order in which those * buffers are dequeued is not specified. * * This will be a no-op if the stream is not started in the first place and * has no queued buffers. * * \return 0 on success or a negative error code otherwise */ int V4L2VideoDevice::streamOff() { int ret; if (state_ != State::Streaming && queuedBuffers_.empty()) return 0; if (watchdogDuration_.count()) watchdog_.stop(); ret = ioctl(VIDIOC_STREAMOFF, &bufferType_); if (ret < 0) { LOG(V4L2, Error) << "Failed to stop streaming: " << strerror(-ret); return ret; } state_ = State::Stopping; /* Send back all queued buffers. 
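* Each buffer still queued is returned to the cache, marked as cancelled and emitted through the bufferReady signal so that its owner can reclaim it.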
*/ for (auto it : queuedBuffers_) { FrameBuffer *buffer = it.second; FrameMetadata &metadata = buffer->_d()->metadata(); cache_->put(it.first); metadata.status = FrameMetadata::FrameCancelled; bufferReady.emit(buffer); } ASSERT(cache_->isEmpty()); queuedBuffers_.clear(); fdBufferNotifier_->setEnabled(false); state_ = State::Stopped; return 0; } /** * \brief Set the dequeue timeout value * \param[in] timeout The timeout value to be used * * Sets a timeout value, given by \a timeout, that will be used by a watchdog * timer to ensure buffer dequeue events are periodically occurring when the * device is streaming. The watchdog timer is only active when the device is * streaming, so it is not necessary to disable it when the device stops * streaming. The timeout value can be safely updated at any time. * * If the timer expires, the \ref V4L2VideoDevice::dequeueTimeout signal is * emitted. This can typically be used by pipeline handlers to be notified of * stalled devices. * * Set \a timeout to 0 to disable the watchdog timer. */ void V4L2VideoDevice::setDequeueTimeout(utils::Duration timeout) { watchdogDuration_ = timeout; watchdog_.stop(); if (watchdogDuration_ && state_ == State::Streaming && !queuedBuffers_.empty()) watchdog_.start(std::chrono::duration_cast<std::chrono::milliseconds>(timeout)); } /** * \var V4L2VideoDevice::dequeueTimeout * \brief A Signal emitted when the dequeue watchdog timer expires */ /** * \brief Slot to handle an expired dequeue timer * * When this slot is called, the time between successive dequeue events is over * the required timeout. Emit the \ref V4L2VideoDevice::dequeueTimeout signal. */ void V4L2VideoDevice::watchdogExpired() { LOG(V4L2, Warning) << "Dequeue timer of " << watchdogDuration_ << " has expired!"; dequeueTimeout.emit(); } /** * \brief Create a new video device instance from \a entity in media device * \a media * \param[in] media The media device where the entity is registered * \param[in] entity The media entity name * * \return A newly created V4L2VideoDevice on success, nullptr otherwise */ std::unique_ptr<V4L2VideoDevice> V4L2VideoDevice::fromEntityName(const MediaDevice *media, const std::string &entity) { MediaEntity *mediaEntity = media->getEntityByName(entity); if (!mediaEntity) return nullptr; return std::make_unique<V4L2VideoDevice>(mediaEntity); } /** * \brief Convert \a PixelFormat to a V4L2PixelFormat supported by the device * \param[in] pixelFormat The PixelFormat to convert * * Convert \a pixelformat to a V4L2 FourCC that is known to be supported by * the video device. * * A V4L2VideoDevice may support different V4L2 pixel formats that map the same * PixelFormat. This is the case of the contiguous and non-contiguous variants * of multiplanar formats, and with the V4L2 MJPEG and JPEG pixel formats. * Converting a PixelFormat to a V4L2PixelFormat may thus have multiple answers. * * This function converts the \a pixelFormat using the list of V4L2 pixel * formats that the V4L2VideoDevice supports. This guarantees that the returned * V4L2PixelFormat will be valid for the device. If multiple matches are still * possible, contiguous variants are preferred. If the \a pixelFormat is not * supported by the device, the function returns an invalid V4L2PixelFormat. 
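* * For example, formats::NV12 maps to both V4L2_PIX_FMT_NV12 and V4L2_PIX_FMT_NV12M; on a device that supports both variants, the contiguous V4L2_PIX_FMT_NV12 is preferred and returned.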
* * \return The V4L2PixelFormat corresponding to \a pixelFormat if supported by * the device, or an invalid V4L2PixelFormat otherwise */ V4L2PixelFormat V4L2VideoDevice::toV4L2PixelFormat(const PixelFormat &pixelFormat) const { const std::vector<V4L2PixelFormat> &v4l2PixelFormats = V4L2PixelFormat::fromPixelFormat(pixelFormat); for (const V4L2PixelFormat &v4l2Format : v4l2PixelFormats) { if (pixelFormats_.count(v4l2Format)) return v4l2Format; } return {}; } /** * \class V4L2M2MDevice * \brief Memory-to-Memory video device * * The V4L2M2MDevice manages two V4L2VideoDevice instances on the same * deviceNode which operate together using two queues to implement the V4L2 * Memory to Memory API. * * The two devices should be opened by calling open() on the V4L2M2MDevice, and * can be closed by calling close on the V4L2M2MDevice. * * Calling V4L2VideoDevice::open() and V4L2VideoDevice::close() on the capture * or output V4L2VideoDevice is not permitted. */ /** * \fn V4L2M2MDevice::output * \brief Retrieve the output V4L2VideoDevice instance * \return The output V4L2VideoDevice instance */ /** * \fn V4L2M2MDevice::capture * \brief Retrieve the capture V4L2VideoDevice instance * \return The capture V4L2VideoDevice instance */ /** * \brief Create a new V4L2M2MDevice from the \a deviceNode * \param[in] deviceNode The file-system path to the video device node */ V4L2M2MDevice::V4L2M2MDevice(const std::string &deviceNode) : deviceNode_(deviceNode) { output_ = new V4L2VideoDevice(deviceNode); capture_ = new V4L2VideoDevice(deviceNode); } V4L2M2MDevice::~V4L2M2MDevice() { delete capture_; delete output_; } /** * \brief Open a V4L2 Memory to Memory device * * Open the device node and prepare the two V4L2VideoDevice instances to handle * their respective buffer queues. * * \return 0 on success or a negative error code otherwise */ int V4L2M2MDevice::open() { int ret; /* * The output and capture V4L2VideoDevice instances use the same file * handle for the same device node. */ SharedFD fd(syscall(SYS_openat, AT_FDCWD, deviceNode_.c_str(), O_RDWR | O_NONBLOCK)); if (!fd.isValid()) { ret = -errno; LOG(V4L2, Error) << "Failed to open V4L2 M2M device: " << strerror(-ret); return ret; } ret = output_->open(fd, V4L2_BUF_TYPE_VIDEO_OUTPUT); if (ret) goto err; ret = capture_->open(fd, V4L2_BUF_TYPE_VIDEO_CAPTURE); if (ret) goto err; return 0; err: close(); return ret; } /** * \brief Close the memory-to-memory device, releasing any resources acquired by * open() */ void V4L2M2MDevice::close() { capture_->close(); output_->close(); } } /* namespace libcamera */
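/*
 * Illustrative usage sketch, not part of the original file: the typical
 * capture sequence a pipeline handler performs with a V4L2VideoDevice.
 * The device node path, resolution and buffer count are example values,
 * and a running event loop that dispatches the bufferReady signal is
 * assumed to exist.
 */
#include <memory>
#include <vector>

#include <libcamera/framebuffer.h>
#include <libcamera/geometry.h>

#include "libcamera/internal/v4l2_videodevice.h"

using namespace libcamera;

static void onBufferReady(FrameBuffer *buffer)
{
	/* Consume the completed frame, then requeue the buffer. */
	static_cast<void>(buffer);
}

static int captureSketch()
{
	V4L2VideoDevice dev("/dev/video0");
	if (dev.open() < 0)
		return -1;

	/* Fetch the current format, adjust the size and apply it. */
	V4L2DeviceFormat format;
	if (dev.getFormat(&format) < 0)
		return -1;
	format.size = Size(1920, 1080);
	if (dev.setFormat(&format) < 0)
		return -1;

	/* Allocate four MMAP buffers and queue them all for capture. */
	std::vector<std::unique_ptr<FrameBuffer>> buffers;
	if (dev.allocateBuffers(4, &buffers) < 0)
		return -1;

	dev.bufferReady.connect(&onBufferReady);

	for (const std::unique_ptr<FrameBuffer> &buffer : buffers)
		dev.queueBuffer(buffer.get());

	/* Completed buffers are now delivered via bufferReady. */
	return dev.streamOn();
}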
repos/libcamera/src/libcamera/process.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2019, Google Inc. * * Process object */ #include "libcamera/internal/process.h" #include <algorithm> #include <dirent.h> #include <fcntl.h> #include <iostream> #include <list> #include <signal.h> #include <string.h> #include <sys/socket.h> #include <sys/types.h> #include <sys/wait.h> #include <unistd.h> #include <vector> #include <libcamera/base/event_notifier.h> #include <libcamera/base/log.h> #include <libcamera/base/utils.h> /** * \file process.h * \brief Process object */ namespace libcamera { LOG_DEFINE_CATEGORY(Process) /** * \class ProcessManager * \brief Manager of processes * * The ProcessManager singleton keeps track of all created Process instances, * and manages the signal handling involved in terminating processes. */ namespace { void sigact(int signal, siginfo_t *info, void *ucontext) { #pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Wunused-result" /* * We're in a signal handler so we can't log any message, and we need * to continue anyway. */ char data = 0; write(ProcessManager::instance()->writePipe(), &data, sizeof(data)); #pragma GCC diagnostic pop const struct sigaction &oldsa = ProcessManager::instance()->oldsa(); if (oldsa.sa_flags & SA_SIGINFO) { oldsa.sa_sigaction(signal, info, ucontext); } else { if (oldsa.sa_handler != SIG_IGN && oldsa.sa_handler != SIG_DFL) oldsa.sa_handler(signal); } } } /* namespace */ void ProcessManager::sighandler() { char data; ssize_t ret = read(pipe_[0].get(), &data, sizeof(data)); if (ret < 0) { LOG(Process, Error) << "Failed to read byte from signal handler pipe"; return; } for (auto it = processes_.begin(); it != processes_.end(); ) { Process *process = *it; int wstatus; pid_t pid = waitpid(process->pid_, &wstatus, WNOHANG); if (process->pid_ != pid) { ++it; continue; } it = processes_.erase(it); process->died(wstatus); } } /** * \brief Register process with process manager * \param[in] proc Process to register * * This function registers the \a proc with the process manager. It * shall be called by the parent process after successfully forking, in * order to let the parent signal process termination. */ void ProcessManager::registerProcess(Process *proc) { processes_.push_back(proc); } ProcessManager *ProcessManager::self_ = nullptr; /** * \brief Construct a ProcessManager instance * * The ProcessManager class is meant to only be instantiated once, by the * CameraManager. */ ProcessManager::ProcessManager() { if (self_) LOG(Process, Fatal) << "Multiple ProcessManager objects are not allowed"; sigaction(SIGCHLD, NULL, &oldsa_); struct sigaction sa; memset(&sa, 0, sizeof(sa)); sa.sa_sigaction = &sigact; memcpy(&sa.sa_mask, &oldsa_.sa_mask, sizeof(sa.sa_mask)); sigaddset(&sa.sa_mask, SIGCHLD); sa.sa_flags = oldsa_.sa_flags | SA_SIGINFO; sigaction(SIGCHLD, &sa, NULL); int pipe[2]; if (pipe2(pipe, O_CLOEXEC | O_DIRECT | O_NONBLOCK)) LOG(Process, Fatal) << "Failed to initialize pipe for signal handling"; pipe_[0] = UniqueFD(pipe[0]); pipe_[1] = UniqueFD(pipe[1]); sigEvent_ = new EventNotifier(pipe_[0].get(), EventNotifier::Read); sigEvent_->activated.connect(this, &ProcessManager::sighandler); self_ = this; } ProcessManager::~ProcessManager() { sigaction(SIGCHLD, &oldsa_, NULL); delete sigEvent_; self_ = nullptr; } /** * \brief Retrieve the Process manager instance * * The ProcessManager is constructed by the CameraManager. This function shall * be used to retrieve the single instance of the manager. 
* * \return The Process manager instance */ ProcessManager *ProcessManager::instance() { return self_; } /** * \brief Retrieve the Process manager's write pipe * * This function is meant only to be used by the static signal handler. * * \return Pipe for writing */ int ProcessManager::writePipe() const { return pipe_[1].get(); } /** * \brief Retrive the old signal action data * * This function is meant only to be used by the static signal handler. * * \return The old signal action data */ const struct sigaction &ProcessManager::oldsa() const { return oldsa_; } /** * \class Process * \brief Process object * * The Process class models a process, and simplifies spawning new processes * and monitoring the exiting of a process. */ /** * \enum Process::ExitStatus * \brief Exit status of process * \var Process::NotExited * The process hasn't exited yet * \var Process::NormalExit * The process exited normally, either via exit() or returning from main * \var Process::SignalExit * The process was terminated by a signal (this includes crashing) */ Process::Process() : pid_(-1), running_(false), exitStatus_(NotExited), exitCode_(0) { } Process::~Process() { kill(); /* \todo wait for child process to exit */ } /** * \brief Fork and exec a process, and close fds * \param[in] path Path to executable * \param[in] args Arguments to pass to executable (optional) * \param[in] fds Vector of file descriptors to keep open (optional) * * Fork a process, and exec the executable specified by path. Prior to * exec'ing, but after forking, all file descriptors except for those * specified in fds will be closed. * * All indexes of args will be incremented by 1 before being fed to exec(), * so args[0] should not need to be equal to path. * * \return Zero on successful fork, exec, and closing the file descriptors, * or a negative error code otherwise */ int Process::start(const std::string &path, const std::vector<std::string> &args, const std::vector<int> &fds) { int ret; if (running_) return 0; int childPid = fork(); if (childPid == -1) { ret = -errno; LOG(Process, Error) << "Failed to fork: " << strerror(-ret); return ret; } else if (childPid) { pid_ = childPid; ProcessManager::instance()->registerProcess(this); running_ = true; return 0; } else { if (isolate()) _exit(EXIT_FAILURE); closeAllFdsExcept(fds); const char *file = utils::secure_getenv("LIBCAMERA_LOG_FILE"); if (file && strcmp(file, "syslog")) unsetenv("LIBCAMERA_LOG_FILE"); const char **argv = new const char *[args.size() + 2]; unsigned int len = args.size(); argv[0] = path.c_str(); for (unsigned int i = 0; i < len; i++) argv[i+1] = args[i].c_str(); argv[len+1] = nullptr; execv(path.c_str(), (char **)argv); exit(EXIT_FAILURE); } } void Process::closeAllFdsExcept(const std::vector<int> &fds) { std::vector<int> v(fds); sort(v.begin(), v.end()); DIR *dir = opendir("/proc/self/fd"); if (!dir) return; int dfd = dirfd(dir); struct dirent *ent; while ((ent = readdir(dir)) != nullptr) { char *endp; int fd = strtoul(ent->d_name, &endp, 10); if (*endp) continue; if (fd >= 0 && fd != dfd && !std::binary_search(v.begin(), v.end(), fd)) close(fd); } closedir(dir); } int Process::isolate() { int ret = unshare(CLONE_NEWUSER | CLONE_NEWNET); if (ret) { ret = -errno; LOG(Process, Error) << "Failed to unshare execution context: " << strerror(-ret); return ret; } return 0; } /** * \brief SIGCHLD handler * \param[in] wstatus The status as output by waitpid() * * This function is called when the process associated with Process terminates. 
* It emits the Process::finished signal. */ void Process::died(int wstatus) { running_ = false; exitStatus_ = WIFEXITED(wstatus) ? NormalExit : SignalExit; exitCode_ = exitStatus_ == NormalExit ? WEXITSTATUS(wstatus) : -1; finished.emit(exitStatus_, exitCode_); } /** * \fn Process::exitStatus() * \brief Retrieve the exit status of the process * * Return the exit status of the process, that is, whether the process * has exited via exit() or returning from main, or if the process was * terminated by a signal. * * \sa ExitStatus * * \return The process exit status */ /** * \fn Process::exitCode() * \brief Retrieve the exit code of the process * * This function is only valid if exitStatus() returned NormalExit. * * \return Exit code */ /** * \var Process::finished * * Signal that is emitted when the process is confirmed to have terminated. */ /** * \brief Kill the process * * Sends SIGKILL to the process. */ void Process::kill() { if (pid_ > 0) ::kill(pid_, SIGKILL); } } /* namespace libcamera */
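/*
 * Illustrative usage sketch, not part of the original file: spawning a
 * helper executable and observing its termination through the finished
 * signal. A ProcessManager (normally created by the CameraManager) and a
 * running event loop are assumed; the executable path and argument are
 * example values.
 */
#include <iostream>

#include "libcamera/internal/process.h"

using namespace libcamera;

static void onFinished(Process::ExitStatus status, int code)
{
	if (status == Process::NormalExit)
		std::cout << "helper exited with code " << code << std::endl;
	else
		std::cout << "helper was terminated by a signal" << std::endl;
}

static int runHelperSketch(Process &proc)
{
	proc.finished.connect(&onFinished);

	/* args are passed from argv[1] onwards; argv[0] is set to the path. */
	return proc.start("/usr/bin/example-helper", { "--verbose" });
}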
repos/libcamera/src/libcamera/tracepoints.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2020, Google Inc. * * Tracepoints with lttng */ #define TRACEPOINT_CREATE_PROBES #define TRACEPOINT_DEFINE #include "libcamera/internal/tracepoints.h"
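/*
 * Illustrative note, not part of the original file: TRACEPOINT_CREATE_PROBES
 * and TRACEPOINT_DEFINE follow the standard LTTng-UST convention that exactly
 * one translation unit in a project expands the tracepoint provider into
 * probe code. Every other user only includes the header and emits events,
 * for instance through libcamera's LIBCAMERA_TRACEPOINT() wrapper; the
 * tracepoint name and argument below are hypothetical.
 *
 *     #include "libcamera/internal/tracepoints.h"
 *
 *     void emitExample()
 *     {
 *             LIBCAMERA_TRACEPOINT(example_event, 42);
 *     }
 */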
repos/libcamera/src/libcamera/mapped_framebuffer.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2021, Google Inc. * * Mapped Framebuffer support */ #include "libcamera/internal/mapped_framebuffer.h" #include <algorithm> #include <errno.h> #include <map> #include <sys/mman.h> #include <unistd.h> #include <libcamera/base/log.h> /** * \file libcamera/internal/mapped_framebuffer.h * \brief Frame buffer memory mapping support */ namespace libcamera { LOG_DECLARE_CATEGORY(Buffer) /** * \class MappedBuffer * \brief Provide an interface to support managing memory mapped buffers * * The MappedBuffer interface provides access to a set of MappedPlanes which * are available for access by the CPU. * * This class is not meant to be constructed directly, but instead derived * classes should be used to implement the correct mapping of a source buffer. * * This allows treating CPU accessible memory through a generic interface * regardless of whether it originates from a libcamera FrameBuffer or other * source. */ /** * \typedef MappedBuffer::Plane * \brief A mapped region of memory accessible to the CPU * * The MappedBuffer::Plane uses the Span interface to describe the mapped memory * region. */ /** * \brief Construct an empty MappedBuffer */ MappedBuffer::MappedBuffer() : error_(0) { } /** * \brief Move constructor, construct the MappedBuffer with the contents of \a * other using move semantics * \param[in] other The other MappedBuffer * * Moving a MappedBuffer moves the mappings contained in the \a other to the new * MappedBuffer and invalidates the \a other. * * No mappings are unmapped or destroyed in this process. */ MappedBuffer::MappedBuffer(MappedBuffer &&other) { *this = std::move(other); } /** * \brief Move assignment operator, replace the mappings with those of \a other * \param[in] other The other MappedBuffer * * Moving a MappedBuffer moves the mappings contained in the \a other to the new * MappedBuffer and invalidates the \a other. * * No mappings are unmapped or destroyed in this process. */ MappedBuffer &MappedBuffer::operator=(MappedBuffer &&other) { error_ = other.error_; planes_ = std::move(other.planes_); maps_ = std::move(other.maps_); other.error_ = -ENOENT; return *this; } MappedBuffer::~MappedBuffer() { for (Plane &map : maps_) munmap(map.data(), map.size()); } /** * \fn MappedBuffer::isValid() * \brief Check if the MappedBuffer instance is valid * \return True if the MappedBuffer has valid mappings, false otherwise */ /** * \fn MappedBuffer::error() * \brief Retrieve the map error status * * This function retrieves the error status from the MappedBuffer. * The error status is a negative number as defined by errno.h. If * no error occurred, this function returns 0. * * \return The map error code */ /** * \fn MappedBuffer::planes() * \brief Retrieve the mapped planes * * This function retrieves the successfully mapped planes stored as a vector * of Span<uint8_t> to provide access to the mapped memory. * * \return A vector of the mapped planes */ /** * \var MappedBuffer::error_ * \brief Stores the error value if present * * MappedBuffer derived classes shall set this to a negative value as defined * by errno.h if an error occured during the mapping process. */ /** * \var MappedBuffer::planes_ * \brief Stores the internal mapped planes * * MappedBuffer derived classes shall store the mappings they create in this * vector which points the beginning of mapped plane addresses. 
*/ /** * \var MappedBuffer::maps_ * \brief Stores the mapped buffer * * MappedBuffer derived classes shall store the mappings they create in this * vector which is parsed during destruct to unmap any memory mappings which * completed successfully. */ /** * \class MappedFrameBuffer * \brief Map a FrameBuffer using the MappedBuffer interface */ /** * \enum MappedFrameBuffer::MapFlag * \brief Specify the mapping mode for the FrameBuffer * \var MappedFrameBuffer::Read * \brief Create a read-only mapping * \var MappedFrameBuffer::Write * \brief Create a write-only mapping * \var MappedFrameBuffer::ReadWrite * \brief Create a mapping that can be both read and written */ /** * \typedef MappedFrameBuffer::MapFlags * \brief A bitwise combination of MappedFrameBuffer::MapFlag values */ /** * \brief Map all planes of a FrameBuffer * \param[in] buffer FrameBuffer to be mapped * \param[in] flags Protection flags to apply to map * * Construct an object to map a frame buffer for CPU access. The mapping can be * made as Read only, Write only or support Read and Write operations by setting * the MapFlag flags accordingly. */ MappedFrameBuffer::MappedFrameBuffer(const FrameBuffer *buffer, MapFlags flags) { ASSERT(!buffer->planes().empty()); planes_.reserve(buffer->planes().size()); int mmapFlags = 0; if (flags & MapFlag::Read) mmapFlags |= PROT_READ; if (flags & MapFlag::Write) mmapFlags |= PROT_WRITE; struct MappedBufferInfo { uint8_t *address = nullptr; size_t mapLength = 0; size_t dmabufLength = 0; }; std::map<int, MappedBufferInfo> mappedBuffers; for (const FrameBuffer::Plane &plane : buffer->planes()) { const int fd = plane.fd.get(); if (mappedBuffers.find(fd) == mappedBuffers.end()) { const size_t length = lseek(fd, 0, SEEK_END); mappedBuffers[fd] = MappedBufferInfo{ nullptr, 0, length }; } const size_t length = mappedBuffers[fd].dmabufLength; if (plane.offset > length || plane.offset + plane.length > length) { LOG(Buffer, Fatal) << "plane is out of buffer: " << "buffer length=" << length << ", plane offset=" << plane.offset << ", plane length=" << plane.length; return; } size_t &mapLength = mappedBuffers[fd].mapLength; mapLength = std::max(mapLength, static_cast<size_t>(plane.offset + plane.length)); } for (const FrameBuffer::Plane &plane : buffer->planes()) { const int fd = plane.fd.get(); auto &info = mappedBuffers[fd]; if (!info.address) { void *address = mmap(nullptr, info.mapLength, mmapFlags, MAP_SHARED, fd, 0); if (address == MAP_FAILED) { error_ = -errno; LOG(Buffer, Error) << "Failed to mmap plane: " << strerror(-error_); return; } info.address = static_cast<uint8_t *>(address); maps_.emplace_back(info.address, info.mapLength); } planes_.emplace_back(info.address + plane.offset, plane.length); } } } /* namespace libcamera */
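The MappedFrameBuffer documentation above describes how a FrameBuffer is mapped for CPU access. A minimal usage sketch follows, assuming a valid FrameBuffer with at least one plane (for instance obtained from a FrameBufferAllocator); the clearFirstPlane() helper name is illustrative only.

#include <algorithm>

#include <libcamera/framebuffer.h>

#include "libcamera/internal/mapped_framebuffer.h"

using namespace libcamera;

/* Map a buffer write-only and zero its first plane through the CPU. */
void clearFirstPlane(const FrameBuffer *buffer)
{
	MappedFrameBuffer mapped(buffer, MappedFrameBuffer::MapFlag::Write);
	if (!mapped.isValid())
		return; /* error() holds the negative errno of the failure */

	/* planes() exposes one Span<uint8_t> per FrameBuffer plane. */
	MappedBuffer::Plane plane = mapped.planes()[0];
	std::fill(plane.begin(), plane.end(), 0);
}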
0
repos/libcamera/src
repos/libcamera/src/libcamera/converter.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright 2022 NXP * * Generic format converter interface */ #include "libcamera/internal/converter.h" #include <algorithm> #include <libcamera/base/log.h> #include "libcamera/internal/media_device.h" /** * \file internal/converter.h * \brief Abstract converter */ namespace libcamera { LOG_DEFINE_CATEGORY(Converter) /** * \class Converter * \brief Abstract Base Class for converter * * The Converter class is an Abstract Base Class defining the interfaces of * converter implementations. * * Converters offer scaling and pixel format conversion services on an input * stream. The converter can output multiple streams with individual conversion * parameters from the same input stream. */ /** * \brief Construct a Converter instance * \param[in] media The media device implementing the converter * * This searches for the entity implementing the data streaming function in the * media graph entities and use its device node as the converter device node. */ Converter::Converter(MediaDevice *media) { const std::vector<MediaEntity *> &entities = media->entities(); auto it = std::find_if(entities.begin(), entities.end(), [](MediaEntity *entity) { return entity->function() == MEDIA_ENT_F_IO_V4L; }); if (it == entities.end()) { LOG(Converter, Error) << "No entity suitable for implementing a converter in " << media->driver() << " entities list."; return; } deviceNode_ = (*it)->deviceNode(); } Converter::~Converter() { } /** * \fn Converter::loadConfiguration() * \brief Load converter configuration from file * \param[in] filename The file name path * * Load converter dependent configuration parameters to apply on the hardware. * * \return 0 on success or a negative error code otherwise */ /** * \fn Converter::isValid() * \brief Check if the converter configuration is valid * \return True is the converter is valid, false otherwise */ /** * \fn Converter::formats() * \brief Retrieve the list of supported pixel formats for an input pixel format * \param[in] input Input pixel format to retrieve output pixel format list for * \return The list of supported output pixel formats */ /** * \fn Converter::sizes() * \brief Retrieve the range of minimum and maximum output sizes for an input size * \param[in] input Input stream size to retrieve range for * \return A range of output image sizes */ /** * \fn Converter::strideAndFrameSize() * \brief Retrieve the output stride and frame size for an input configutation * \param[in] pixelFormat Input stream pixel format * \param[in] size Input stream size * \return A tuple indicating the stride and frame size or an empty tuple on error */ /** * \fn Converter::configure() * \brief Configure a set of output stream conversion from an input stream * \param[in] inputCfg Input stream configuration * \param[out] outputCfgs A list of output stream configurations * \return 0 on success or a negative error code otherwise */ /** * \fn Converter::exportBuffers() * \brief Export buffers from the converter device * \param[in] output Output stream index exporting the buffers * \param[in] count Number of buffers to allocate * \param[out] buffers Vector to store allocated buffers * * This function operates similarly to V4L2VideoDevice::exportBuffers() on the * output stream indicated by the \a output index. 
* * \return The number of allocated buffers on success or a negative error code * otherwise */ /** * \fn Converter::start() * \brief Start the converter streaming operation * \return 0 on success or a negative error code otherwise */ /** * \fn Converter::stop() * \brief Stop the converter streaming operation */ /** * \fn Converter::queueBuffers() * \brief Queue buffers to converter device * \param[in] input The frame buffer to apply the conversion * \param[out] outputs The container holding the output stream indexes and * their respective frame buffer outputs. * * This function queues the \a input frame buffer on the output streams of the * \a outputs map key and retrieve the output frame buffer indicated by the * buffer map value. * * \return 0 on success or a negative error code otherwise */ /** * \var Converter::inputBufferReady * \brief A signal emitted when the input frame buffer completes */ /** * \var Converter::outputBufferReady * \brief A signal emitted on each frame buffer completion of the output queue */ /** * \fn Converter::deviceNode() * \brief The converter device node attribute accessor * \return The converter device node string */ /** * \class ConverterFactoryBase * \brief Base class for converter factories * * The ConverterFactoryBase class is the base of all specializations of the * ConverterFactory class template. It implements the factory registration, * maintains a registry of factories, and provides access to the registered * factories. */ /** * \brief Construct a converter factory base * \param[in] name Name of the converter class * \param[in] compatibles Name aliases of the converter class * * Creating an instance of the factory base registers it with the global list of * factories, accessible through the factories() function. * * The factory \a name is used as unique identifier. If the converter * implementation fully relies on a generic framework, the name should be the * same as the framework. Otherwise, if the implementation is specialized, the * factory name should match the driver name implementing the function. * * The factory \a compatibles holds a list of driver names implementing a generic * subsystem without any personalizations. */ ConverterFactoryBase::ConverterFactoryBase(const std::string name, std::initializer_list<std::string> compatibles) : name_(name), compatibles_(compatibles) { registerType(this); } /** * \fn ConverterFactoryBase::compatibles() * \return The list of compatible name aliases of the converter */ /** * \brief Create an instance of the converter corresponding to the media device * \param[in] media The media device to create the converter for * * The converter is created by matching the factory name or any of its * compatible aliases with the media device driver name. * * \return A new instance of the converter subclass corresponding to the media * device, or null if the media device driver name doesn't match anything */ std::unique_ptr<Converter> ConverterFactoryBase::create(MediaDevice *media) { const std::vector<ConverterFactoryBase *> &factories = ConverterFactoryBase::factories(); for (const ConverterFactoryBase *factory : factories) { const std::vector<std::string> &compatibles = factory->compatibles(); auto it = std::find(compatibles.begin(), compatibles.end(), media->driver()); if (it == compatibles.end() && media->driver() != factory->name_) continue; LOG(Converter, Debug) << "Creating converter from " << factory->name_ << " factory with " << (it == compatibles.end() ? 
"no" : media->driver()) << " alias."; std::unique_ptr<Converter> converter = factory->createInstance(media); if (converter->isValid()) return converter; } return nullptr; } /** * \brief Add a converter factory to the registry * \param[in] factory Factory to use to construct the converter class * * The caller is responsible to guarantee the uniqueness of the converter * factory name. */ void ConverterFactoryBase::registerType(ConverterFactoryBase *factory) { std::vector<ConverterFactoryBase *> &factories = ConverterFactoryBase::factories(); factories.push_back(factory); } /** * \brief Retrieve the list of all converter factory names * \return The list of all converter factory names */ std::vector<std::string> ConverterFactoryBase::names() { std::vector<std::string> list; std::vector<ConverterFactoryBase *> &factories = ConverterFactoryBase::factories(); for (ConverterFactoryBase *factory : factories) { list.push_back(factory->name_); for (auto alias : factory->compatibles()) list.push_back(alias); } return list; } /** * \brief Retrieve the list of all converter factories * \return The list of converter factories */ std::vector<ConverterFactoryBase *> &ConverterFactoryBase::factories() { /* * The static factories map is defined inside the function to ensure * it gets initialized on first use, without any dependency on link * order. */ static std::vector<ConverterFactoryBase *> factories; return factories; } /** * \var ConverterFactoryBase::name_ * \brief The name of the factory */ /** * \var ConverterFactoryBase::compatibles_ * \brief The list holding the factory compatibles */ /** * \class ConverterFactory * \brief Registration of ConverterFactory classes and creation of instances * \param _Converter The converter class type for this factory * * To facilitate discovery and instantiation of Converter classes, the * ConverterFactory class implements auto-registration of converter helpers. * Each Converter subclass shall register itself using the REGISTER_CONVERTER() * macro, which will create a corresponding instance of a ConverterFactory * subclass and register it with the static list of factories. */ /** * \fn ConverterFactory::ConverterFactory(const char *name, std::initializer_list<std::string> compatibles) * \brief Construct a converter factory * \details \copydetails ConverterFactoryBase::ConverterFactoryBase */ /** * \fn ConverterFactory::createInstance() const * \brief Create an instance of the Converter corresponding to the factory * \param[in] media Media device pointer * \return A unique pointer to a newly constructed instance of the Converter * subclass corresponding to the factory */ /** * \def REGISTER_CONVERTER * \brief Register a converter with the Converter factory * \param[in] name Converter name used to register the class * \param[in] converter Class name of Converter derived class to register * \param[in] compatibles List of compatible names * * Register a Converter subclass with the factory and make it available to try * and match converters. */ } /* namespace libcamera */
0
repos/libcamera/src
repos/libcamera/src/libcamera/sysfs.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2020, Google Inc. * * Miscellaneous utility functions to access sysfs */ #include "libcamera/internal/sysfs.h" #include <fstream> #include <sstream> #include <sys/stat.h> #include <sys/sysmacros.h> #include <libcamera/base/file.h> #include <libcamera/base/log.h> /** * \file sysfs.h * \brief Miscellaneous utility functions to access sysfs */ namespace libcamera { LOG_DEFINE_CATEGORY(SysFs) namespace sysfs { /** * \brief Retrieve the sysfs path for a character device * \param[in] deviceNode Path to character device node * \return The sysfs path on success or an empty string on failure */ std::string charDevPath(const std::string &deviceNode) { struct stat st; int ret = stat(deviceNode.c_str(), &st); if (ret < 0) { ret = -errno; LOG(SysFs, Error) << "Unable to stat '" << deviceNode << "': " << strerror(-ret); return {}; } std::ostringstream dev("/sys/dev/char/", std::ios_base::ate); dev << major(st.st_rdev) << ":" << minor(st.st_rdev); return dev.str(); } /** * \brief Retrieve the path of the firmware node for a device * \param[in] device Path in sysfs to search * * Physical devices in a system are described by the system firmware. Depending * on the type of platform, devices are identified using different naming * schemes. The Linux kernel abstract those differences with "firmware nodes". * This function retrieves the firmware node path corresponding to the * \a device. * * For DT-based systems, the path is the full name of the DT node that * represents the device. For ACPI-based systems, the path is the absolute * namespace path to the ACPI object that represents the device. In both cases, * the path is guaranteed to be unique and persistent as long as the system * firmware is not modified. * * \return The firmware node path on success or an empty string on failure */ std::string firmwareNodePath(const std::string &device) { std::string fwPath, node; struct stat st; /* Lookup for DT-based systems */ node = device + "/of_node"; if (!stat(node.c_str(), &st)) { char *ofPath = realpath(node.c_str(), nullptr); if (!ofPath) return {}; static const char prefix[] = "/sys/firmware/devicetree"; if (strncmp(ofPath, prefix, strlen(prefix)) == 0) fwPath = ofPath + strlen(prefix); else fwPath = ofPath; free(ofPath); return fwPath; } /* Lookup for ACPI-based systems */ node = device + "/firmware_node/path"; if (File::exists(node)) { std::ifstream file(node); if (!file.is_open()) return {}; std::getline(file, fwPath); file.close(); return fwPath; } return {}; } } /* namespace sysfs */ } /* namespace libcamera */
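The two helpers above combine naturally to derive a persistent identifier for a device: charDevPath() resolves a device node to its sysfs entry, and firmwareNodePath() maps a physical device to its firmware description. A hedged sketch, assuming a V4L2 node at the illustrative path /dev/video0 whose sysfs entry exposes a "device" link to the underlying physical device:

#include <iostream>
#include <string>

#include "libcamera/internal/sysfs.h"

int main()
{
	/* "/sys/dev/char/<major>:<minor>" for the character device node. */
	std::string sysPath = libcamera::sysfs::charDevPath("/dev/video0");
	if (sysPath.empty())
		return 1;

	/* DT or ACPI path of the underlying physical device. */
	std::cout << libcamera::sysfs::firmwareNodePath(sysPath + "/device")
		  << std::endl;

	return 0;
}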
0
repos/libcamera/src
repos/libcamera/src/libcamera/stream.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2019, Google Inc. * * Video stream for a Camera */ #include <libcamera/stream.h> #include <algorithm> #include <array> #include <iomanip> #include <limits.h> #include <sstream> #include <libcamera/request.h> #include <libcamera/base/log.h> #include <libcamera/base/utils.h> /** * \file stream.h * \brief Video stream for a Camera * * A camera device can provide frames in different resolutions and formats * concurrently from a single image source. The Stream class represents * one of the multiple concurrent streams. * * All streams exposed by a camera device share the same image source and are * thus not fully independent. Parameters related to the image source, such as * the exposure time or flash control, are common to all streams. Other * parameters, such as format or resolution, may be specified per-stream, * depending on the capabilities of the camera device. * * Camera devices expose at least one stream, and may expose additional streams * based on the device capabilities. This can be used, for instance, to * implement concurrent viewfinder and video capture, or concurrent viewfinder, * video capture and still image capture. */ namespace libcamera { LOG_DEFINE_CATEGORY(Stream) /** * \class StreamFormats * \brief Hold information about supported stream formats * * The StreamFormats class holds information about the pixel formats and frame * sizes a stream supports. The class groups size information by the pixel * format which can produce it. * * There are two ways to examine the size information, as a range or as a list * of discrete sizes. When sizes are viewed as a range it describes the minimum * and maximum width and height values. The range description can include * horizontal and vertical steps. * * When sizes are viewed as a list of discrete sizes they describe the exact * dimensions which can be selected and used. * * Pipeline handlers can create StreamFormats describing each pixel format using * either a range or a list of discrete sizes. The StreamFormats class attempts * to translate between the two different ways to view them. The translations * are performed as: * * - If the StreamFormat is constructed using a list of discrete image sizes * and a range is requested, it gets created by taking the minimum and * maximum width/height in the list. The step information is not recreated * and is set to 0 to indicate the range is generated. * * - If the image sizes used to construct a StreamFormat are expressed as a * range and a list of discrete sizes is requested, one which fits inside * that range are selected from a list of common sizes. The step information * is taken into consideration when generating the sizes. * * Applications examining sizes as a range with step values of 0 shall be * aware that the range are generated from a list of discrete sizes and there * could be a large number of possible Size combinations that may not be * supported by the Stream. * * All sizes retrieved from StreamFormats shall be treated as advisory and no * size shall be considered to be supported until it has been verified using * CameraConfiguration::validate(). * * \todo Review the usage patterns of this class, and cache the computed * pixelformats(), sizes() and range() if this would improve performances. 
*/ StreamFormats::StreamFormats() { } /** * \brief Construct a StreamFormats object with a map of image formats * \param[in] formats A map of pixel formats to a sizes description */ StreamFormats::StreamFormats(const std::map<PixelFormat, std::vector<SizeRange>> &formats) : formats_(formats) { } /** * \brief Retrieve the list of supported pixel formats * \return The list of supported pixel formats */ std::vector<PixelFormat> StreamFormats::pixelformats() const { std::vector<PixelFormat> formats; for (auto const &it : formats_) formats.push_back(it.first); return formats; } /** * \brief Retrieve the list of frame sizes supported for \a pixelformat * \param[in] pixelformat PixelFormat to retrieve sizes for * * If the sizes described for \a pixelformat are discrete they are returned * directly. * * If the sizes are described as a range, a list of discrete sizes are computed * from a list of common resolutions that fit inside the described range. When * computing the discrete list step values are considered but there are no * guarantees that all sizes computed are supported. * * \return A list of frame sizes or an empty list on error */ std::vector<Size> StreamFormats::sizes(const PixelFormat &pixelformat) const { /* * Sizes to try and extract from ranges. * \todo Verify list of resolutions are good, current list compiled * from v4l2 documentation and source code as well as lists of * common frame sizes. */ static const std::array<Size, 53> rangeDiscreteSizes = { Size(160, 120), Size(240, 160), Size(320, 240), Size(400, 240), Size(480, 320), Size(640, 360), Size(640, 480), Size(720, 480), Size(720, 576), Size(768, 480), Size(800, 600), Size(854, 480), Size(960, 540), Size(960, 640), Size(1024, 576), Size(1024, 600), Size(1024, 768), Size(1152, 864), Size(1280, 1024), Size(1280, 1080), Size(1280, 720), Size(1280, 800), Size(1360, 768), Size(1366, 768), Size(1400, 1050), Size(1440, 900), Size(1536, 864), Size(1600, 1200), Size(1600, 900), Size(1680, 1050), Size(1920, 1080), Size(1920, 1200), Size(2048, 1080), Size(2048, 1152), Size(2048, 1536), Size(2160, 1080), Size(2560, 1080), Size(2560, 1440), Size(2560, 1600), Size(2560, 2048), Size(2960, 1440), Size(3200, 1800), Size(3200, 2048), Size(3200, 2400), Size(3440, 1440), Size(3840, 1080), Size(3840, 1600), Size(3840, 2160), Size(3840, 2400), Size(4096, 2160), Size(5120, 2160), Size(5120, 2880), Size(7680, 4320), }; std::vector<Size> sizes; /* Make sure pixel format exists. */ auto const &it = formats_.find(pixelformat); if (it == formats_.end()) return {}; /* Try creating a list of discrete sizes. */ const std::vector<SizeRange> &ranges = it->second; bool discrete = true; for (const SizeRange &range : ranges) { if (range.min != range.max) { discrete = false; break; } sizes.emplace_back(range.min); } /* If discrete not possible generate from range. */ if (!discrete) { if (ranges.size() != 1) { LOG(Stream, Error) << "Range format is ambiguous"; return {}; } const SizeRange &limit = ranges.front(); sizes.clear(); for (const Size &size : rangeDiscreteSizes) if (limit.contains(size)) sizes.push_back(size); } std::sort(sizes.begin(), sizes.end()); return sizes; } /** * \brief Retrieve the range of minimum and maximum sizes * \param[in] pixelformat PixelFormat to retrieve range for * * If the size described for \a pixelformat is a range, that range is returned * directly. If the sizes described are a list of discrete sizes, a range is * created from the minimum and maximum sizes in the list. 
The step values of * the range are set to 0 to indicate that the range is generated and that not * all image sizes contained in the range might be supported. * * \return A range of valid image sizes or an empty range on error */ SizeRange StreamFormats::range(const PixelFormat &pixelformat) const { auto const it = formats_.find(pixelformat); if (it == formats_.end()) return {}; const std::vector<SizeRange> &ranges = it->second; if (ranges.size() == 1) return ranges[0]; LOG(Stream, Debug) << "Building range from discrete sizes"; SizeRange range({ UINT_MAX, UINT_MAX }, { 0, 0 }); for (const SizeRange &limit : ranges) { if (limit.min < range.min) range.min = limit.min; if (limit.max > range.max) range.max = limit.max; } range.hStep = 0; range.vStep = 0; return range; } /** * \struct StreamConfiguration * \brief Configuration parameters for a stream * * The StreamConfiguration structure models all information which can be * configured for a single video stream. */ /** * \todo This function is deprecated and should be removed once all pipeline * handlers provide StreamFormats. */ StreamConfiguration::StreamConfiguration() : pixelFormat(0), stride(0), frameSize(0), bufferCount(0), stream_(nullptr) { } /** * \brief Construct a configuration with stream formats */ StreamConfiguration::StreamConfiguration(const StreamFormats &formats) : pixelFormat(0), stride(0), frameSize(0), bufferCount(0), stream_(nullptr), formats_(formats) { } /** * \var StreamConfiguration::size * \brief Stream size in pixels */ /** * \var StreamConfiguration::pixelFormat * \brief Stream pixel format */ /** * \var StreamConfiguration::stride * \brief Image stride for the stream, in bytes * * The stride value reports the number of bytes between the beginning of * successive lines in an image buffer for this stream. The value is * valid after successfully validating the configuration with a call to * CameraConfiguration::validate(). For compressed formats (such as MJPEG), * this value will be zero. */ /** * \var StreamConfiguration::frameSize * \brief Frame size for the stream, in bytes * * The frameSize value reports the number of bytes necessary to contain one * frame of an image buffer for this stream. This total includes the bytes * required for all image planes. The value is valid after successfully * validating the configuration with a call to CameraConfiguration::validate(). */ /** * \var StreamConfiguration::bufferCount * \brief Requested number of buffers to allocate for the stream */ /** * \var StreamConfiguration::colorSpace * \brief The ColorSpace for this stream * * This field allows a ColorSpace to be selected for this Stream. * * The field is optional and an application can choose to leave it unset. * Platforms that support the use of color spaces may provide default * values through the generateConfiguration() method. An application can * override these when necessary. * * If a specific ColorSpace is requested but the Camera cannot deliver it, * then the StreamConfiguration will be adjusted to a value that can be * delivered. In this case the validate() method will indicate via its * return value that the CameraConfiguration has been adjusted. * * Note that platforms will typically have different constraints on what * color spaces can be supported and in what combinations. */ /** * \fn StreamConfiguration::stream() * \brief Retrieve the stream associated with the configuration * * When a camera is configured with Camera::configure() Stream instances are * associated with each stream configuration entry. 
This function retrieves the * associated Stream, which remains valid until the next call to * Camera::configure() or Camera::release(). * * \return The stream associated with the configuration */ /** * \fn StreamConfiguration::setStream() * \brief Associate a stream with a configuration * * This function is meant for the PipelineHandler::configure() function and * shall not be called by applications. * * \param[in] stream The stream */ /** * \fn StreamConfiguration::formats() * \brief Retrieve advisory stream format information * * This function retrieves information about the pixel formats and sizes * supported by the stream configuration. The sizes are advisory and not all of * them are guaranteed to be supported by the stream. Users shall always inspect * the size in the stream configuration after calling * CameraConfiguration::validate(). * * \return Stream formats information */ /** * \brief Assemble and return a string describing the configuration * * \return A string describing the StreamConfiguration */ std::string StreamConfiguration::toString() const { return size.toString() + "-" + pixelFormat.toString(); } /** * \enum StreamRole * \brief Identify the role a stream is intended to play * * The StreamRole describes how an application intends to use a stream. Roles * are specified by applications and passed to cameras, that then select the * most appropriate streams and their default configurations. * * \var Raw * The stream is intended to capture raw frames from the sensor. * \var StillCapture * The stream is intended to capture high-resolution, high-quality still images * with low frame rate. The captured frames may be exposed with flash. * \var VideoRecording * The stream is intended to capture video for the purpose of recording or * streaming. The video stream may produce a high frame rate and may be * enhanced with video stabilization. * \var Viewfinder * The stream is intended to capture video for the purpose of display on the * local screen. Trade-offs between quality and usage of system resources are * acceptable. */ /** * \brief Insert a text representation of a StreamRole into an output stream * \param[in] out The output stream * \param[in] role The StreamRole * \return The output stream \a out */ std::ostream &operator<<(std::ostream &out, StreamRole role) { static constexpr std::array<const char *, 4> names{ "Raw", "StillCapture", "VideoRecording", "Viewfinder", }; out << names[utils::to_underlying(role)]; return out; } /** * \class Stream * \brief Video stream for a camera * * The Stream class models all static information which are associated with a * single video stream. Streams are exposed by the Camera object they belong to. * * Cameras may supply more than one stream from the same video source. In such * cases an application can inspect all available streams and select the ones * that best fit its use case. * * \todo Add capabilities to the stream API. Without this the Stream class only * serves to reveal how many streams of unknown capabilities a camera supports. * This in itself is productive as it allows applications to configure and * capture from one or more streams even if they won't be able to select the * optimal stream for the task. 
*/ /** * \brief Construct a stream with default parameters */ Stream::Stream() { } /** * \fn Stream::configuration() * \brief Retrieve the active configuration of the stream * \return The active configuration of the stream */ /** * \var Stream::configuration_ * \brief The stream configuration * * The configuration for the stream is set by any successful call to * Camera::configure() that includes the stream, and remains valid until the * next call to Camera::configure() regardless of whether it includes the stream. */ } /* namespace libcamera */
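The range-to-discrete translation performed by StreamFormats::sizes() is easiest to see with a single size range. A minimal sketch, using the illustrative NV12 format and a 320x240 to 1920x1080 range:

#include <map>
#include <vector>

#include <libcamera/formats.h>
#include <libcamera/geometry.h>
#include <libcamera/stream.h>

using namespace libcamera;

std::vector<Size> discreteSizes()
{
	std::map<PixelFormat, std::vector<SizeRange>> formats{
		{ formats::NV12,
		  { SizeRange(Size(320, 240), Size(1920, 1080)) } },
	};

	StreamFormats streamFormats(formats);

	/*
	 * A single range with min != max triggers the discrete-list
	 * generation path: common sizes (640x480, 1280x720, ...) that fit
	 * inside the range are returned.
	 */
	return streamFormats.sizes(formats::NV12);
}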
0
repos/libcamera/src
repos/libcamera/src/libcamera/dma_buf_allocator.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2024, Red Hat Inc. * Copyright (C) 2020, Raspberry Pi Ltd * * Helper class for dma-buf allocations. */ #include "libcamera/internal/dma_buf_allocator.h" #include <array> #include <fcntl.h> #include <sys/ioctl.h> #include <sys/mman.h> #include <sys/stat.h> #include <sys/syscall.h> #include <sys/types.h> #include <unistd.h> #include <linux/dma-buf.h> #include <linux/dma-heap.h> #include <linux/udmabuf.h> #include <libcamera/base/log.h> /** * \file dma_buf_allocator.cpp * \brief dma-buf allocator */ namespace libcamera { #ifndef __DOXYGEN__ struct DmaBufAllocatorInfo { DmaBufAllocator::DmaBufAllocatorFlag type; const char *deviceNodeName; }; #endif static constexpr std::array<DmaBufAllocatorInfo, 4> providerInfos = { { /* * /dev/dma_heap/linux,cma is the CMA dma-heap. When the cma heap size is * specified on the kernel command line, this gets renamed to "reserved". */ { DmaBufAllocator::DmaBufAllocatorFlag::CmaHeap, "/dev/dma_heap/linux,cma" }, { DmaBufAllocator::DmaBufAllocatorFlag::CmaHeap, "/dev/dma_heap/reserved" }, { DmaBufAllocator::DmaBufAllocatorFlag::SystemHeap, "/dev/dma_heap/system" }, { DmaBufAllocator::DmaBufAllocatorFlag::UDmaBuf, "/dev/udmabuf" }, } }; LOG_DEFINE_CATEGORY(DmaBufAllocator) /** * \class DmaBufAllocator * \brief Helper class for dma-buf allocations * * This class wraps a userspace dma-buf provider selected at construction time, * and exposes functions to allocate dma-buffers from this provider. * * Different providers may provide dma-buffers with different properties for * the underlying memory. Which providers are acceptable is specified through * the type argument passed to the DmaBufAllocator() constructor. */ /** * \enum DmaBufAllocator::DmaBufAllocatorFlag * \brief Type of the dma-buf provider * \var DmaBufAllocator::CmaHeap * \brief Allocate from a CMA dma-heap, providing physically-contiguous memory * \var DmaBufAllocator::SystemHeap * \brief Allocate from the system dma-heap, using the page allocator * \var DmaBufAllocator::UDmaBuf * \brief Allocate using a memfd + /dev/udmabuf */ /** * \typedef DmaBufAllocator::DmaBufAllocatorFlags * \brief A bitwise combination of DmaBufAllocator::DmaBufAllocatorFlag values */ /** * \brief Construct a DmaBufAllocator of a given type * \param[in] type The type(s) of the dma-buf providers to allocate from * * The dma-buf provider type is selected with the \a type parameter, which * defaults to the CMA heap. If no provider of the given type can be accessed, * the constructed DmaBufAllocator instance is invalid as indicated by * the isValid() function. * * Multiple types can be selected by combining type flags, in which case * the constructed DmaBufAllocator will match one of the types. If multiple * requested types can work on the system, which provider is used is undefined. 
*/ DmaBufAllocator::DmaBufAllocator(DmaBufAllocatorFlags type) { for (const auto &info : providerInfos) { if (!(type & info.type)) continue; int ret = ::open(info.deviceNodeName, O_RDWR | O_CLOEXEC, 0); if (ret < 0) { ret = errno; LOG(DmaBufAllocator, Debug) << "Failed to open " << info.deviceNodeName << ": " << strerror(ret); continue; } LOG(DmaBufAllocator, Debug) << "Using " << info.deviceNodeName; providerHandle_ = UniqueFD(ret); type_ = info.type; break; } if (!providerHandle_.isValid()) LOG(DmaBufAllocator, Error) << "Could not open any dma-buf provider"; } /** * \brief Destroy the DmaBufAllocator instance */ DmaBufAllocator::~DmaBufAllocator() = default; /** * \fn DmaBufAllocator::isValid() * \brief Check if the DmaBufAllocator instance is valid * \return True if the DmaBufAllocator is valid, false otherwise */ /* uClibc doesn't provide the file sealing API. */ #ifndef __DOXYGEN__ #if not HAVE_FILE_SEALS #define F_ADD_SEALS 1033 #define F_SEAL_SHRINK 0x0002 #endif #endif UniqueFD DmaBufAllocator::allocFromUDmaBuf(const char *name, std::size_t size) { /* Size must be a multiple of the page size. Round it up. */ std::size_t pageMask = sysconf(_SC_PAGESIZE) - 1; size = (size + pageMask) & ~pageMask; #if HAVE_MEMFD_CREATE int ret = memfd_create(name, MFD_ALLOW_SEALING | MFD_CLOEXEC); #else int ret = syscall(SYS_memfd_create, name, MFD_ALLOW_SEALING | MFD_CLOEXEC); #endif if (ret < 0) { ret = errno; LOG(DmaBufAllocator, Error) << "Failed to allocate memfd storage for " << name << ": " << strerror(ret); return {}; } UniqueFD memfd(ret); ret = ftruncate(memfd.get(), size); if (ret < 0) { ret = errno; LOG(DmaBufAllocator, Error) << "Failed to set memfd size for " << name << ": " << strerror(ret); return {}; } /* udmabuf dma-buffers *must* have the F_SEAL_SHRINK seal. */ ret = fcntl(memfd.get(), F_ADD_SEALS, F_SEAL_SHRINK); if (ret < 0) { ret = errno; LOG(DmaBufAllocator, Error) << "Failed to seal the memfd for " << name << ": " << strerror(ret); return {}; } struct udmabuf_create create; create.memfd = memfd.get(); create.flags = UDMABUF_FLAGS_CLOEXEC; create.offset = 0; create.size = size; ret = ::ioctl(providerHandle_.get(), UDMABUF_CREATE, &create); if (ret < 0) { ret = errno; LOG(DmaBufAllocator, Error) << "Failed to create dma buf for " << name << ": " << strerror(ret); return {}; } /* The underlying memfd is kept as as a reference in the kernel. */ return UniqueFD(ret); } UniqueFD DmaBufAllocator::allocFromHeap(const char *name, std::size_t size) { struct dma_heap_allocation_data alloc = {}; int ret; alloc.len = size; alloc.fd_flags = O_CLOEXEC | O_RDWR; ret = ::ioctl(providerHandle_.get(), DMA_HEAP_IOCTL_ALLOC, &alloc); if (ret < 0) { LOG(DmaBufAllocator, Error) << "dma-heap allocation failure for " << name; return {}; } UniqueFD allocFd(alloc.fd); ret = ::ioctl(allocFd.get(), DMA_BUF_SET_NAME, name); if (ret < 0) { LOG(DmaBufAllocator, Error) << "dma-heap naming failure for " << name; return {}; } return allocFd; } /** * \brief Allocate a dma-buf from the DmaBufAllocator * \param [in] name The name to set for the allocated buffer * \param [in] size The size of the buffer to allocate * * Allocates a dma-buf with read/write access. * * If the allocation fails, return an invalid UniqueFD. 
* * \return The UniqueFD of the allocated buffer */ UniqueFD DmaBufAllocator::alloc(const char *name, std::size_t size) { if (!name) return {}; if (type_ == DmaBufAllocator::DmaBufAllocatorFlag::UDmaBuf) return allocFromUDmaBuf(name, size); else return allocFromHeap(name, size); } } /* namespace libcamera */
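Putting the allocator documentation above into practice, the sketch below requests a provider of either CMA heap or udmabuf type and allocates a single named buffer. The buffer name and size are illustrative.

#include <libcamera/base/unique_fd.h>

#include "libcamera/internal/dma_buf_allocator.h"

using namespace libcamera;

UniqueFD allocateScratchBuffer()
{
	/* Accept either provider; which one is used is undefined. */
	DmaBufAllocator allocator(DmaBufAllocator::DmaBufAllocatorFlag::CmaHeap |
				  DmaBufAllocator::DmaBufAllocatorFlag::UDmaBuf);
	if (!allocator.isValid())
		return {}; /* No usable dma-buf provider on this system. */

	/* Returns an invalid UniqueFD if the allocation fails. */
	return allocator.alloc("scratch", 4096);
}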
0
repos/libcamera/src
repos/libcamera/src/libcamera/ipa_controls.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2019, Google Inc. * * IPA control handling */ #include <libcamera/ipa/ipa_controls.h> /** * \file ipa_controls.h * \brief Type definitions for serialized controls * * This file defines binary formats to store ControlList and ControlInfoMap * instances in contiguous, self-contained memory areas called control packets. * It describes the layout of the packets through a set of C structures. These * formats shall be used when serializing ControlList and ControlInfoMap to * transfer them through the IPA C interface and IPA IPC transports. * * A control packet contains a list of entries, each of them describing a single * control info or control value. The packet starts with a fixed-size header * described by the ipa_controls_header structure, followed by an array of * fixed-size entries. Each entry is associated with data, stored either * directly in the entry, or in a data section after the entries array. * * The following diagram describes the layout of the ControlList packet. * * ~~~~ * +-------------------------+ . . * Header / | ipa_controls_header | | | * | | | | | * \ | | | | * +-------------------------+ | | * / | ipa_control_value_entry | | hdr.data_offset | * | | #0 | | | * Control | +-------------------------+ | | * value | | ... | | | * entries | +-------------------------+ | | * | | ipa_control_value_entry | | hdr.size | * \ | #hdr.entries - 1 | | | * +-------------------------+ | | * | empty space (optional) | | | * +-------------------------+ <--Β΄ . | * / | ... | | entry[n].offset | * Data | | ... | | | * section | | value data for entry #n | <-----Β΄ | * \ | ... | | * +-------------------------+ | * | empty space (optional) | | * +-------------------------+ <-------------------------Β΄ * ~~~~ * * The packet header contains the size of the packet, the number of entries, and * the offset from the beginning of the packet to the data section. The packet * entries array immediately follows the header. The data section starts at the * offset ipa_controls_header::data_offset from the beginning of the packet, and * shall be aligned to a multiple of 8 bytes. * * Entries are described by the ipa_control_value_entry structure. They contain * the numerical ID of the control, its type, and the number of control values. * * The control values are stored in the data section in the platform's native * format. The ipa_control_value_entry::offset field stores the offset from the * beginning of the data section to the values. * * All control values in the data section shall be stored in the same order as * the respective control entries, shall be aligned to a multiple of 8 bytes, * and shall be contiguous in memory. * * Empty spaces may be present between the end of the entries array and the * data section, and after the data section. They shall be ignored when parsing * the packet. * * The following diagram describes the layout of the ControlInfoMap packet. * * ~~~~ * +-------------------------+ . . * Header / | ipa_controls_header | | | * | | | | | * \ | | | | * +-------------------------+ | | * / | ipa_control_info_entry | | hdr.data_offset | * | | #0 | | | * Control | +-------------------------+ | | * info | | ... | | | * entries | +-------------------------+ | | * | | ipa_control_info_entry | | hdr.size | * \ | #hdr.entries - 1 | | | * +-------------------------+ | | * | empty space (optional) | | | * +-------------------------+ <--Β΄ . | * / | ... | | entry[n].offset | * Data | | ... 
| | | * section | | info data for entry #n | <-----Β΄ | * \ | ... | | * +-------------------------+ | * | empty space (optional) | | * +-------------------------+ <-------------------------Β΄ * ~~~~ * * The packet header is identical to the ControlList packet header. * * Entries are described by the ipa_control_info_entry structure. They contain * the numerical ID and type of the control. The control info data is stored * in the data section as described by the following diagram. * * ~~~~ * +-------------------------+ . * / | ... | | entry[n].offset * | +-------------------------+ <-----Β΄ * | | minimum value (#n) | \ * Data | +-------------------------+ | * section | | maximum value (#n) | | Entry #n * | +-------------------------+ | * | | default value (#n) | / * | +-------------------------+ * \ | ... | * +-------------------------+ * ~~~~ * * The minimum, maximum and default values are stored in the platform's native * data format. The ipa_control_info_entry::offset field stores the offset from * the beginning of the data section to the info data. * * Info data in the data section shall be stored in the same order as the * entries array, shall be aligned to a multiple of 8 bytes, and shall be * contiguous in memory. * * As for the ControlList packet, empty spaces may be present between the end of * the entries array and the data section, and after the data section. They * shall be ignored when parsing the packet. */ namespace libcamera { /** * \def IPA_CONTROLS_FORMAT_VERSION * \brief The current control serialization format version */ /** * \var ipa_controls_id_map_type * \brief Enumerates the different control id map types * * Each ControlInfoMap and ControlList refers to a control id map that * associates the ControlId references to a numerical identifier. * During the serialization procedure the raw pointers to the ControlId * instances cannot be transported on the wire, hence their numerical id is * used to identify them in the serialized data buffer. At deserialization time * it is required to associate back to the numerical id the ControlId instance * it represents. This enumeration describes which ControlIdMap should be * used to perform such operation. * * \var ipa_controls_id_map_type::IPA_CONTROL_ID_MAP_CONTROLS * \brief The numerical control identifier are resolved to a ControlId * using * the global controls::controls id map * \var ipa_controls_id_map_type::IPA_CONTROL_ID_MAP_PROPERTIES * \brief The numerical control identifier are resolved to a ControlId * using * the global properties::properties id map * \var ipa_controls_id_map_type::IPA_CONTROL_ID_MAP_V4L2 * \brief ControlId for V4L2 defined controls are created by the video device * that enumerates them, and are not available across the IPC boundaries. The * deserializer shall create new ControlId instances for them as well as store * them in a dedicated ControlIdMap. Only lookup by numerical id can be * performed on de-serialized ControlInfoMap that represents V4L2 controls. */ /** * \struct ipa_controls_header * \brief Serialized control packet header * \var ipa_controls_header::version * Control packet format version number (shall be IPA_CONTROLS_FORMAT_VERSION) * \var ipa_controls_header::handle * For ControlInfoMap packets, this field contains a unique non-zero handle * generated when the ControlInfoMap is serialized. For ControlList packets, * this field contains the handle of the corresponding ControlInfoMap. 
* \var ipa_controls_header::entries * Number of entries in the packet * \var ipa_controls_header::size * The total packet size in bytes * \var ipa_controls_header::data_offset * Offset in bytes from the beginning of the packet of the data section start * \var ipa_controls_header::id_map_type * The id map type as defined by the ipa_controls_id_map_type enumeration * \var ipa_controls_header::reserved * Reserved for future extensions */ static_assert(sizeof(ipa_controls_header) == 32, "Invalid ABI size change for struct ipa_control_header"); /** * \struct ipa_control_value_entry * \brief Description of a serialized ControlValue entry * \var ipa_control_value_entry::id * The numerical ID of the control * \var ipa_control_value_entry::type * The type of the control (defined by enum ControlType) * \var ipa_control_value_entry::is_array * True if the control value stores an array, false otherwise * \var ipa_control_value_entry::count * The number of control array entries for array controls (1 otherwise) * \var ipa_control_value_entry::offset * The offset in bytes from the beginning of the data section to the control * value data (shall be a multiple of 8 bytes). * \var ipa_control_value_entry::padding * Padding bytes (shall be set to 0) */ static_assert(sizeof(ipa_control_value_entry) == 16, "Invalid ABI size change for struct ipa_control_value_entry"); /** * \struct ipa_control_info_entry * \brief Description of a serialized ControlInfo entry * \var ipa_control_info_entry::id * The numerical ID of the control * \var ipa_control_info_entry::type * The type of the control (defined by enum ControlType) * \var ipa_control_info_entry::offset * The offset in bytes from the beginning of the data section to the control * info data (shall be a multiple of 8 bytes) * \var ipa_control_info_entry::padding * Padding bytes (shall be set to 0) */ static_assert(sizeof(ipa_control_info_entry) == 16, "Invalid ABI size change for struct ipa_control_info_entry"); } /* namespace libcamera */
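The packet layout described above translates into a handful of header sanity checks that any reader should perform before touching the entries array. A hedged sketch of those checks for a ControlList packet; validHeader() is an illustrative name, not part of the ABI:

#include <stddef.h>
#include <stdint.h>

#include <libcamera/ipa/ipa_controls.h>

bool validHeader(const struct ipa_controls_header *hdr, size_t bufSize)
{
	if (hdr->version != IPA_CONTROLS_FORMAT_VERSION)
		return false;

	/* The packet must hold the header and the full entries array. */
	size_t entriesEnd = sizeof(*hdr) +
			    hdr->entries * sizeof(struct ipa_control_value_entry);
	if (hdr->size > bufSize || entriesEnd > hdr->size)
		return false;

	/* The data section follows the entries, aligned to 8 bytes. */
	return hdr->data_offset >= entriesEnd && hdr->data_offset % 8 == 0;
}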
0
repos/libcamera/src
repos/libcamera/src/libcamera/ipa_data_serializer.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2020, Google Inc. * * Image Processing Algorithm data serializer */ #include "libcamera/internal/ipa_data_serializer.h" #include <unistd.h> #include <libcamera/base/log.h> /** * \file ipa_data_serializer.h * \brief IPA Data Serializer */ namespace libcamera { LOG_DEFINE_CATEGORY(IPADataSerializer) /** * \class IPADataSerializer * \brief IPA Data Serializer * * Static template class that provides functions for serializing and * deserializing IPA data. * * \todo Switch to Span instead of byte and fd vector * * \todo Harden the vector and map deserializer * * \todo For SharedFDs, instead of storing a validity flag, store an * index into the fd array. This will allow us to use views instead of copying. */ namespace { /** * \fn template<typename T> void appendPOD(std::vector<uint8_t> &vec, T val) * \brief Append POD to end of byte vector, in little-endian order * \tparam T Type of POD to append * \param[in] vec Byte vector to append to * \param[in] val Value to append * * This function is meant to be used by the IPA data serializer, and the * generated IPA proxies. */ /** * \fn template<typename T> T readPOD(std::vector<uint8_t>::iterator it, size_t pos, * std::vector<uint8_t>::iterator end) * \brief Read POD from byte vector, in little-endian order * \tparam T Type of POD to read * \param[in] it Iterator of byte vector to read from * \param[in] pos Index in byte vector to read from * \param[in] end Iterator marking end of byte vector * * This function is meant to be used by the IPA data serializer, and the * generated IPA proxies. * * If the \a pos plus the byte-width of the desired POD is past \a end, it is * a fata error will occur, as it means there is insufficient data for * deserialization, which should never happen. * * \return The POD read from \a it at index \a pos */ /** * \fn template<typename T> T readPOD(std::vector<uint8_t> &vec, size_t pos) * \brief Read POD from byte vector, in little-endian order * \tparam T Type of POD to read * \param[in] vec Byte vector to read from * \param[in] pos Index in vec to start reading from * * This function is meant to be used by the IPA data serializer, and the * generated IPA proxies. * * If the \a pos plus the byte-width of the desired POD is past the end of * \a vec, a fatal error will occur, as it means there is insufficient data * for deserialization, which should never happen. * * \return The POD read from \a vec at index \a pos */ } /* namespace */ /** * \fn template<typename T> IPADataSerializer<T>::serialize( * T data, * ControlSerializer *cs = nullptr) * \brief Serialize an object into byte vector and fd vector * \tparam T Type of object to serialize * \param[in] data Object to serialize * \param[in] cs ControlSerializer * * \a cs is only necessary if the object type \a T or its members contain * ControlList or ControlInfoMap. * * \return Tuple of byte vector and fd vector, that is the serialized form * of \a data */ /** * \fn template<typename T> IPADataSerializer<T>::deserialize( * const std::vector<uint8_t> &data, * ControlSerializer *cs = nullptr) * \brief Deserialize byte vector into an object * \tparam T Type of object to deserialize to * \param[in] data Byte vector to deserialize from * \param[in] cs ControlSerializer * * This version of deserialize() can be used if the object type \a T and its * members don't have any SharedFD. * * \a cs is only necessary if the object type \a T or its members contain * ControlList or ControlInfoMap. 
* * \return The deserialized object */ /** * \fn template<typename T> IPADataSerializer<T>::deserialize( * std::vector<uint8_t>::const_iterator dataBegin, * std::vector<uint8_t>::const_iterator dataEnd, * ControlSerializer *cs = nullptr) * \brief Deserialize byte vector into an object * \tparam T Type of object to deserialize to * \param[in] dataBegin Begin iterator of byte vector to deserialize from * \param[in] dataEnd End iterator of byte vector to deserialize from * \param[in] cs ControlSerializer * * This version of deserialize() can be used if the object type \a T and its * members don't have any SharedFD. * * \a cs is only necessary if the object type \a T or its members contain * ControlList or ControlInfoMap. * * \return The deserialized object */ /** * \fn template<typename T> IPADataSerializer<T>::deserialize( * const std::vector<uint8_t> &data, * const std::vector<SharedFD> &fds, * ControlSerializer *cs = nullptr) * \brief Deserialize byte vector and fd vector into an object * \tparam T Type of object to deserialize to * \param[in] data Byte vector to deserialize from * \param[in] fds Fd vector to deserialize from * \param[in] cs ControlSerializer * * This version of deserialize() (or the iterator version) must be used if * the object type \a T or its members contain SharedFD. * * \a cs is only necessary if the object type \a T or its members contain * ControlList or ControlInfoMap. * * \return The deserialized object */ /** * \fn template<typename T> IPADataSerializer::deserialize( * std::vector<uint8_t>::const_iterator dataBegin, * std::vector<uint8_t>::const_iterator dataEnd, * std::vector<SharedFD>::const_iterator fdsBegin, * std::vector<SharedFD>::const_iterator fdsEnd, * ControlSerializer *cs = nullptr) * \brief Deserialize byte vector and fd vector into an object * \tparam T Type of object to deserialize to * \param[in] dataBegin Begin iterator of byte vector to deserialize from * \param[in] dataEnd End iterator of byte vector to deserialize from * \param[in] fdsBegin Begin iterator of fd vector to deserialize from * \param[in] fdsEnd End iterator of fd vector to deserialize from * \param[in] cs ControlSerializer * * This version of deserialize() (or the vector version) must be used if * the object type \a T or its members contain SharedFD. * * \a cs is only necessary if the object type \a T or its members contain * ControlList or ControlInfoMap. 
* * \return The deserialized object */ #ifndef __DOXYGEN__ #define DEFINE_POD_SERIALIZER(type) \ \ template<> \ std::tuple<std::vector<uint8_t>, std::vector<SharedFD>> \ IPADataSerializer<type>::serialize(const type &data, \ [[maybe_unused]] ControlSerializer *cs) \ { \ std::vector<uint8_t> dataVec; \ dataVec.reserve(sizeof(type)); \ appendPOD<type>(dataVec, data); \ \ return { dataVec, {} }; \ } \ \ template<> \ type IPADataSerializer<type>::deserialize(std::vector<uint8_t>::const_iterator dataBegin, \ std::vector<uint8_t>::const_iterator dataEnd, \ [[maybe_unused]] ControlSerializer *cs) \ { \ return readPOD<type>(dataBegin, 0, dataEnd); \ } \ \ template<> \ type IPADataSerializer<type>::deserialize(const std::vector<uint8_t> &data, \ ControlSerializer *cs) \ { \ return deserialize(data.cbegin(), data.end(), cs); \ } \ \ template<> \ type IPADataSerializer<type>::deserialize(const std::vector<uint8_t> &data, \ [[maybe_unused]] const std::vector<SharedFD> &fds, \ ControlSerializer *cs) \ { \ return deserialize(data.cbegin(), data.end(), cs); \ } \ \ template<> \ type IPADataSerializer<type>::deserialize(std::vector<uint8_t>::const_iterator dataBegin, \ std::vector<uint8_t>::const_iterator dataEnd, \ [[maybe_unused]] std::vector<SharedFD>::const_iterator fdsBegin, \ [[maybe_unused]] std::vector<SharedFD>::const_iterator fdsEnd, \ ControlSerializer *cs) \ { \ return deserialize(dataBegin, dataEnd, cs); \ } DEFINE_POD_SERIALIZER(bool) DEFINE_POD_SERIALIZER(uint8_t) DEFINE_POD_SERIALIZER(uint16_t) DEFINE_POD_SERIALIZER(uint32_t) DEFINE_POD_SERIALIZER(uint64_t) DEFINE_POD_SERIALIZER(int8_t) DEFINE_POD_SERIALIZER(int16_t) DEFINE_POD_SERIALIZER(int32_t) DEFINE_POD_SERIALIZER(int64_t) DEFINE_POD_SERIALIZER(float) DEFINE_POD_SERIALIZER(double) /* * Strings are serialized simply by converting by {string.cbegin(), string.end()}. * The size of the string is recorded by the container (struct, vector, map, or * function parameter serdes). 
*/ template<> std::tuple<std::vector<uint8_t>, std::vector<SharedFD>> IPADataSerializer<std::string>::serialize(const std::string &data, [[maybe_unused]] ControlSerializer *cs) { return { { data.cbegin(), data.end() }, {} }; } template<> std::string IPADataSerializer<std::string>::deserialize(const std::vector<uint8_t> &data, [[maybe_unused]] ControlSerializer *cs) { return { data.cbegin(), data.cend() }; } template<> std::string IPADataSerializer<std::string>::deserialize(std::vector<uint8_t>::const_iterator dataBegin, std::vector<uint8_t>::const_iterator dataEnd, [[maybe_unused]] ControlSerializer *cs) { return { dataBegin, dataEnd }; } template<> std::string IPADataSerializer<std::string>::deserialize(const std::vector<uint8_t> &data, [[maybe_unused]] const std::vector<SharedFD> &fds, [[maybe_unused]] ControlSerializer *cs) { return { data.cbegin(), data.cend() }; } template<> std::string IPADataSerializer<std::string>::deserialize(std::vector<uint8_t>::const_iterator dataBegin, std::vector<uint8_t>::const_iterator dataEnd, [[maybe_unused]] std::vector<SharedFD>::const_iterator fdsBegin, [[maybe_unused]] std::vector<SharedFD>::const_iterator fdsEnd, [[maybe_unused]] ControlSerializer *cs) { return { dataBegin, dataEnd }; } /* * ControlList is serialized as: * * 4 bytes - uint32_t Size of serialized ControlInfoMap, in bytes * 4 bytes - uint32_t Size of serialized ControlList, in bytes * X bytes - Serialized ControlInfoMap (using ControlSerializer) * X bytes - Serialized ControlList (using ControlSerializer) * * If data.infoMap() is nullptr, then the default controls::controls will * be used. The serialized ControlInfoMap will have zero length. */ template<> std::tuple<std::vector<uint8_t>, std::vector<SharedFD>> IPADataSerializer<ControlList>::serialize(const ControlList &data, ControlSerializer *cs) { if (!cs) LOG(IPADataSerializer, Fatal) << "ControlSerializer not provided for serialization of ControlList"; size_t size; std::vector<uint8_t> infoData; int ret; /* * \todo Revisit this opportunistic serialization of the * ControlInfoMap, as it could be fragile */ if (data.infoMap() && !cs->isCached(*data.infoMap())) { size = cs->binarySize(*data.infoMap()); infoData.resize(size); ByteStreamBuffer buffer(infoData.data(), infoData.size()); ret = cs->serialize(*data.infoMap(), buffer); if (ret < 0 || buffer.overflow()) { LOG(IPADataSerializer, Error) << "Failed to serialize ControlList's ControlInfoMap"; return { {}, {} }; } } size = cs->binarySize(data); std::vector<uint8_t> listData(size); ByteStreamBuffer buffer(listData.data(), listData.size()); ret = cs->serialize(data, buffer); if (ret < 0 || buffer.overflow()) { LOG(IPADataSerializer, Error) << "Failed to serialize ControlList"; return { {}, {} }; } std::vector<uint8_t> dataVec; dataVec.reserve(8 + infoData.size() + listData.size()); appendPOD<uint32_t>(dataVec, infoData.size()); appendPOD<uint32_t>(dataVec, listData.size()); dataVec.insert(dataVec.end(), infoData.begin(), infoData.end()); dataVec.insert(dataVec.end(), listData.begin(), listData.end()); return { dataVec, {} }; } template<> ControlList IPADataSerializer<ControlList>::deserialize(std::vector<uint8_t>::const_iterator dataBegin, std::vector<uint8_t>::const_iterator dataEnd, ControlSerializer *cs) { if (!cs) LOG(IPADataSerializer, Fatal) << "ControlSerializer not provided for deserialization of ControlList"; if (std::distance(dataBegin, dataEnd) < 8) return {}; uint32_t infoDataSize = readPOD<uint32_t>(dataBegin, 0, dataEnd); uint32_t listDataSize = 
readPOD<uint32_t>(dataBegin, 4, dataEnd); std::vector<uint8_t>::const_iterator it = dataBegin + 8; if (infoDataSize + listDataSize < infoDataSize || static_cast<uint32_t>(std::distance(it, dataEnd)) < infoDataSize + listDataSize) return {}; if (infoDataSize > 0) { ByteStreamBuffer buffer(&*it, infoDataSize); ControlInfoMap map = cs->deserialize<ControlInfoMap>(buffer); /* It's fine if map is empty. */ if (buffer.overflow()) { LOG(IPADataSerializer, Error) << "Failed to deserialize ControlList's ControlInfoMap: buffer overflow"; return ControlList(); } } it += infoDataSize; ByteStreamBuffer buffer(&*it, listDataSize); ControlList list = cs->deserialize<ControlList>(buffer); if (buffer.overflow()) LOG(IPADataSerializer, Error) << "Failed to deserialize ControlList: buffer overflow"; return list; } template<> ControlList IPADataSerializer<ControlList>::deserialize(const std::vector<uint8_t> &data, ControlSerializer *cs) { return deserialize(data.cbegin(), data.end(), cs); } template<> ControlList IPADataSerializer<ControlList>::deserialize(const std::vector<uint8_t> &data, [[maybe_unused]] const std::vector<SharedFD> &fds, ControlSerializer *cs) { return deserialize(data.cbegin(), data.end(), cs); } template<> ControlList IPADataSerializer<ControlList>::deserialize(std::vector<uint8_t>::const_iterator dataBegin, std::vector<uint8_t>::const_iterator dataEnd, [[maybe_unused]] std::vector<SharedFD>::const_iterator fdsBegin, [[maybe_unused]] std::vector<SharedFD>::const_iterator fdsEnd, ControlSerializer *cs) { return deserialize(dataBegin, dataEnd, cs); } /* * const ControlInfoMap is serialized as: * * 4 bytes - uint32_t Size of serialized ControlInfoMap, in bytes * X bytes - Serialized ControlInfoMap (using ControlSerializer) */ template<> std::tuple<std::vector<uint8_t>, std::vector<SharedFD>> IPADataSerializer<ControlInfoMap>::serialize(const ControlInfoMap &map, ControlSerializer *cs) { if (!cs) LOG(IPADataSerializer, Fatal) << "ControlSerializer not provided for serialization of ControlInfoMap"; size_t size = cs->binarySize(map); std::vector<uint8_t> infoData(size); ByteStreamBuffer buffer(infoData.data(), infoData.size()); int ret = cs->serialize(map, buffer); if (ret < 0 || buffer.overflow()) { LOG(IPADataSerializer, Error) << "Failed to serialize ControlInfoMap"; return { {}, {} }; } std::vector<uint8_t> dataVec; appendPOD<uint32_t>(dataVec, infoData.size()); dataVec.insert(dataVec.end(), infoData.begin(), infoData.end()); return { dataVec, {} }; } template<> ControlInfoMap IPADataSerializer<ControlInfoMap>::deserialize(std::vector<uint8_t>::const_iterator dataBegin, std::vector<uint8_t>::const_iterator dataEnd, ControlSerializer *cs) { if (!cs) LOG(IPADataSerializer, Fatal) << "ControlSerializer not provided for deserialization of ControlInfoMap"; if (std::distance(dataBegin, dataEnd) < 4) return {}; uint32_t infoDataSize = readPOD<uint32_t>(dataBegin, 0, dataEnd); std::vector<uint8_t>::const_iterator it = dataBegin + 4; if (static_cast<uint32_t>(std::distance(it, dataEnd)) < infoDataSize) return {}; ByteStreamBuffer buffer(&*it, infoDataSize); ControlInfoMap map = cs->deserialize<ControlInfoMap>(buffer); return map; } template<> ControlInfoMap IPADataSerializer<ControlInfoMap>::deserialize(const std::vector<uint8_t> &data, ControlSerializer *cs) { return deserialize(data.cbegin(), data.end(), cs); } template<> ControlInfoMap IPADataSerializer<ControlInfoMap>::deserialize(const std::vector<uint8_t> &data, [[maybe_unused]] const std::vector<SharedFD> &fds, ControlSerializer *cs) { 
return deserialize(data.cbegin(), data.end(), cs); } template<> ControlInfoMap IPADataSerializer<ControlInfoMap>::deserialize(std::vector<uint8_t>::const_iterator dataBegin, std::vector<uint8_t>::const_iterator dataEnd, [[maybe_unused]] std::vector<SharedFD>::const_iterator fdsBegin, [[maybe_unused]] std::vector<SharedFD>::const_iterator fdsEnd, ControlSerializer *cs) { return deserialize(dataBegin, dataEnd, cs); } /* * SharedFD instances are serialized into four bytes that tell if the SharedFD * is valid or not. If it is valid, then for serialization the fd will be * written to the fd vector, or for deserialization the fd vector const_iterator * will be valid. * * This validity is necessary so that we don't send -1 fd over sendmsg(). It * also allows us to simply send the entire fd vector into the deserializer * and it will be recursively consumed as necessary. */ template<> std::tuple<std::vector<uint8_t>, std::vector<SharedFD>> IPADataSerializer<SharedFD>::serialize(const SharedFD &data, [[maybe_unused]] ControlSerializer *cs) { std::vector<uint8_t> dataVec; std::vector<SharedFD> fdVec; /* * Store as uint32_t to prepare for conversion from validity flag * to index, and for alignment. */ appendPOD<uint32_t>(dataVec, data.isValid()); if (data.isValid()) fdVec.push_back(data); return { dataVec, fdVec }; } template<> SharedFD IPADataSerializer<SharedFD>::deserialize([[maybe_unused]] std::vector<uint8_t>::const_iterator dataBegin, [[maybe_unused]] std::vector<uint8_t>::const_iterator dataEnd, std::vector<SharedFD>::const_iterator fdsBegin, std::vector<SharedFD>::const_iterator fdsEnd, [[maybe_unused]] ControlSerializer *cs) { ASSERT(std::distance(dataBegin, dataEnd) >= 4); uint32_t valid = readPOD<uint32_t>(dataBegin, 0, dataEnd); ASSERT(!(valid && std::distance(fdsBegin, fdsEnd) < 1)); return valid ? 
*fdsBegin : SharedFD(); } template<> SharedFD IPADataSerializer<SharedFD>::deserialize(const std::vector<uint8_t> &data, const std::vector<SharedFD> &fds, [[maybe_unused]] ControlSerializer *cs) { return deserialize(data.cbegin(), data.end(), fds.cbegin(), fds.end()); } /* * FrameBuffer::Plane is serialized as: * * 4 bytes - SharedFD * 4 bytes - uint32_t Offset * 4 bytes - uint32_t Length */ template<> std::tuple<std::vector<uint8_t>, std::vector<SharedFD>> IPADataSerializer<FrameBuffer::Plane>::serialize(const FrameBuffer::Plane &data, [[maybe_unused]] ControlSerializer *cs) { std::vector<uint8_t> dataVec; std::vector<SharedFD> fdsVec; std::vector<uint8_t> fdBuf; std::vector<SharedFD> fdFds; std::tie(fdBuf, fdFds) = IPADataSerializer<SharedFD>::serialize(data.fd); dataVec.insert(dataVec.end(), fdBuf.begin(), fdBuf.end()); fdsVec.insert(fdsVec.end(), fdFds.begin(), fdFds.end()); appendPOD<uint32_t>(dataVec, data.offset); appendPOD<uint32_t>(dataVec, data.length); return { dataVec, fdsVec }; } template<> FrameBuffer::Plane IPADataSerializer<FrameBuffer::Plane>::deserialize(std::vector<uint8_t>::const_iterator dataBegin, std::vector<uint8_t>::const_iterator dataEnd, std::vector<SharedFD>::const_iterator fdsBegin, [[maybe_unused]] std::vector<SharedFD>::const_iterator fdsEnd, [[maybe_unused]] ControlSerializer *cs) { FrameBuffer::Plane ret; ret.fd = IPADataSerializer<SharedFD>::deserialize(dataBegin, dataBegin + 4, fdsBegin, fdsBegin + 1); ret.offset = readPOD<uint32_t>(dataBegin, 4, dataEnd); ret.length = readPOD<uint32_t>(dataBegin, 8, dataEnd); return ret; } template<> FrameBuffer::Plane IPADataSerializer<FrameBuffer::Plane>::deserialize(const std::vector<uint8_t> &data, const std::vector<SharedFD> &fds, ControlSerializer *cs) { return deserialize(data.cbegin(), data.end(), fds.cbegin(), fds.end(), cs); } #endif /* __DOXYGEN__ */ } /* namespace libcamera */
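A minimal round-trip sketch of the specializations above (illustrative only, not part of the upstream file): a null ControlSerializer is acceptable here because neither std::string nor SharedFD uses it, and the assertions simply restate the documented contract.

#include <cassert>
#include <string>
#include <vector>

#include <libcamera/base/shared_fd.h>

#include "libcamera/internal/ipa_data_serializer.h"

namespace libcamera {

/* Round-trip a std::string: no fds are produced or consumed. */
void stringRoundTrip()
{
	const std::string in = "ipa-module-name";

	auto [data, fds] = IPADataSerializer<std::string>::serialize(in, nullptr);
	assert(fds.empty());

	assert(IPADataSerializer<std::string>::deserialize(data, nullptr) == in);
}

/* Round-trip a SharedFD: a valid fd travels in the separate fd vector. */
void fdRoundTrip(const SharedFD &fd)
{
	auto [data, fds] = IPADataSerializer<SharedFD>::serialize(fd, nullptr);
	assert(fds.size() == (fd.isValid() ? 1u : 0u));

	SharedFD out = IPADataSerializer<SharedFD>::deserialize(data, fds, nullptr);
	assert(out.isValid() == fd.isValid());
}

} /* namespace libcamera */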
0
repos/libcamera/src
repos/libcamera/src/libcamera/bayer_format.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2020, Raspberry Pi Ltd * * Class to represent Bayer formats */ #include "libcamera/internal/bayer_format.h" #include <algorithm> #include <map> #include <sstream> #include <unordered_map> #include <linux/media-bus-format.h> #include <libcamera/formats.h> #include <libcamera/transform.h> /** * \file bayer_format.h * \brief Class to represent Bayer formats and manipulate them */ namespace libcamera { /** * \class BayerFormat * \brief Class to represent a raw image Bayer format * * This class encodes the different Bayer formats in such a way that they can * be easily manipulated. For example, the bit depth or Bayer order can be * easily altered - the Bayer order can even be "transformed" in the same * manner as happens in many sensors when their horizontal or vertical "flip" * controls are set. */ /** * \enum BayerFormat::Order * \brief The order of the colour channels in the Bayer pattern * * \var BayerFormat::BGGR * \brief B then G on the first row, G then R on the second row. * \var BayerFormat::GBRG * \brief G then B on the first row, R then G on the second row. * \var BayerFormat::GRBG * \brief G then R on the first row, B then G on the second row. * \var BayerFormat::RGGB * \brief R then G on the first row, G then B on the second row. * \var BayerFormat::MONO * \brief Monochrome image data, there is no colour filter array. */ /** * \enum BayerFormat::Packing * \brief Different types of packing that can be applied to a BayerFormat * * \var BayerFormat::Packing::None * \brief No packing * \var BayerFormat::Packing::CSI2 * \brief Format uses MIPI CSI-2 style packing * \var BayerFormat::Packing::IPU3 * \brief Format uses IPU3 style packing * \var BayerFormat::Packing::PISP1 * \brief Format uses PISP mode 1 compression * \var BayerFormat::Packing::PISP2 * \brief Format uses PISP mode 2 compression */ namespace { /* Define a slightly arbitrary ordering so that we can use a std::map. 
*/ struct BayerFormatComparator { constexpr bool operator()(const BayerFormat &lhs, const BayerFormat &rhs) const { if (lhs.bitDepth < rhs.bitDepth) return true; else if (lhs.bitDepth > rhs.bitDepth) return false; if (lhs.order < rhs.order) return true; else if (lhs.order > rhs.order) return false; if (lhs.packing < rhs.packing) return true; else return false; } }; struct Formats { PixelFormat pixelFormat; V4L2PixelFormat v4l2Format; }; const std::map<BayerFormat, Formats, BayerFormatComparator> bayerToFormat{ { { BayerFormat::BGGR, 8, BayerFormat::Packing::None }, { formats::SBGGR8, V4L2PixelFormat(V4L2_PIX_FMT_SBGGR8) } }, { { BayerFormat::GBRG, 8, BayerFormat::Packing::None }, { formats::SGBRG8, V4L2PixelFormat(V4L2_PIX_FMT_SGBRG8) } }, { { BayerFormat::GRBG, 8, BayerFormat::Packing::None }, { formats::SGRBG8, V4L2PixelFormat(V4L2_PIX_FMT_SGRBG8) } }, { { BayerFormat::RGGB, 8, BayerFormat::Packing::None }, { formats::SRGGB8, V4L2PixelFormat(V4L2_PIX_FMT_SRGGB8) } }, { { BayerFormat::BGGR, 10, BayerFormat::Packing::None }, { formats::SBGGR10, V4L2PixelFormat(V4L2_PIX_FMT_SBGGR10) } }, { { BayerFormat::GBRG, 10, BayerFormat::Packing::None }, { formats::SGBRG10, V4L2PixelFormat(V4L2_PIX_FMT_SGBRG10) } }, { { BayerFormat::GRBG, 10, BayerFormat::Packing::None }, { formats::SGRBG10, V4L2PixelFormat(V4L2_PIX_FMT_SGRBG10) } }, { { BayerFormat::RGGB, 10, BayerFormat::Packing::None }, { formats::SRGGB10, V4L2PixelFormat(V4L2_PIX_FMT_SRGGB10) } }, { { BayerFormat::BGGR, 10, BayerFormat::Packing::CSI2 }, { formats::SBGGR10_CSI2P, V4L2PixelFormat(V4L2_PIX_FMT_SBGGR10P) } }, { { BayerFormat::GBRG, 10, BayerFormat::Packing::CSI2 }, { formats::SGBRG10_CSI2P, V4L2PixelFormat(V4L2_PIX_FMT_SGBRG10P) } }, { { BayerFormat::GRBG, 10, BayerFormat::Packing::CSI2 }, { formats::SGRBG10_CSI2P, V4L2PixelFormat(V4L2_PIX_FMT_SGRBG10P) } }, { { BayerFormat::RGGB, 10, BayerFormat::Packing::CSI2 }, { formats::SRGGB10_CSI2P, V4L2PixelFormat(V4L2_PIX_FMT_SRGGB10P) } }, { { BayerFormat::BGGR, 10, BayerFormat::Packing::IPU3 }, { formats::SBGGR10_IPU3, V4L2PixelFormat(V4L2_PIX_FMT_IPU3_SBGGR10) } }, { { BayerFormat::GBRG, 10, BayerFormat::Packing::IPU3 }, { formats::SGBRG10_IPU3, V4L2PixelFormat(V4L2_PIX_FMT_IPU3_SGBRG10) } }, { { BayerFormat::GRBG, 10, BayerFormat::Packing::IPU3 }, { formats::SGRBG10_IPU3, V4L2PixelFormat(V4L2_PIX_FMT_IPU3_SGRBG10) } }, { { BayerFormat::RGGB, 10, BayerFormat::Packing::IPU3 }, { formats::SRGGB10_IPU3, V4L2PixelFormat(V4L2_PIX_FMT_IPU3_SRGGB10) } }, { { BayerFormat::BGGR, 12, BayerFormat::Packing::None }, { formats::SBGGR12, V4L2PixelFormat(V4L2_PIX_FMT_SBGGR12) } }, { { BayerFormat::GBRG, 12, BayerFormat::Packing::None }, { formats::SGBRG12, V4L2PixelFormat(V4L2_PIX_FMT_SGBRG12) } }, { { BayerFormat::GRBG, 12, BayerFormat::Packing::None }, { formats::SGRBG12, V4L2PixelFormat(V4L2_PIX_FMT_SGRBG12) } }, { { BayerFormat::RGGB, 12, BayerFormat::Packing::None }, { formats::SRGGB12, V4L2PixelFormat(V4L2_PIX_FMT_SRGGB12) } }, { { BayerFormat::BGGR, 12, BayerFormat::Packing::CSI2 }, { formats::SBGGR12_CSI2P, V4L2PixelFormat(V4L2_PIX_FMT_SBGGR12P) } }, { { BayerFormat::GBRG, 12, BayerFormat::Packing::CSI2 }, { formats::SGBRG12_CSI2P, V4L2PixelFormat(V4L2_PIX_FMT_SGBRG12P) } }, { { BayerFormat::GRBG, 12, BayerFormat::Packing::CSI2 }, { formats::SGRBG12_CSI2P, V4L2PixelFormat(V4L2_PIX_FMT_SGRBG12P) } }, { { BayerFormat::RGGB, 12, BayerFormat::Packing::CSI2 }, { formats::SRGGB12_CSI2P, V4L2PixelFormat(V4L2_PIX_FMT_SRGGB12P) } }, { { BayerFormat::BGGR, 14, BayerFormat::Packing::None }, { 
formats::SBGGR14, V4L2PixelFormat(V4L2_PIX_FMT_SBGGR14) } }, { { BayerFormat::GBRG, 14, BayerFormat::Packing::None }, { formats::SGBRG14, V4L2PixelFormat(V4L2_PIX_FMT_SGBRG14) } }, { { BayerFormat::GRBG, 14, BayerFormat::Packing::None }, { formats::SGRBG14, V4L2PixelFormat(V4L2_PIX_FMT_SGRBG14) } }, { { BayerFormat::RGGB, 14, BayerFormat::Packing::None }, { formats::SRGGB14, V4L2PixelFormat(V4L2_PIX_FMT_SRGGB14) } }, { { BayerFormat::BGGR, 14, BayerFormat::Packing::CSI2 }, { formats::SBGGR14_CSI2P, V4L2PixelFormat(V4L2_PIX_FMT_SBGGR14P) } }, { { BayerFormat::GBRG, 14, BayerFormat::Packing::CSI2 }, { formats::SGBRG14_CSI2P, V4L2PixelFormat(V4L2_PIX_FMT_SGBRG14P) } }, { { BayerFormat::GRBG, 14, BayerFormat::Packing::CSI2 }, { formats::SGRBG14_CSI2P, V4L2PixelFormat(V4L2_PIX_FMT_SGRBG14P) } }, { { BayerFormat::RGGB, 14, BayerFormat::Packing::CSI2 }, { formats::SRGGB14_CSI2P, V4L2PixelFormat(V4L2_PIX_FMT_SRGGB14P) } }, { { BayerFormat::BGGR, 16, BayerFormat::Packing::None }, { formats::SBGGR16, V4L2PixelFormat(V4L2_PIX_FMT_SBGGR16) } }, { { BayerFormat::GBRG, 16, BayerFormat::Packing::None }, { formats::SGBRG16, V4L2PixelFormat(V4L2_PIX_FMT_SGBRG16) } }, { { BayerFormat::GRBG, 16, BayerFormat::Packing::None }, { formats::SGRBG16, V4L2PixelFormat(V4L2_PIX_FMT_SGRBG16) } }, { { BayerFormat::RGGB, 16, BayerFormat::Packing::None }, { formats::SRGGB16, V4L2PixelFormat(V4L2_PIX_FMT_SRGGB16) } }, { { BayerFormat::BGGR, 16, BayerFormat::Packing::PISP1 }, { formats::BGGR_PISP_COMP1, V4L2PixelFormat(V4L2_PIX_FMT_PISP_COMP1_BGGR) } }, { { BayerFormat::GBRG, 16, BayerFormat::Packing::PISP1 }, { formats::GBRG_PISP_COMP1, V4L2PixelFormat(V4L2_PIX_FMT_PISP_COMP1_GBRG) } }, { { BayerFormat::GRBG, 16, BayerFormat::Packing::PISP1 }, { formats::GRBG_PISP_COMP1, V4L2PixelFormat(V4L2_PIX_FMT_PISP_COMP1_GRBG) } }, { { BayerFormat::RGGB, 16, BayerFormat::Packing::PISP1 }, { formats::RGGB_PISP_COMP1, V4L2PixelFormat(V4L2_PIX_FMT_PISP_COMP1_RGGB) } }, { { BayerFormat::MONO, 8, BayerFormat::Packing::None }, { formats::R8, V4L2PixelFormat(V4L2_PIX_FMT_GREY) } }, { { BayerFormat::MONO, 10, BayerFormat::Packing::None }, { formats::R10, V4L2PixelFormat(V4L2_PIX_FMT_Y10) } }, { { BayerFormat::MONO, 10, BayerFormat::Packing::CSI2 }, { formats::R10_CSI2P, V4L2PixelFormat(V4L2_PIX_FMT_Y10P) } }, { { BayerFormat::MONO, 12, BayerFormat::Packing::None }, { formats::R12, V4L2PixelFormat(V4L2_PIX_FMT_Y12) } }, { { BayerFormat::MONO, 16, BayerFormat::Packing::None }, { formats::R16, V4L2PixelFormat(V4L2_PIX_FMT_Y16) } }, { { BayerFormat::MONO, 16, BayerFormat::Packing::PISP1 }, { formats::MONO_PISP_COMP1, V4L2PixelFormat(V4L2_PIX_FMT_PISP_COMP1_MONO) } }, }; const std::unordered_map<unsigned int, BayerFormat> mbusCodeToBayer{ { MEDIA_BUS_FMT_SBGGR8_1X8, { BayerFormat::BGGR, 8, BayerFormat::Packing::None } }, { MEDIA_BUS_FMT_SGBRG8_1X8, { BayerFormat::GBRG, 8, BayerFormat::Packing::None } }, { MEDIA_BUS_FMT_SGRBG8_1X8, { BayerFormat::GRBG, 8, BayerFormat::Packing::None } }, { MEDIA_BUS_FMT_SRGGB8_1X8, { BayerFormat::RGGB, 8, BayerFormat::Packing::None } }, { MEDIA_BUS_FMT_SBGGR10_ALAW8_1X8, { BayerFormat::BGGR, 8, BayerFormat::Packing::None } }, { MEDIA_BUS_FMT_SGBRG10_ALAW8_1X8, { BayerFormat::GBRG, 8, BayerFormat::Packing::None } }, { MEDIA_BUS_FMT_SGRBG10_ALAW8_1X8, { BayerFormat::GRBG, 8, BayerFormat::Packing::None } }, { MEDIA_BUS_FMT_SRGGB10_ALAW8_1X8, { BayerFormat::RGGB, 8, BayerFormat::Packing::None } }, { MEDIA_BUS_FMT_SBGGR10_DPCM8_1X8, { BayerFormat::BGGR, 8, BayerFormat::Packing::None } }, { 
MEDIA_BUS_FMT_SGBRG10_DPCM8_1X8, { BayerFormat::GBRG, 8, BayerFormat::Packing::None } }, { MEDIA_BUS_FMT_SGRBG10_DPCM8_1X8, { BayerFormat::GRBG, 8, BayerFormat::Packing::None } }, { MEDIA_BUS_FMT_SRGGB10_DPCM8_1X8, { BayerFormat::RGGB, 8, BayerFormat::Packing::None } }, { MEDIA_BUS_FMT_SBGGR10_2X8_PADHI_BE, { BayerFormat::BGGR, 10, BayerFormat::Packing::None } }, { MEDIA_BUS_FMT_SBGGR10_2X8_PADHI_LE, { BayerFormat::BGGR, 10, BayerFormat::Packing::None } }, { MEDIA_BUS_FMT_SBGGR10_2X8_PADLO_BE, { BayerFormat::BGGR, 10, BayerFormat::Packing::None } }, { MEDIA_BUS_FMT_SBGGR10_2X8_PADLO_LE, { BayerFormat::BGGR, 10, BayerFormat::Packing::None } }, { MEDIA_BUS_FMT_SBGGR10_1X10, { BayerFormat::BGGR, 10, BayerFormat::Packing::None } }, { MEDIA_BUS_FMT_SGBRG10_1X10, { BayerFormat::GBRG, 10, BayerFormat::Packing::None } }, { MEDIA_BUS_FMT_SGRBG10_1X10, { BayerFormat::GRBG, 10, BayerFormat::Packing::None } }, { MEDIA_BUS_FMT_SRGGB10_1X10, { BayerFormat::RGGB, 10, BayerFormat::Packing::None } }, { MEDIA_BUS_FMT_SBGGR12_1X12, { BayerFormat::BGGR, 12, BayerFormat::Packing::None } }, { MEDIA_BUS_FMT_SGBRG12_1X12, { BayerFormat::GBRG, 12, BayerFormat::Packing::None } }, { MEDIA_BUS_FMT_SGRBG12_1X12, { BayerFormat::GRBG, 12, BayerFormat::Packing::None } }, { MEDIA_BUS_FMT_SRGGB12_1X12, { BayerFormat::RGGB, 12, BayerFormat::Packing::None } }, { MEDIA_BUS_FMT_SBGGR14_1X14, { BayerFormat::BGGR, 14, BayerFormat::Packing::None } }, { MEDIA_BUS_FMT_SGBRG14_1X14, { BayerFormat::GBRG, 14, BayerFormat::Packing::None } }, { MEDIA_BUS_FMT_SGRBG14_1X14, { BayerFormat::GRBG, 14, BayerFormat::Packing::None } }, { MEDIA_BUS_FMT_SRGGB14_1X14, { BayerFormat::RGGB, 14, BayerFormat::Packing::None } }, { MEDIA_BUS_FMT_SBGGR16_1X16, { BayerFormat::BGGR, 16, BayerFormat::Packing::None } }, { MEDIA_BUS_FMT_SGBRG16_1X16, { BayerFormat::GBRG, 16, BayerFormat::Packing::None } }, { MEDIA_BUS_FMT_SGRBG16_1X16, { BayerFormat::GRBG, 16, BayerFormat::Packing::None } }, { MEDIA_BUS_FMT_SRGGB16_1X16, { BayerFormat::RGGB, 16, BayerFormat::Packing::None } }, { MEDIA_BUS_FMT_Y8_1X8, { BayerFormat::MONO, 8, BayerFormat::Packing::None } }, { MEDIA_BUS_FMT_Y10_1X10, { BayerFormat::MONO, 10, BayerFormat::Packing::None } }, { MEDIA_BUS_FMT_Y12_1X12, { BayerFormat::MONO, 12, BayerFormat::Packing::None } }, { MEDIA_BUS_FMT_Y16_1X16, { BayerFormat::MONO, 16, BayerFormat::Packing::None } }, }; } /* namespace */ /** * \fn BayerFormat::BayerFormat() * \brief Construct an empty (and invalid) BayerFormat */ /** * \fn BayerFormat::BayerFormat(Order o, uint8_t b, Packing p) * \brief Construct a BayerFormat from explicit values * \param[in] o The order of the Bayer pattern * \param[in] b The bit depth of the Bayer samples * \param[in] p The type of packing applied to the pixel values */ /** * \brief Retrieve the BayerFormat associated with a media bus code * \param[in] mbusCode The media bus code to convert into a BayerFormat * * The media bus code numeric identifiers are defined by the V4L2 specification. 
*/ const BayerFormat &BayerFormat::fromMbusCode(unsigned int mbusCode) { static BayerFormat empty; const auto it = mbusCodeToBayer.find(mbusCode); if (it == mbusCodeToBayer.end()) return empty; else return it->second; } /** * \fn BayerFormat::isValid() * \brief Return whether a BayerFormat is valid */ /** * \brief Assemble and return a readable string representation of the * BayerFormat * \return A string describing the BayerFormat */ std::string BayerFormat::toString() const { std::stringstream ss; ss << *this; return ss.str(); } /** * \brief Compare two BayerFormats for equality * \return True if order, bitDepth and packing are equal, or false otherwise */ bool operator==(const BayerFormat &lhs, const BayerFormat &rhs) { return lhs.order == rhs.order && lhs.bitDepth == rhs.bitDepth && lhs.packing == rhs.packing; } /** * \brief Insert a text representation of a BayerFormat into an output stream * \param[in] out The output stream * \param[in] f The BayerFormat * \return The output stream \a out */ std::ostream &operator<<(std::ostream &out, const BayerFormat &f) { static const char *orderStrings[] = { "BGGR-", "GBRG-", "GRBG-", "RGGB-", "MONO-" }; if (!f.isValid() || f.order > BayerFormat::MONO) { out << "INVALID"; return out; } /* The cast is required to avoid bitDepth being interpreted as a char. */ out << orderStrings[f.order] << static_cast<unsigned int>(f.bitDepth); if (f.packing == BayerFormat::Packing::CSI2) out << "-CSI2P"; else if (f.packing == BayerFormat::Packing::IPU3) out << "-IPU3P"; else if (f.packing == BayerFormat::Packing::PISP1) out << "-PISP1"; else if (f.packing == BayerFormat::Packing::PISP2) out << "-PISP2"; return out; } /** * \fn bool operator!=(const BayerFormat &lhs, const BayerFormat &rhs) * \brief Compare two BayerFormats for inequality * \return True if either order, bitDepth or packing are not equal, or false * otherwise */ /** * \brief Convert a BayerFormat into the corresponding V4L2PixelFormat * \return The V4L2PixelFormat corresponding to this BayerFormat */ V4L2PixelFormat BayerFormat::toV4L2PixelFormat() const { const auto it = bayerToFormat.find(*this); if (it != bayerToFormat.end()) return it->second.v4l2Format; return V4L2PixelFormat(); } /** * \brief Convert \a v4l2Format to the corresponding BayerFormat * \param[in] v4l2Format The raw format to convert into a BayerFormat * \return The BayerFormat corresponding to \a v4l2Format */ BayerFormat BayerFormat::fromV4L2PixelFormat(V4L2PixelFormat v4l2Format) { auto it = std::find_if(bayerToFormat.begin(), bayerToFormat.end(), [v4l2Format](const auto &i) { return i.second.v4l2Format == v4l2Format; }); if (it != bayerToFormat.end()) return it->first; return BayerFormat(); } /** * \brief Convert a BayerFormat into the corresponding PixelFormat * \return The PixelFormat corresponding to this BayerFormat */ PixelFormat BayerFormat::toPixelFormat() const { const auto it = bayerToFormat.find(*this); if (it != bayerToFormat.end()) return it->second.pixelFormat; return PixelFormat(); } /** * \brief Convert a PixelFormat into the corresponding BayerFormat * \return The BayerFormat corresponding to this PixelFormat */ BayerFormat BayerFormat::fromPixelFormat(PixelFormat format) { const auto it = std::find_if(bayerToFormat.begin(), bayerToFormat.end(), [format](const auto &i) { return i.second.pixelFormat == format; }); if (it != bayerToFormat.end()) return it->first; return BayerFormat(); } /** * \brief Apply a transform to this BayerFormat * \param[in] t The transform to apply * * Applying a transform to an image 
stored in a Bayer format affects the Bayer * order. For example, performing a horizontal flip on the Bayer pattern RGGB * causes the RG rows of pixels to become GR, and the GB rows to become BG. The * transformed image would have a GRBG order. Performing a vertical flip on the * Bayer pattern RGGB causes the GB rows to come before the RG ones and the * transformed image would have GBRG order. Applying both vertical and * horizontal flips on the Bayer pattern RGGB results in transformed images with * BGGR order. The bit depth and packing are not affected. * * Horizontal and vertical flips are applied before transpose. * * \return The transformed Bayer format */ BayerFormat BayerFormat::transform(Transform t) const { BayerFormat result = *this; if (order == MONO) return result; /* * Observe that flipping bit 0 of the Order enum performs a horizontal * mirror on the Bayer pattern (e.g. RG/GB goes to GR/BG). Similarly, * flipping bit 1 performs a vertical mirror operation on it (e.g. RG/GB * goes to GB/RG). Applying both vertical and horizontal flips * combines vertical and horizontal mirroring on the Bayer pattern * (e.g. RG/GB goes to BG/GR). Hence: */ if (!!(t & Transform::HFlip)) result.order = static_cast<Order>(result.order ^ 1); if (!!(t & Transform::VFlip)) result.order = static_cast<Order>(result.order ^ 2); if (!!(t & Transform::Transpose) && result.order == 1) result.order = static_cast<Order>(2); else if (!!(t & Transform::Transpose) && result.order == 2) result.order = static_cast<Order>(1); return result; } /** * \var BayerFormat::order * \brief The order of the colour channels in the Bayer pattern */ /** * \var BayerFormat::bitDepth * \brief The bit depth of the samples in the Bayer pattern */ /** * \var BayerFormat::packing * \brief Any packing scheme applied to this BayerFormat */ } /* namespace libcamera */
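A short sketch (illustrative, assuming the lookup tables above) of how transform() and the format conversions compose: a horizontal flip toggles bit 0 of the order, so RGGB becomes GRBG while bit depth and packing are preserved.

#include <cassert>
#include <iostream>

#include <libcamera/formats.h>
#include <libcamera/transform.h>

#include "libcamera/internal/bayer_format.h"

using namespace libcamera;

int main()
{
	const BayerFormat fmt{ BayerFormat::RGGB, 10, BayerFormat::Packing::CSI2 };

	/* HFlip flips bit 0 of the order: RGGB -> GRBG. */
	BayerFormat flipped = fmt.transform(Transform::HFlip);
	assert(flipped.order == BayerFormat::GRBG);
	assert(flipped.bitDepth == 10);
	assert(flipped.packing == BayerFormat::Packing::CSI2);

	/* Prints "GRBG-10-CSI2P". */
	std::cout << flipped << std::endl;

	/* Table lookups map to the corresponding pixel formats. */
	assert(fmt.toPixelFormat() == formats::SRGGB10_CSI2P);
	assert(BayerFormat::fromPixelFormat(formats::SGRBG10_CSI2P) == flipped);

	return 0;
}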
0
repos/libcamera/src
repos/libcamera/src/libcamera/request.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2019, Google Inc. * * Capture request handling */ #include "libcamera/internal/request.h" #include <map> #include <sstream> #include <libcamera/base/log.h> #include <libcamera/camera.h> #include <libcamera/control_ids.h> #include <libcamera/fence.h> #include <libcamera/framebuffer.h> #include <libcamera/stream.h> #include "libcamera/internal/camera.h" #include "libcamera/internal/camera_controls.h" #include "libcamera/internal/framebuffer.h" #include "libcamera/internal/tracepoints.h" /** * \file libcamera/request.h * \brief Describes a frame capture request to be processed by a camera */ namespace libcamera { LOG_DEFINE_CATEGORY(Request) /** * \class Request::Private * \brief Request private data * * The Request::Private class stores all private data associated with a * request. It implements the d-pointer design pattern to hide core * Request data from the public API, and exposes utility functions to * internal users of the request (namely the PipelineHandler class and its * subclasses). */ /** * \brief Create a Request::Private * \param camera The Camera that creates the request */ Request::Private::Private(Camera *camera) : camera_(camera), cancelled_(false) { } Request::Private::~Private() { doCancelRequest(); } /** * \fn Request::Private::camera() * \brief Retrieve the camera this request has been queued to * \return The Camera this request has been queued to, or nullptr if the * request hasn't been queued */ /** * \brief Check if a request has buffers yet to be completed * * \return True if the request has buffers pending for completion, false * otherwise */ bool Request::Private::hasPendingBuffers() const { return !pending_.empty(); } /** * \brief Complete a buffer for the request * \param[in] buffer The buffer that has completed * * A request tracks the status of all buffers it contains through a set of * pending buffers. This function removes the \a buffer from the set to mark it * as complete. All buffers associated with the request shall be marked as * complete by calling this function once and once only before reporting the * request as complete with the complete() function. * * \return True if all buffers contained in the request have completed, false * otherwise */ bool Request::Private::completeBuffer(FrameBuffer *buffer) { LIBCAMERA_TRACEPOINT(request_complete_buffer, this, buffer); int ret = pending_.erase(buffer); ASSERT(ret == 1); buffer->_d()->setRequest(nullptr); if (buffer->metadata().status == FrameMetadata::FrameCancelled) cancelled_ = true; return !hasPendingBuffers(); } /** * \brief Complete a queued request * * Mark the request as complete by updating its status to RequestComplete, * unless buffers have been cancelled in which case the status is set to * RequestCancelled. */ void Request::Private::complete() { Request *request = _o<Request>(); ASSERT(request->status() == RequestPending); ASSERT(!hasPendingBuffers()); request->status_ = cancelled_ ? RequestCancelled : RequestComplete; LOG(Request, Debug) << request->toString(); LIBCAMERA_TRACEPOINT(request_complete, this); } void Request::Private::doCancelRequest() { Request *request = _o<Request>(); for (FrameBuffer *buffer : pending_) { buffer->_d()->cancel(); camera_->bufferCompleted.emit(request, buffer); } cancelled_ = true; pending_.clear(); notifiers_.clear(); timer_.reset(); } /** * \brief Cancel a queued request * * Mark the request and its associated buffers as cancelled and complete it. 
* * Set each pending buffer in error state and emit the buffer completion signal * before completing the Request. */ void Request::Private::cancel() { LIBCAMERA_TRACEPOINT(request_cancel, this); Request *request = _o<Request>(); ASSERT(request->status() == RequestPending); doCancelRequest(); } /** * \brief Reset the request internal data to default values * * After calling this function, all request internal data will have default * values as if the Request::Private instance had just been constructed. */ void Request::Private::reset() { sequence_ = 0; cancelled_ = false; prepared_ = false; pending_.clear(); notifiers_.clear(); timer_.reset(); } /* * Helper function to save some lines of code and make sure prepared_ is set * to true before emitting the signal. */ void Request::Private::emitPrepareCompleted() { prepared_ = true; prepared.emit(); } /** * \brief Prepare the Request to be queued to the device * \param[in] timeout Optional expiration timeout * * Prepare a Request to be queued to the hardware device by ensuring it is * ready for the incoming memory transfers. * * This currently means waiting on each frame buffer acquire fence to be * signalled. An optional expiration timeout can be specified. If not all the * fences have been signalled correctly before the timeout expires the Request * is cancelled. * * The function immediately emits the prepared signal if all the prepare * operations have been completed synchronously. If instead the prepare * operations require to wait the completion of asynchronous events, such as * fences notifications or timer expiration, the prepared signal is emitted upon * the asynchronous event completion. * * As we currently only handle fences, the function emits the prepared signal * immediately if there are no fences to wait on. Otherwise the prepared signal * is emitted when all fences have been signalled or the optional timeout has * expired. * * If not all the fences have been correctly signalled or the optional timeout * has expired the Request will be cancelled and the Request::prepared signal * emitted. * * The intended user of this function is the PipelineHandler base class, which * 'prepares' a Request before queuing it to the hardware device. */ void Request::Private::prepare(std::chrono::milliseconds timeout) { /* Create and connect one notifier for each synchronization fence. */ for (FrameBuffer *buffer : pending_) { const Fence *fence = buffer->_d()->fence(); if (!fence) continue; std::unique_ptr<EventNotifier> notifier = std::make_unique<EventNotifier>(fence->fd().get(), EventNotifier::Read); notifier->activated.connect(this, [this, buffer] { notifierActivated(buffer); }); notifiers_[buffer] = std::move(notifier); } if (notifiers_.empty()) { emitPrepareCompleted(); return; } /* * In case a timeout is specified, create a timer and set it up. * * The timer must be created here instead of in the Request constructor, * in order to be bound to the pipeline handler thread. */ if (timeout != 0ms) { timer_ = std::make_unique<Timer>(); timer_->timeout.connect(this, &Request::Private::timeout); timer_->start(timeout); } } /** * \var Request::Private::prepared * \brief Request preparation completed Signal * * The signal is emitted once the request preparation has completed and is ready * to be queued. The Request might complete with errors in which case it is * cancelled. 
* * The intended slot for this signal is the PipelineHandler::doQueueRequests() * function which queues Request after they have been prepared or cancel them * if they have failed preparing. */ void Request::Private::notifierActivated(FrameBuffer *buffer) { /* Close the fence if successfully signalled. */ ASSERT(buffer); buffer->releaseFence(); /* Remove the entry from the map and check if other fences are pending. */ auto it = notifiers_.find(buffer); ASSERT(it != notifiers_.end()); notifiers_.erase(it); Request *request = _o<Request>(); LOG(Request, Debug) << "Request " << request->cookie() << " buffer " << buffer << " fence signalled"; if (!notifiers_.empty()) return; /* All fences completed, delete the timer and emit the prepared signal. */ timer_.reset(); emitPrepareCompleted(); } void Request::Private::timeout() { /* A timeout can only happen if there are fences not yet signalled. */ ASSERT(!notifiers_.empty()); notifiers_.clear(); Request *request = _o<Request>(); LOG(Request, Debug) << "Request prepare timeout: " << request->cookie(); cancel(); emitPrepareCompleted(); } /** * \enum Request::Status * Request completion status * \var Request::RequestPending * The request hasn't completed yet * \var Request::RequestComplete * The request has completed * \var Request::RequestCancelled * The request has been cancelled due to capture stop */ /** * \enum Request::ReuseFlag * Flags to control the behavior of Request::reuse() * \var Request::Default * Don't reuse buffers * \var Request::ReuseBuffers * Reuse the buffers that were previously added by addBuffer() */ /** * \typedef Request::BufferMap * \brief A map of Stream to FrameBuffer pointers */ /** * \class Request * \brief A frame capture request * * A Request allows an application to associate buffers and controls on a * per-frame basis to be queued to the camera device for processing. */ /** * \brief Create a capture request for a camera * \param[in] camera The camera that creates the request * \param[in] cookie Opaque cookie for application use * * The \a cookie is stored in the request and is accessible through the * cookie() function at any time. It is typically used by applications to map * the request to an external resource in the request completion handler, and is * completely opaque to libcamera. */ Request::Request(Camera *camera, uint64_t cookie) : Extensible(std::make_unique<Private>(camera)), cookie_(cookie), status_(RequestPending) { controls_ = new ControlList(controls::controls, camera->_d()->validator()); /** * \todo Add a validator for metadata controls. */ metadata_ = new ControlList(controls::controls); LIBCAMERA_TRACEPOINT(request_construct, this); LOG(Request, Debug) << "Created request - cookie: " << cookie_; } Request::~Request() { LIBCAMERA_TRACEPOINT(request_destroy, this); delete metadata_; delete controls_; } /** * \brief Reset the request for reuse * \param[in] flags Indicate whether or not to reuse the buffers * * Reset the status and controls associated with the request, to allow it to * be reused and requeued without destruction. This function shall be called * prior to queueing the request to the camera, in lieu of constructing a new * request. The application can reuse the buffers that were previously added * to the request via addBuffer() by setting \a flags to ReuseBuffers. 
*/ void Request::reuse(ReuseFlag flags) { LIBCAMERA_TRACEPOINT(request_reuse, this); _d()->reset(); if (flags & ReuseBuffers) { for (auto pair : bufferMap_) { FrameBuffer *buffer = pair.second; buffer->_d()->setRequest(this); _d()->pending_.insert(buffer); } } else { bufferMap_.clear(); } status_ = RequestPending; controls_->clear(); metadata_->clear(); } /** * \fn Request::controls() * \brief Retrieve the request's ControlList * * Requests store a list of controls to be applied to all frames captured for * the request. They are created with an empty list of controls that can be * accessed through this function. Control values can be retrieved using * ControlList::get() and updated using ControlList::set(). * * Only controls supported by the camera to which this request will be * submitted shall be included in the controls list. Attempting to add an * unsupported control causes undefined behaviour. * * \return A reference to the ControlList in this request */ /** * \fn Request::buffers() * \brief Retrieve the request's streams to buffers map * * Return a reference to the map that associates each Stream part of the * request to the FrameBuffer the Stream output should be directed to. * * \return The map of Stream to FrameBuffer */ /** * \brief Add a FrameBuffer with its associated Stream to the Request * \param[in] stream The stream the buffer belongs to * \param[in] buffer The FrameBuffer to add to the request * \param[in] fence The optional fence * * A reference to the buffer is stored in the request. The caller is responsible * for ensuring that the buffer will remain valid until the request complete * callback is called. * * A request can only contain one buffer per stream. If a buffer has already * been added to the request for the same stream, this function returns -EEXIST. * * A Fence can be optionally associated with the \a buffer. * * When a valid Fence is provided to this function, \a fence is moved to \a * buffer and this Request will only be queued to the device once the * fences of all its buffers have been correctly signalled. * * If the \a fence associated with \a buffer isn't signalled, the request will * fail after a timeout. The buffer will still contain the fence, which * applications must retrieve with FrameBuffer::releaseFence() before the buffer * can be reused in another request. Attempting to add a buffer that still * contains a fence to a request will result in this function returning -EEXIST. * * \sa FrameBuffer::releaseFence() * * \return 0 on success or a negative error code otherwise * \retval -EEXIST The request already contains a buffer for the stream * or the buffer still references a fence * \retval -EINVAL The buffer does not reference a valid Stream */ int Request::addBuffer(const Stream *stream, FrameBuffer *buffer, std::unique_ptr<Fence> fence) { if (!stream) { LOG(Request, Error) << "Invalid stream reference"; return -EINVAL; } auto it = bufferMap_.find(stream); if (it != bufferMap_.end()) { LOG(Request, Error) << "FrameBuffer already set for stream"; return -EEXIST; } buffer->_d()->setRequest(this); _d()->pending_.insert(buffer); bufferMap_[stream] = buffer; /* * Make sure the fence has been extracted from the buffer * to avoid waiting on a stale fence. 
*/ if (buffer->_d()->fence()) { LOG(Request, Error) << "Can't add buffer that still references a fence"; return -EEXIST; } if (fence && fence->isValid()) buffer->_d()->setFence(std::move(fence)); return 0; } /** * \var Request::bufferMap_ * \brief Mapping of streams to buffers for this request * * The bufferMap_ tracks the buffers associated with each stream. If a stream is * not utilised in this request there will be no buffer for that stream in the * map. */ /** * \brief Return the buffer associated with a stream * \param[in] stream The stream the buffer is associated to * \return The buffer associated with the stream, or nullptr if the stream is * not part of this request */ FrameBuffer *Request::findBuffer(const Stream *stream) const { const auto it = bufferMap_.find(stream); if (it == bufferMap_.end()) return nullptr; return it->second; } /** * \fn Request::metadata() * \brief Retrieve the request's metadata * \todo Offer a read-only API towards applications while keeping a read/write * API internally. * \return The metadata associated with the request */ /** * \brief Retrieve the sequence number for the request * * When requests are queued, they are given a sequential number to track the * order in which requests are queued to a camera. This number counts all * requests given to a camera and is reset to zero between camera stop/start * sequences. * * It can be used to support debugging and identifying the flow of requests * through a pipeline, but does not guarantee to represent the sequence number * of any images in the stream. The sequence number is stored as an unsigned * integer and will wrap when overflowed. * * \return The request sequence number */ uint32_t Request::sequence() const { return _d()->sequence_; } /** * \fn Request::cookie() * \brief Retrieve the cookie set when the request was created * \return The request cookie */ /** * \fn Request::status() * \brief Retrieve the request completion status * * The request status indicates whether the request has completed successfully * or with an error. When requests are created and before they complete the * request status is set to RequestPending, and is updated at completion time * to RequestComplete. If a request is cancelled at capture stop before it has * completed, its status is set to RequestCancelled. * * \return The request completion status */ /** * \brief Check if a request has buffers yet to be completed * * \return True if the request has buffers pending for completion, false * otherwise */ bool Request::hasPendingBuffers() const { return !_d()->pending_.empty(); } /** * \brief Generate a string representation of the Request internals * * This function facilitates debugging of Request state while it is used * internally within libcamera. * * \return A string representing the current state of the request */ std::string Request::toString() const { std::stringstream ss; ss << *this; return ss.str(); } /** * \brief Insert a text representation of a Request into an output stream * \param[in] out The output stream * \param[in] r The Request * \return The output stream \a out */ std::ostream &operator<<(std::ostream &out, const Request &r) { /* Pending, Completed, Cancelled(X). */ static const char *statuses = "PCX"; /* Example Output: Request(55:P:1/2:6523524) */ out << "Request(" << r.sequence() << ":" << statuses[r.status()] << ":" << r._d()->pending_.size() << "/" << r.buffers().size() << ":" << r.cookie() << ")"; return out; } } /* namespace libcamera */
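A condensed usage sketch of the request lifecycle documented above (illustrative; the camera, stream and buffer are assumed to come from the usual configuration and allocation steps, and the application must keep each request alive until it completes):

#include <cerrno>
#include <memory>
#include <vector>

#include <libcamera/camera.h>
#include <libcamera/control_ids.h>
#include <libcamera/framebuffer.h>
#include <libcamera/request.h>
#include <libcamera/stream.h>

using namespace libcamera;

/* Create and queue one request; the caller's vector keeps it alive. */
int queueOne(Camera *camera, Stream *stream, FrameBuffer *buffer,
	     std::vector<std::unique_ptr<Request>> &requests)
{
	std::unique_ptr<Request> request = camera->createRequest();
	if (!request)
		return -ENOMEM;

	/* Per-frame controls travel with the request. */
	request->controls().set(controls::ExposureTime, 10000);

	int ret = request->addBuffer(stream, buffer);
	if (ret < 0)
		return ret; /* -EEXIST or -EINVAL, see addBuffer() above. */

	ret = camera->queueRequest(request.get());
	if (ret < 0)
		return ret;

	requests.push_back(std::move(request));
	return 0;
}

/* Typical Camera::requestCompleted handler: recycle and requeue. */
void onRequestCompleted(Camera *camera, Request *request)
{
	if (request->status() == Request::RequestCancelled)
		return;

	request->reuse(Request::ReuseBuffers);
	camera->queueRequest(request);
}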
0
repos/libcamera/src
repos/libcamera/src/libcamera/camera_manager.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2018, Google Inc. * * Camera management */ #include "libcamera/internal/camera_manager.h" #include <libcamera/base/log.h> #include <libcamera/base/utils.h> #include <libcamera/camera.h> #include <libcamera/property_ids.h> #include "libcamera/internal/camera.h" #include "libcamera/internal/device_enumerator.h" #include "libcamera/internal/pipeline_handler.h" /** * \file libcamera/camera_manager.h * \brief The camera manager */ /** * \file libcamera/internal/camera_manager.h * \brief Internal camera manager support */ /** * \brief Top-level libcamera namespace */ namespace libcamera { LOG_DEFINE_CATEGORY(Camera) CameraManager::Private::Private() : initialized_(false) { } int CameraManager::Private::start() { int status; /* Start the thread and wait for initialization to complete. */ Thread::start(); { MutexLocker locker(mutex_); cv_.wait(locker, [&]() LIBCAMERA_TSA_REQUIRES(mutex_) { return initialized_; }); status = status_; } /* If a failure happened during initialization, stop the thread. */ if (status < 0) { exit(); wait(); return status; } return 0; } void CameraManager::Private::run() { LOG(Camera, Debug) << "Starting camera manager"; int ret = init(); mutex_.lock(); status_ = ret; initialized_ = true; mutex_.unlock(); cv_.notify_one(); if (ret < 0) return; /* Now start processing events and messages. */ exec(); cleanup(); } int CameraManager::Private::init() { enumerator_ = DeviceEnumerator::create(); if (!enumerator_ || enumerator_->enumerate()) return -ENODEV; createPipelineHandlers(); enumerator_->devicesAdded.connect(this, &Private::createPipelineHandlers); return 0; } void CameraManager::Private::createPipelineHandlers() { /* * \todo Try to read handlers and order from configuration * file and only fallback on environment variable or all handlers, if * there is no configuration file. */ const char *pipesList = utils::secure_getenv("LIBCAMERA_PIPELINES_MATCH_LIST"); if (pipesList) { /* * When a list of preferred pipelines is defined, iterate * through the ordered list to match the enumerated devices. */ for (const auto &pipeName : utils::split(pipesList, ",")) { const PipelineHandlerFactoryBase *factory; factory = PipelineHandlerFactoryBase::getFactoryByName(pipeName); if (!factory) continue; LOG(Camera, Debug) << "Found listed pipeline handler '" << pipeName << "'"; pipelineFactoryMatch(factory); } return; } const std::vector<PipelineHandlerFactoryBase *> &factories = PipelineHandlerFactoryBase::factories(); /* Match all the registered pipeline handlers. */ for (const PipelineHandlerFactoryBase *factory : factories) { LOG(Camera, Debug) << "Found registered pipeline handler '" << factory->name() << "'"; /* * Try each pipeline handler until it exhaust * all pipelines it can provide. */ pipelineFactoryMatch(factory); } } void CameraManager::Private::pipelineFactoryMatch(const PipelineHandlerFactoryBase *factory) { CameraManager *const o = LIBCAMERA_O_PTR(); /* Provide as many matching pipelines as possible. */ while (1) { std::shared_ptr<PipelineHandler> pipe = factory->create(o); if (!pipe->match(enumerator_.get())) break; LOG(Camera, Debug) << "Pipeline handler \"" << factory->name() << "\" matched"; } } void CameraManager::Private::cleanup() { enumerator_->devicesAdded.disconnect(this); /* * Release all references to cameras to ensure they all get destroyed * before the device enumerator deletes the media devices. 
Cameras are * destroyed via Object::deleteLater() API, hence we need to explicitly * process deletion requests from the thread's message queue as the event * loop is not in action here. */ { MutexLocker locker(mutex_); cameras_.clear(); } dispatchMessages(Message::Type::DeferredDelete); enumerator_.reset(nullptr); } /** * \brief Add a camera to the camera manager * \param[in] camera The camera to be added * * This function is called by pipeline handlers to register the cameras they * handle with the camera manager. Registered cameras are immediately made * available to the system. * * Device numbers from the SystemDevices property are used by the V4L2 * compatibility layer to map V4L2 device nodes to Camera instances. * * \context This function shall be called from the CameraManager thread. */ void CameraManager::Private::addCamera(std::shared_ptr<Camera> camera) { ASSERT(Thread::current() == this); MutexLocker locker(mutex_); for (const std::shared_ptr<Camera> &c : cameras_) { if (c->id() == camera->id()) { LOG(Camera, Fatal) << "Trying to register a camera with a duplicated ID '" << camera->id() << "'"; return; } } cameras_.push_back(std::move(camera)); unsigned int index = cameras_.size() - 1; /* Report the addition to the public signal */ CameraManager *const o = LIBCAMERA_O_PTR(); o->cameraAdded.emit(cameras_[index]); } /** * \brief Remove a camera from the camera manager * \param[in] camera The camera to be removed * * This function is called by pipeline handlers to unregister cameras from the * camera manager. Unregistered cameras won't be reported anymore by the * cameras() and get() calls, but references may still exist in applications. * * \context This function shall be called from the CameraManager thread. */ void CameraManager::Private::removeCamera(std::shared_ptr<Camera> camera) { ASSERT(Thread::current() == this); MutexLocker locker(mutex_); auto iter = std::find_if(cameras_.begin(), cameras_.end(), [camera](std::shared_ptr<Camera> &c) { return c.get() == camera.get(); }); if (iter == cameras_.end()) return; LOG(Camera, Debug) << "Unregistering camera '" << camera->id() << "'"; cameras_.erase(iter); /* Report the removal to the public signal */ CameraManager *const o = LIBCAMERA_O_PTR(); o->cameraRemoved.emit(camera); } /** * \class CameraManager * \brief Provide access and manage all cameras in the system * * The camera manager is the entry point to libcamera. It enumerates devices, * associates them with pipeline managers, and provides access to the cameras * in the system to applications. The manager owns all Camera objects and * handles hot-plugging and hot-unplugging to manage the lifetime of cameras. * * To interact with libcamera, an application starts by creating a camera * manager instance. Only a single instance of the camera manager may exist at * a time. Attempting to create a second instance without first deleting the * existing instance results in undefined behaviour. * * The manager is initially stopped, and shall be started with start(). This * will enumerate all the cameras present in the system, which can then be * listed with list() and retrieved with get(). * * Cameras are shared through std::shared_ptr<>, ensuring that a camera will * stay valid until the last reference is released without requiring any special * action from the application. Once the application has released all the * references it held to cameras, the camera manager can be stopped with * stop(). 
*/ CameraManager *CameraManager::self_ = nullptr; CameraManager::CameraManager() : Extensible(std::make_unique<CameraManager::Private>()) { if (self_) LOG(Camera, Fatal) << "Multiple CameraManager objects are not allowed"; self_ = this; } /** * \brief Destroy the camera manager * * Destroying the camera manager stops it if it is currently running. */ CameraManager::~CameraManager() { stop(); self_ = nullptr; } /** * \brief Start the camera manager * * Start the camera manager and enumerate all devices in the system. Once * the start has been confirmed the user is free to list and otherwise * interact with cameras in the system until either the camera manager * is stopped or the camera is unplugged from the system. * * \return 0 on success or a negative error code otherwise */ int CameraManager::start() { LOG(Camera, Info) << "libcamera " << version_; int ret = _d()->start(); if (ret) LOG(Camera, Error) << "Failed to start camera manager: " << strerror(-ret); return ret; } /** * \brief Stop the camera manager * * Before stopping the camera manager the caller is responsible for making * sure all cameras provided by the manager are returned to the manager. * * After the manager has been stopped no resource provided by the camera * manager should be considered valid or functional, even if for one * reason or another it has yet to be deleted. */ void CameraManager::stop() { Private *const d = _d(); d->exit(); d->wait(); } /** * \fn CameraManager::cameras() * \brief Retrieve all available cameras * * Before calling this function the caller is responsible for ensuring that * the camera manager is running. * * \context This function is \threadsafe. * * \return List of all available cameras */ std::vector<std::shared_ptr<Camera>> CameraManager::cameras() const { const Private *const d = _d(); MutexLocker locker(d->mutex_); return d->cameras_; } /** * \brief Get a camera based on ID * \param[in] id ID of camera to get * * Before calling this function the caller is responsible for ensuring that * the camera manager is running. * * \context This function is \threadsafe. * * \return Shared pointer to Camera object or nullptr if camera not found */ std::shared_ptr<Camera> CameraManager::get(const std::string &id) { Private *const d = _d(); MutexLocker locker(d->mutex_); for (std::shared_ptr<Camera> camera : d->cameras_) { if (camera->id() == id) return camera; } return nullptr; } /** * \var CameraManager::cameraAdded * \brief Notify of a new camera added to the system * * This signal is emitted when a new camera is detected and successfully handled * by the camera manager. The notification occurs both for cameras detected * when the manager is started with start() and for cameras later * connected to the system. When the signal is emitted the new camera is already * available from the list of cameras(). * * The signal is emitted from the CameraManager thread. Applications shall * minimize the time spent in the signal handler and shall in particular not * perform any blocking operation. */ /** * \var CameraManager::cameraRemoved * \brief Notify of a camera removed from the system * * This signal is emitted when a camera is removed from the system. When the * signal is emitted the camera is not available from the list of cameras() * anymore. * * The signal is emitted from the CameraManager thread. Applications shall * minimize the time spent in the signal handler and shall in particular not * perform any blocking operation. 
*/ /** * \fn const std::string &CameraManager::version() * \brief Retrieve the libcamera version string * \context This function is \a threadsafe. * \return The libcamera version string */ } /* namespace libcamera */
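A minimal application-side sketch of the manager lifecycle described above (illustrative):

#include <iostream>
#include <memory>

#include <libcamera/camera.h>
#include <libcamera/camera_manager.h>

using namespace libcamera;

int main()
{
	/* Only one CameraManager instance may exist at a time. */
	auto cm = std::make_unique<CameraManager>();

	int ret = cm->start();
	if (ret)
		return ret;

	/* The manager owns the Camera instances it exposes. */
	for (const std::shared_ptr<Camera> &camera : cm->cameras())
		std::cout << camera->id() << std::endl;

	/* Lookup by ID returns nullptr for unknown cameras. */
	std::shared_ptr<Camera> camera = cm->get("does-not-exist");

	/* Release all camera references before stopping the manager. */
	camera.reset();
	cm->stop();

	return 0;
}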
0
repos/libcamera/src
repos/libcamera/src/libcamera/property_ids_draft.yaml
# SPDX-License-Identifier: LGPL-2.1-or-later # # Copyright (C) 2019, Google Inc. # %YAML 1.1 --- vendor: draft controls: - ColorFilterArrangement: type: int32_t vendor: draft description: | The arrangement of color filters on sensor; represents the colors in the top-left 2x2 section of the sensor, in reading order. Currently identical to ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT. enum: - name: RGGB value: 0 description: RGGB Bayer pattern - name: GRBG value: 1 description: GRBG Bayer pattern - name: GBRG value: 2 description: GBRG Bayer pattern - name: BGGR value: 3 description: BGGR Bayer pattern - name: RGB value: 4 description: | Sensor is not Bayer; output has 3 16-bit values for each pixel, instead of just 1 16-bit value per pixel. - name: MONO value: 5 description: | Sensor is not Bayer; output consists of a single colour channel. ...
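A hedged sketch of reading this draft property from application code; the generated identifiers below (properties::draft::ColorFilterArrangement and the enum value properties::draft::RGGB) follow the usual property code-generation pattern and are assumptions, not verified output:

#include <libcamera/camera.h>
#include <libcamera/property_ids.h>

using namespace libcamera;

/* Check whether a camera advertises an RGGB colour filter array. */
bool isRggb(const Camera *camera)
{
	const ControlList &props = camera->properties();

	/* Assumed generated names, mirroring the YAML entries above. */
	const auto cfa = props.get(properties::draft::ColorFilterArrangement);
	return cfa && *cfa == properties::draft::RGGB;
}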
0
repos/libcamera/src
repos/libcamera/src/libcamera/ipa_pub_key.cpp.in
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2020, Laurent Pinchart <[email protected]> * * IPA module signing public key * * This file is auto-generated. Do not edit. */ #include "libcamera/internal/ipa_manager.h" namespace libcamera { #if HAVE_IPA_PUBKEY const uint8_t IPAManager::publicKeyData_[] = { ${ipa_key} }; const PubKey IPAManager::pubKey_{ { IPAManager::publicKeyData_ } }; #endif } /* namespace libcamera */
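After the build system substitutes ${ipa_key}, the generated file looks roughly like the sketch below; the byte values are purely illustrative (in practice they are the bytes of the IPA module signing public key):

/* Illustrative generated output: */
const uint8_t IPAManager::publicKeyData_[] = {
	0x30, 0x82, 0x01, 0x22, 0x30, 0x0d, 0x06, 0x09,
	/* ... remaining bytes of the public key ... */
};

const PubKey IPAManager::pubKey_{ { IPAManager::publicKeyData_ } };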
0
repos/libcamera/src
repos/libcamera/src/libcamera/color_space.cpp
/* SPDX-License-Identifier: LGPL-2.1-or-later */ /* * Copyright (C) 2021, Raspberry Pi Ltd * * color spaces. */ #include <libcamera/color_space.h> #include <algorithm> #include <array> #include <map> #include <sstream> #include <utility> #include <vector> #include <libcamera/base/utils.h> #include "libcamera/internal/formats.h" /** * \file color_space.h * \brief Class and enums to represent color spaces */ namespace libcamera { /** * \class ColorSpace * \brief Class to describe a color space * * The ColorSpace class defines the color primaries, the transfer function, * the Y'CbCr encoding associated with the color space, and the range * (sometimes also referred to as the quantisation) of the color space. * * Certain combinations of these fields form well-known standard color * spaces such as "sRGB" or "Rec709". * * In the strictest sense a "color space" formally only refers to the * color primaries and white point. Here, however, the ColorSpace class * adopts the common broader usage that includes the transfer function, * Y'CbCr encoding method and quantisation. * * More information on color spaces is available in the V4L2 documentation, see * in particular * * - <a href="https://www.kernel.org/doc/html/latest/userspace-api/media/v4l/colorspaces-details.html#col-srgb">sRGB</a> * - <a href="https://www.kernel.org/doc/html/latest/userspace-api/media/v4l/colorspaces-details.html#col-jpeg">JPEG</a> * - <a href="https://www.kernel.org/doc/html/latest/userspace-api/media/v4l/colorspaces-details.html#col-smpte-170m">SMPTE 170M</a> * - <a href="https://www.kernel.org/doc/html/latest/userspace-api/media/v4l/colorspaces-details.html#col-rec709">Rec.709</a> * - <a href="https://www.kernel.org/doc/html/latest/userspace-api/media/v4l/colorspaces-details.html#col-bt2020">Rec.2020</a> * * Note that there is no guarantee of a 1:1 mapping between color space names * and definitions in libcamera and V4L2. Two notable differences are * * - The sRGB libcamera color space is defined for RGB formats only with no * Y'CbCr encoding and a full quantization range, while the V4L2 SRGB color * space has a Y'CbCr encoding and a limited quantization range. * - The sYCC libcamera color space is called JPEG in V4L2 due to historical * reasons. 
* * \todo Define the color space fully in the libcamera API to avoid referencing * V4L2 */ /** * \enum ColorSpace::Primaries * \brief The color primaries for this color space * * \var ColorSpace::Primaries::Raw * \brief These are raw colors directly from a sensor, the primaries are * unspecified * * \var ColorSpace::Primaries::Smpte170m * \brief SMPTE 170M color primaries * * \var ColorSpace::Primaries::Rec709 * \brief Rec.709 color primaries * * \var ColorSpace::Primaries::Rec2020 * \brief Rec.2020 color primaries */ /** * \enum ColorSpace::TransferFunction * \brief The transfer function used for this color space * * \var ColorSpace::TransferFunction::Linear * \brief This color space uses a linear (identity) transfer function * * \var ColorSpace::TransferFunction::Srgb * \brief sRGB transfer function * * \var ColorSpace::TransferFunction::Rec709 * \brief Rec.709 transfer function */ /** * \enum ColorSpace::YcbcrEncoding * \brief The Y'CbCr encoding * * \var ColorSpace::YcbcrEncoding::None * \brief There is no defined Y'CbCr encoding (used for non-YUV formats) * * \var ColorSpace::YcbcrEncoding::Rec601 * \brief Rec.601 Y'CbCr encoding * * \var ColorSpace::YcbcrEncoding::Rec709 * \brief Rec.709 Y'CbCr encoding * * \var ColorSpace::YcbcrEncoding::Rec2020 * \brief Rec.2020 Y'CbCr encoding */ /** * \enum ColorSpace::Range * \brief The range (sometimes "quantisation") for this color space * * \var ColorSpace::Range::Full * \brief This color space uses full range pixel values * * \var ColorSpace::Range::Limited * \brief This color space uses limited range pixel values, being * 16 to 235 for Y' and 16 to 240 for Cb and Cr (8 bits per sample) * or 64 to 940 for Y' and 16 to 960 for Cb and Cr (10 bits) */ /** * \fn ColorSpace::ColorSpace(Primaries p, TransferFunction t, Encoding e, Range r) * \brief Construct a ColorSpace from explicit values * \param[in] p The color primaries * \param[in] t The transfer function for the color space * \param[in] e The Y'CbCr encoding * \param[in] r The range of the pixel values in this color space */ /** * \brief A constant representing a raw color space (from a sensor) */ const ColorSpace ColorSpace::Raw = { Primaries::Raw, TransferFunction::Linear, YcbcrEncoding::None, Range::Full }; /** * \brief A constant representing the sRGB color space (RGB formats only) */ const ColorSpace ColorSpace::Srgb = { Primaries::Rec709, TransferFunction::Srgb, YcbcrEncoding::None, Range::Full }; /** * \brief A constant representing the sYCC color space, typically used for * encoding JPEG images */ const ColorSpace ColorSpace::Sycc = { Primaries::Rec709, TransferFunction::Srgb, YcbcrEncoding::Rec601, Range::Full }; /** * \brief A constant representing the SMPTE170M color space */ const ColorSpace ColorSpace::Smpte170m = { Primaries::Smpte170m, TransferFunction::Rec709, YcbcrEncoding::Rec601, Range::Limited }; /** * \brief A constant representing the Rec.709 color space */ const ColorSpace ColorSpace::Rec709 = { Primaries::Rec709, TransferFunction::Rec709, YcbcrEncoding::Rec709, Range::Limited }; /** * \brief A constant representing the Rec.2020 color space */ const ColorSpace ColorSpace::Rec2020 = { Primaries::Rec2020, TransferFunction::Rec709, YcbcrEncoding::Rec2020, Range::Limited }; /** * \var ColorSpace::primaries * \brief The color primaries of this color space */ /** * \var ColorSpace::transferFunction * \brief The transfer function used by this color space */ /** * \var ColorSpace::ycbcrEncoding * \brief The Y'CbCr encoding used by this color space */ /** * \var 
ColorSpace::range * \brief The pixel range used by this color space */ namespace { const std::array<std::pair<ColorSpace, const char *>, 6> colorSpaceNames = { { { ColorSpace::Raw, "RAW" }, { ColorSpace::Srgb, "sRGB" }, { ColorSpace::Sycc, "sYCC" }, { ColorSpace::Smpte170m, "SMPTE170M" }, { ColorSpace::Rec709, "Rec709" }, { ColorSpace::Rec2020, "Rec2020" }, } }; const std::map<ColorSpace::Primaries, std::string> primariesNames = { { ColorSpace::Primaries::Raw, "RAW" }, { ColorSpace::Primaries::Smpte170m, "SMPTE170M" }, { ColorSpace::Primaries::Rec709, "Rec709" }, { ColorSpace::Primaries::Rec2020, "Rec2020" }, }; const std::map<ColorSpace::TransferFunction, std::string> transferNames = { { ColorSpace::TransferFunction::Linear, "Linear" }, { ColorSpace::TransferFunction::Srgb, "sRGB" }, { ColorSpace::TransferFunction::Rec709, "Rec709" }, }; const std::map<ColorSpace::YcbcrEncoding, std::string> encodingNames = { { ColorSpace::YcbcrEncoding::None, "None" }, { ColorSpace::YcbcrEncoding::Rec601, "Rec601" }, { ColorSpace::YcbcrEncoding::Rec709, "Rec709" }, { ColorSpace::YcbcrEncoding::Rec2020, "Rec2020" }, }; const std::map<ColorSpace::Range, std::string> rangeNames = { { ColorSpace::Range::Full, "Full" }, { ColorSpace::Range::Limited, "Limited" }, }; } /* namespace */ /** * \brief Assemble and return a readable string representation of the * ColorSpace * * If the color space matches a standard ColorSpace (such as ColorSpace::Sycc) * then the short name of the color space ("sYCC") is returned. Otherwise * the four constituent parts of the ColorSpace are assembled into a longer * string. * * \return A string describing the ColorSpace */ std::string ColorSpace::toString() const { /* Print out a brief name only for standard color spaces. */ auto it = std::find_if(colorSpaceNames.begin(), colorSpaceNames.end(), [this](const auto &item) { return *this == item.first; }); if (it != colorSpaceNames.end()) return std::string(it->second); /* Assemble a name made of the constituent fields. */ auto itPrimaries = primariesNames.find(primaries); std::string primariesName = itPrimaries == primariesNames.end() ? "Invalid" : itPrimaries->second; auto itTransfer = transferNames.find(transferFunction); std::string transferName = itTransfer == transferNames.end() ? "Invalid" : itTransfer->second; auto itEncoding = encodingNames.find(ycbcrEncoding); std::string encodingName = itEncoding == encodingNames.end() ? "Invalid" : itEncoding->second; auto itRange = rangeNames.find(range); std::string rangeName = itRange == rangeNames.end() ? "Invalid" : itRange->second; std::stringstream ss; ss << primariesName << "/" << transferName << "/" << encodingName << "/" << rangeName; return ss.str(); } /** * \brief Assemble and return a readable string representation of an * optional ColorSpace * * This is a convenience helper to easily obtain a string representation * for a ColorSpace in parts of the libcamera API where it is stored in a * std::optional<>. If the ColorSpace is set, this function returns * \a colorSpace.toString(), otherwise it returns "Unset". 
/**
 * \brief Assemble and return a readable string representation of an
 * optional ColorSpace
 *
 * This is a convenience helper to easily obtain a string representation
 * for a ColorSpace in parts of the libcamera API where it is stored in a
 * std::optional<>. If the ColorSpace is set, this function returns
 * \a colorSpace.toString(), otherwise it returns "Unset".
 *
 * \return A string describing the optional ColorSpace
 */
std::string ColorSpace::toString(const std::optional<ColorSpace> &colorSpace)
{
	if (!colorSpace)
		return "Unset";

	return colorSpace->toString();
}

/**
 * \brief Construct a color space from a string
 * \param[in] str The string
 *
 * The string \a str can contain the name of a well-known color space, or be
 * made of the four color space components separated by a '/' character,
 * ordered as
 *
 * \verbatim primaries '/' transferFunction '/' ycbcrEncoding '/' range \endverbatim
 *
 * Any failure to parse the string, either because it doesn't match the
 * expected format, or because one of the names isn't recognized, will cause
 * this function to return std::nullopt.
 *
 * \return The ColorSpace corresponding to the string, or std::nullopt if the
 * string doesn't describe a known color space
 */
std::optional<ColorSpace> ColorSpace::fromString(const std::string &str)
{
	/* First search for a standard color space name match. */
	auto itColorSpace = std::find_if(colorSpaceNames.begin(),
					 colorSpaceNames.end(),
					 [&str](const auto &item) {
						 return str == item.second;
					 });
	if (itColorSpace != colorSpaceNames.end())
		return itColorSpace->first;

	/*
	 * If not found, the string must contain the four color space
	 * components separated by a '/' character.
	 */
	const auto &split = utils::split(str, "/");
	std::vector<std::string> components{ split.begin(), split.end() };

	if (components.size() != 4)
		return std::nullopt;

	ColorSpace colorSpace = ColorSpace::Raw;

	/* Color primaries */
	auto itPrimaries = std::find_if(primariesNames.begin(),
					primariesNames.end(),
					[&components](const auto &item) {
						return components[0] == item.second;
					});
	if (itPrimaries == primariesNames.end())
		return std::nullopt;

	colorSpace.primaries = itPrimaries->first;

	/* Transfer function */
	auto itTransfer = std::find_if(transferNames.begin(),
				       transferNames.end(),
				       [&components](const auto &item) {
					       return components[1] == item.second;
				       });
	if (itTransfer == transferNames.end())
		return std::nullopt;

	colorSpace.transferFunction = itTransfer->first;

	/* YCbCr encoding */
	auto itEncoding = std::find_if(encodingNames.begin(),
				       encodingNames.end(),
				       [&components](const auto &item) {
					       return components[2] == item.second;
				       });
	if (itEncoding == encodingNames.end())
		return std::nullopt;

	colorSpace.ycbcrEncoding = itEncoding->first;

	/* Quantization range */
	auto itRange = std::find_if(rangeNames.begin(),
				    rangeNames.end(),
				    [&components](const auto &item) {
					    return components[3] == item.second;
				    });
	if (itRange == rangeNames.end())
		return std::nullopt;

	colorSpace.range = itRange->first;

	return colorSpace;
}

/**
 * \brief Adjust the color space to match a pixel format
 * \param[in] format The pixel format
 *
 * Not all combinations of pixel formats and color spaces make sense. For
 * instance, nobody uses a limited quantization range with raw Bayer formats,
 * and the YcbcrEncoding::None encoding isn't valid for YUV formats. This
 * function adjusts the ColorSpace to make it compatible with the given \a
 * format, by applying the following rules:
 *
 * - The color space for RAW formats must be Raw.
 * - The Y'CbCr encoding and quantization range for RGB formats must be
 *   YcbcrEncoding::None and Range::Full respectively.
 * - The Y'CbCr encoding for YUV formats must not be YcbcrEncoding::None. The
 *   best encoding is in that case guessed based on the primaries and transfer
 *   function.
 *
 * \return True if the color space has been adjusted, or false if it was
 * already compatible with the format and hasn't been changed
 */
bool ColorSpace::adjust(PixelFormat format)
{
	const PixelFormatInfo &info = PixelFormatInfo::info(format);
	bool adjusted = false;

	switch (info.colourEncoding) {
	case PixelFormatInfo::ColourEncodingRAW:
		/* Raw formats must use the raw color space. */
		if (*this != ColorSpace::Raw) {
			*this = ColorSpace::Raw;
			adjusted = true;
		}
		break;

	case PixelFormatInfo::ColourEncodingRGB:
		/*
		 * RGB formats can't have a Y'CbCr encoding, and must use full
		 * range quantization.
		 */
		if (ycbcrEncoding != YcbcrEncoding::None) {
			ycbcrEncoding = YcbcrEncoding::None;
			adjusted = true;
		}

		if (range != Range::Full) {
			range = Range::Full;
			adjusted = true;
		}
		break;

	case PixelFormatInfo::ColourEncodingYUV:
		if (ycbcrEncoding != YcbcrEncoding::None)
			break;

		/*
		 * YUV formats must have a Y'CbCr encoding. Infer the most
		 * probable option from the transfer function and primaries.
		 */
		switch (transferFunction) {
		case TransferFunction::Linear:
			/*
			 * Linear YUV is not used in any standard color space,
			 * pick the widely supported and used Rec601 as default.
			 */
			ycbcrEncoding = YcbcrEncoding::Rec601;
			break;

		case TransferFunction::Rec709:
			switch (primaries) {
			/* Raw should never happen. */
			case Primaries::Raw:
			case Primaries::Smpte170m:
				ycbcrEncoding = YcbcrEncoding::Rec601;
				break;
			case Primaries::Rec709:
				ycbcrEncoding = YcbcrEncoding::Rec709;
				break;
			case Primaries::Rec2020:
				ycbcrEncoding = YcbcrEncoding::Rec2020;
				break;
			}
			break;

		case TransferFunction::Srgb:
			/*
			 * Only the sYCC color space uses the sRGB transfer
			 * function, the corresponding encoding is Rec601.
			 */
			ycbcrEncoding = YcbcrEncoding::Rec601;
			break;
		}

		adjusted = true;
		break;
	}

	return adjusted;
}
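/*
 * Usage sketch (illustrative only; the variable names and the choice of
 * formats::NV12 are invented for the example). fromString() accepts both the
 * short names and the four-component form, and adjust() can then reconcile
 * the parsed color space with the pixel format it will be used with:
 *
 *	std::optional<ColorSpace> cs =
 *		ColorSpace::fromString("Rec709/Rec709/None/Limited");
 *	if (cs) {
 *		// YcbcrEncoding::None isn't valid for a YUV format such as
 *		// NV12; adjust() infers YcbcrEncoding::Rec709 from the
 *		// Rec.709 primaries and transfer function, and returns true.
 *		cs->adjust(formats::NV12);
 *	}
 */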
/**
 * \brief Compare color spaces for equality
 * \return True if the two color spaces are identical, false otherwise
 */
bool operator==(const ColorSpace &lhs, const ColorSpace &rhs)
{
	return lhs.primaries == rhs.primaries &&
	       lhs.transferFunction == rhs.transferFunction &&
	       lhs.ycbcrEncoding == rhs.ycbcrEncoding &&
	       lhs.range == rhs.range;
}

/**
 * \fn bool operator!=(const ColorSpace &lhs, const ColorSpace &rhs)
 * \brief Compare color spaces for inequality
 * \return True if the two color spaces are not identical, false otherwise
 */

} /* namespace libcamera */