Unnamed: 0
int64 0
0
| repo_id
stringlengths 5
186
| file_path
stringlengths 15
223
| content
stringlengths 1
32.8M
⌀ |
---|---|---|---|
0 | repos/libcamera/src/ipa/rpi/controller | repos/libcamera/src/ipa/rpi/controller/rpi/saturation.cpp | /* SPDX-License-Identifier: BSD-2-Clause */
/*
* Copyright (C) 2022 Raspberry Pi Ltd
*
* Saturation control algorithm
*/
#include "saturation.h"
#include <libcamera/base/log.h>
#include "saturation_status.h"
using namespace RPiController;
using namespace libcamera;
LOG_DEFINE_CATEGORY(RPiSaturation)
#define NAME "rpi.saturation"
/* Construct the saturation algorithm; no state beyond the base class. */
Saturation::Saturation(Controller *controller)
	: Algorithm(controller)
{
}
/* Return the algorithm's registered name ("rpi.saturation"). */
char const *Saturation::name() const
{
	return NAME;
}
int Saturation::read(const libcamera::YamlObject ¶ms)
{
config_.shiftR = params["shift_r"].get<uint8_t>(0);
config_.shiftG = params["shift_g"].get<uint8_t>(0);
config_.shiftB = params["shift_b"].get<uint8_t>(0);
return 0;
}
/* Nothing to initialise beyond the values read from the tuning file. */
void Saturation::initialise()
{
}
/*
 * Publish the configured shifts as "saturation.status" metadata on every
 * frame, for whoever consumes them downstream.
 */
void Saturation::prepare(Metadata *imageMetadata)
{
	SaturationStatus saturation;
	saturation.shiftR = config_.shiftR;
	saturation.shiftG = config_.shiftG;
	saturation.shiftB = config_.shiftB;
	imageMetadata->set("saturation.status", saturation);
}
/* Register algorithm with the system. */
static Algorithm *create(Controller *controller)
{
	/*
	 * Saturation derives from Algorithm, so the derived-to-base
	 * conversion is implicit; no C-style cast is needed. Lower-case
	 * "create" matches the convention used by the other algorithms
	 * in this directory (e.g. rpi.noise, rpi.hdr).
	 */
	return new Saturation(controller);
}
static RegisterAlgorithm reg(NAME, &create);
|
0 | repos/libcamera/src/ipa/rpi/controller | repos/libcamera/src/ipa/rpi/controller/rpi/hdr.cpp | /* SPDX-License-Identifier: BSD-2-Clause */
/*
* Copyright (C) 2023 Raspberry Pi Ltd
*
* HDR control algorithm
*/
#include "hdr.h"
#include <cmath>
#include <libcamera/base/log.h>
#include "../agc_status.h"
#include "../alsc_status.h"
#include "../stitch_status.h"
#include "../tonemap_status.h"
using namespace RPiController;
using namespace libcamera;
LOG_DEFINE_CATEGORY(RPiHdr)
#define NAME "rpi.hdr"
void HdrConfig::read(const libcamera::YamlObject ¶ms, const std::string &modeName)
{
name = modeName;
if (!params.contains("cadence"))
LOG(RPiHdr, Fatal) << "No cadence for HDR mode " << name;
cadence = params["cadence"].getList<unsigned int>().value();
if (cadence.empty())
LOG(RPiHdr, Fatal) << "Empty cadence in HDR mode " << name;
/*
* In the JSON file it's easier to use the channel name as the key, but
* for us it's convenient to swap them over.
*/
for (const auto &[k, v] : params["channel_map"].asDict())
channelMap[v.get<unsigned int>().value()] = k;
/* Lens shading related parameters. */
if (params.contains("spatial_gain_curve")) {
spatialGainCurve = params["spatial_gain_curve"].get<ipa::Pwl>(ipa::Pwl{});
} else if (params.contains("spatial_gain")) {
double spatialGain = params["spatial_gain"].get<double>(2.0);
spatialGainCurve.append(0.0, spatialGain);
spatialGainCurve.append(0.01, spatialGain);
spatialGainCurve.append(0.06, 1.0); /* maybe make this programmable? */
spatialGainCurve.append(1.0, 1.0);
}
diffusion = params["diffusion"].get<unsigned int>(3);
/* Clip to an arbitrary limit just to stop typos from killing the system! */
const unsigned int MAX_DIFFUSION = 15;
if (diffusion > MAX_DIFFUSION) {
diffusion = MAX_DIFFUSION;
LOG(RPiHdr, Warning) << "Diffusion value clipped to " << MAX_DIFFUSION;
}
/* Read any tonemap parameters. */
tonemapEnable = params["tonemap_enable"].get<int>(0);
detailConstant = params["detail_constant"].get<uint16_t>(0);
detailSlope = params["detail_slope"].get<double>(0.0);
iirStrength = params["iir_strength"].get<double>(8.0);
strength = params["strength"].get<double>(1.5);
if (tonemapEnable)
tonemap = params["tonemap"].get<ipa::Pwl>(ipa::Pwl{});
speed = params["speed"].get<double>(1.0);
if (params.contains("hi_quantile_targets")) {
hiQuantileTargets = params["hi_quantile_targets"].getList<double>().value();
if (hiQuantileTargets.empty() || hiQuantileTargets.size() % 2)
LOG(RPiHdr, Fatal) << "hi_quantile_targets much be even and non-empty";
} else
hiQuantileTargets = { 0.95, 0.65, 0.5, 0.28, 0.3, 0.25 };
hiQuantileMaxGain = params["hi_quantile_max_gain"].get<double>(1.6);
if (params.contains("quantile_targets")) {
quantileTargets = params["quantile_targets"].getList<double>().value();
if (quantileTargets.empty() || quantileTargets.size() % 2)
LOG(RPiHdr, Fatal) << "quantile_targets much be even and non-empty";
} else
quantileTargets = { 0.2, 0.03, 1.0, 0.15 };
powerMin = params["power_min"].get<double>(0.65);
powerMax = params["power_max"].get<double>(1.0);
if (params.contains("contrast_adjustments")) {
contrastAdjustments = params["contrast_adjustments"].getList<double>().value();
} else
contrastAdjustments = { 0.5, 0.75 };
/* Read any stitch parameters. */
stitchEnable = params["stitch_enable"].get<int>(0);
thresholdLo = params["threshold_lo"].get<uint16_t>(50000);
motionThreshold = params["motion_threshold"].get<double>(0.005);
diffPower = params["diff_power"].get<uint8_t>(13);
if (diffPower > 15)
LOG(RPiHdr, Fatal) << "Bad diff_power value in HDR mode " << name;
}
/*
 * Construct the HDR algorithm. The spatial gains are computed over the same
 * grid as the AWB statistics regions; two gain buffers are kept so that the
 * diffusion step in updateGains() can ping-pong between them.
 */
Hdr::Hdr(Controller *controller)
	: HdrAlgorithm(controller)
{
	regions_ = controller->getHardwareConfig().awbRegions;
	numRegions_ = regions_.width * regions_.height;
	gains_[0].resize(numRegions_, 1.0);
	gains_[1].resize(numRegions_, 1.0);
}
/* Return the algorithm's registered name ("rpi.hdr"). */
char const *Hdr::name() const
{
	return NAME;
}
int Hdr::read(const libcamera::YamlObject ¶ms)
{
/* Make an "HDR off" mode by default so that tuning files don't have to. */
HdrConfig &offMode = config_["Off"];
offMode.name = "Off";
offMode.cadence = { 0 };
offMode.channelMap[0] = "None";
status_.mode = offMode.name;
delayedStatus_.mode = offMode.name;
/*
* But we still allow the tuning file to override the "Off" mode if it wants.
* For example, maybe an application will make channel 0 be the "short"
* channel, in order to apply other AGC controls to it.
*/
for (const auto &[key, value] : params.asDict())
config_[key].read(value, key);
return 0;
}
/*
 * Select the HDR mode by name. The name is validated against the configured
 * modes here, so later lookups of status_.mode can assume it exists.
 * Returns 0 on success, -1 if the mode is unknown (with a warning logged).
 */
int Hdr::setMode(std::string const &mode)
{
	/* Always validate the mode, so it can be used later without checking. */
	auto entry = config_.find(mode);
	if (entry != config_.end()) {
		status_.mode = entry->second.name;
		return 0;
	}

	LOG(RPiHdr, Warning) << "No such HDR mode " << mode;
	return -1;
}
/* Return the frame cadence (the list of AGC channel indexes) for the current mode. */
std::vector<unsigned int> Hdr::getChannels() const
{
	return config_.at(status_.mode).cadence;
}
/*
 * Translate the AGC's current channel index into this mode's channel name,
 * and write our HDR status back into the locked agc.status metadata so the
 * AGC and HDR algorithms stay in step. Warnings are logged if the AGC status
 * is missing or its channel is not in this mode's channel map.
 */
void Hdr::updateAgcStatus(Metadata *metadata)
{
	std::scoped_lock lock(*metadata);
	AgcStatus *agcStatus = metadata->getLocked<AgcStatus>("agc.status");
	if (agcStatus) {
		HdrConfig &hdrConfig = config_[status_.mode];
		auto it = hdrConfig.channelMap.find(agcStatus->channel);
		if (it != hdrConfig.channelMap.end()) {
			status_.channel = it->second;
			agcStatus->hdr = status_;
		} else
			LOG(RPiHdr, Warning) << "Channel " << agcStatus->channel
					     << " not found in mode " << status_.mode;
	} else
		LOG(RPiHdr, Warning) << "No agc.status found";
}
/*
 * On a camera mode switch, refresh the AGC status and reset the delayed
 * status to the current one (there are no in-flight frames to wait for).
 */
void Hdr::switchMode([[maybe_unused]] CameraMode const &cameraMode, Metadata *metadata)
{
	updateAgcStatus(metadata);
	delayedStatus_ = status_;
}
/*
 * Per-frame preparation: multiply the spatially varying HDR gains (computed
 * in updateGains()) into this frame's ALSC lens shading tables, then write
 * the modified tables back as alsc.status.
 */
void Hdr::prepare(Metadata *imageMetadata)
{
	AgcStatus agcStatus;
	/* Metadata::get() returns 0 on success (see e.g. noise.cpp). */
	if (!imageMetadata->get<AgcStatus>("agc.delayed_status", agcStatus))
		delayedStatus_ = agcStatus.hdr;
	auto it = config_.find(delayedStatus_.mode);
	if (it == config_.end()) {
		/* Shouldn't be possible. There would be nothing we could do. */
		LOG(RPiHdr, Warning) << "Unexpected HDR mode " << delayedStatus_.mode;
		return;
	}
	HdrConfig &config = it->second;
	/* Nothing to apply unless this mode defines a spatial gain curve. */
	if (config.spatialGainCurve.empty())
		return;
	AlscStatus alscStatus{}; /* some compilers seem to require the braces */
	if (imageMetadata->get<AlscStatus>("alsc.status", alscStatus)) {
		LOG(RPiHdr, Warning) << "No ALSC status";
		return;
	}
	/* The final gains ended up in the odd or even array, according to diffusion. */
	std::vector<double> &gains = gains_[config.diffusion & 1];
	for (unsigned int i = 0; i < numRegions_; i++) {
		alscStatus.r[i] *= gains[i];
		alscStatus.g[i] *= gains[i];
		alscStatus.b[i] *= gains[i];
	}
	imageMetadata->set("alsc.status", alscStatus);
}
/*
 * Update (or regenerate) the tonemap for this frame. Returns true if a
 * tonemap.status should be output, false if tonemapping is disabled.
 * Either an explicit tonemap from the config is used, or one is derived
 * dynamically from the luminance histogram (see the numbered notes below).
 */
bool Hdr::updateTonemap([[maybe_unused]] StatisticsPtr &stats, HdrConfig &config)
{
	/* When there's a change of HDR mode we start over with a new tonemap curve. */
	if (delayedStatus_.mode != previousMode_) {
		previousMode_ = delayedStatus_.mode;
		tonemap_ = ipa::Pwl();
	}

	/* No tonemapping. No need to output a tonemap.status. */
	if (!config.tonemapEnable)
		return false;

	/* If an explicit tonemap was given, use it. */
	if (!config.tonemap.empty()) {
		tonemap_ = config.tonemap;
		return true;
	}

	/*
	 * We wouldn't update the tonemap on short frames when in multi-exposure mode. But
	 * we still need to output the most recent tonemap. Possibly we should make the
	 * config indicate the channels for which we should update the tonemap?
	 */
	if (delayedStatus_.mode == "MultiExposure" && delayedStatus_.channel != "short")
		return true;

	/*
	 * Create a tonemap dynamically. We have three ingredients.
	 *
	 * 1. We have a list of "hi quantiles" and "targets". We use these to judge if
	 * the image does seem to be reasonably saturated. If it isn't, we calculate
	 * a gain that we will feed as a linear factor into the tonemap generation.
	 * This prevents unsaturated images from becoming quite so "flat".
	 *
	 * 2. We have a list of quantile/target pairs for the bottom of the histogram.
	 * We use these to calculate how much gain we must apply to the bottom of the
	 * tonemap. We apply this gain as a power curve so as not to blow out the top
	 * end.
	 *
	 * 3. Finally, when we generate the tonemap, we have some contrast adjustments
	 * for the bottom because we know that power curves can start quite steeply and
	 * cause a washed-out look.
	 */

	/* Compute the linear gain from the headroom for saturation at the top. */
	double gain = 10; /* arbitrary, but hiQuantileMaxGain will clamp it later */
	for (unsigned int i = 0; i < config.hiQuantileTargets.size(); i += 2) {
		double quantile = config.hiQuantileTargets[i];
		double target = config.hiQuantileTargets[i + 1];
		double value = stats->yHist.interQuantileMean(quantile, 1.0) / 1024.0;
		double newGain = target / (value + 0.01);
		gain = std::min(gain, newGain);
	}
	gain = std::clamp(gain, 1.0, config.hiQuantileMaxGain);

	/* Compute the power curve from the amount of gain needed at the bottom. */
	double min_power = 2; /* arbitrary, but config.powerMax will clamp it later */
	for (unsigned int i = 0; i < config.quantileTargets.size(); i += 2) {
		double quantile = config.quantileTargets[i];
		double target = config.quantileTargets[i + 1];
		double value = stats->yHist.interQuantileMean(0, quantile) / 1024.0;
		value = std::min(value * gain, 1.0);
		double power = log(target + 1e-6) / log(value + 1e-6);
		min_power = std::min(min_power, power);
	}
	double power = std::clamp(min_power, config.powerMin, config.powerMax);

	/* Generate the tonemap, including the contrast adjustment factors. */
	libcamera::ipa::Pwl tonemap;
	tonemap.append(0, 0);
	for (unsigned int i = 0; i <= 6; i++) {
		double x = 1 << (i + 9); /* x loops from 512 to 32768 inclusive */
		double y = pow(std::min(x * gain, 65535.0) / 65536.0, power) * 65536;
		if (i < config.contrastAdjustments.size())
			y *= config.contrastAdjustments[i];
		/* IIR-filter the new curve towards the previous one at config.speed. */
		if (!tonemap_.empty())
			y = y * config.speed + tonemap_.eval(x) * (1 - config.speed);
		tonemap.append(x, y);
	}
	tonemap.append(65535, 65535);
	tonemap_ = tonemap;

	return true;
}
/*
 * One diffusion pass: write into dst a smoothed copy of src, averaging each
 * cell with its immediate (non-diagonal) neighbours. Both vectors are
 * row-major grids of size.width x size.height. Corners average 3 cells,
 * edges 4, interior cells 5. Requires width and height >= 3.
 *
 * Bug fix: the right-hand-edge loop used a hard-coded column index 31
 * instead of lastCol, which is only correct for a 32-column grid (the grid
 * dimensions come from the hardware config's awbRegions).
 */
static void averageGains(std::vector<double> &src, std::vector<double> &dst, const Size &size)
{
#define IDX(y, x) ((y)*size.width + (x))
	unsigned int lastCol = size.width - 1; /* index of last column */
	unsigned int preLastCol = lastCol - 1; /* and the column before that */
	unsigned int lastRow = size.height - 1; /* index of last row */
	unsigned int preLastRow = lastRow - 1; /* and the row before that */

	/* Corners first. */
	dst[IDX(0, 0)] = (src[IDX(0, 0)] + src[IDX(0, 1)] + src[IDX(1, 0)]) / 3;
	dst[IDX(0, lastCol)] = (src[IDX(0, lastCol)] + src[IDX(0, preLastCol)] + src[IDX(1, lastCol)]) / 3;
	dst[IDX(lastRow, 0)] = (src[IDX(lastRow, 0)] + src[IDX(lastRow, 1)] + src[IDX(preLastRow, 0)]) / 3;
	dst[IDX(lastRow, lastCol)] = (src[IDX(lastRow, lastCol)] + src[IDX(lastRow, preLastCol)] +
				      src[IDX(preLastRow, lastCol)]) /
				     3;

	/* Now the edges. */
	for (unsigned int i = 1; i < lastCol; i++) {
		dst[IDX(0, i)] = (src[IDX(0, i - 1)] + src[IDX(0, i)] + src[IDX(0, i + 1)] + src[IDX(1, i)]) / 4;
		dst[IDX(lastRow, i)] = (src[IDX(lastRow, i - 1)] + src[IDX(lastRow, i)] +
					src[IDX(lastRow, i + 1)] + src[IDX(preLastRow, i)]) /
				       4;
	}
	for (unsigned int i = 1; i < lastRow; i++) {
		dst[IDX(i, 0)] = (src[IDX(i - 1, 0)] + src[IDX(i, 0)] + src[IDX(i + 1, 0)] + src[IDX(i, 1)]) / 4;
		dst[IDX(i, lastCol)] = (src[IDX(i - 1, lastCol)] + src[IDX(i, lastCol)] +
					src[IDX(i + 1, lastCol)] + src[IDX(i, preLastCol)]) /
				       4;
	}

	/* Finally the interior. */
	for (unsigned int j = 1; j < lastRow; j++) {
		for (unsigned int i = 1; i < lastCol; i++) {
			dst[IDX(j, i)] = (src[IDX(j - 1, i)] + src[IDX(j, i - 1)] + src[IDX(j, i)] +
					  src[IDX(j, i + 1)] + src[IDX(j + 1, i)]) /
					 5;
		}
	}
#undef IDX
}
/*
 * Recompute the spatially varying gains from the AWB region statistics.
 * Each region's brightness (largest of the mean R/G/B values, normalised to
 * 16 bits) is fed through the mode's spatial gain curve, and the result is
 * then smoothed by running the diffusion (averaging) pass the configured
 * number of times.
 *
 * Fix: "auto &region" had been mangled to "auto ®ion" by an HTML-entity
 * corruption; restored.
 */
void Hdr::updateGains(StatisticsPtr &stats, HdrConfig &config)
{
	if (config.spatialGainCurve.empty())
		return;

	/* When alternating exposures, only compute these gains for the short frame. */
	if (delayedStatus_.mode == "MultiExposure" && delayedStatus_.channel != "short")
		return;

	for (unsigned int i = 0; i < numRegions_; i++) {
		auto &region = stats->awbRegions.get(i);
		unsigned int counted = region.counted;
		counted += (counted == 0); /* avoid div by zero */
		double r = region.val.rSum / counted;
		double g = region.val.gSum / counted;
		double b = region.val.bSum / counted;
		double brightness = std::max({ r, g, b }) / 65535;
		gains_[0][i] = config.spatialGainCurve.eval(brightness);
	}

	/* Ping-pong between the two gains_ buffers. */
	for (unsigned int i = 0; i < config.diffusion; i++)
		averageGains(gains_[i & 1], gains_[(i & 1) ^ 1], regions_);
}
/*
 * Per-frame statistics processing: note the channel of the upcoming frame,
 * work out the channel of this frame, then update the spatial gains and the
 * tonemap, emitting tonemap.status and stitch.status metadata as configured.
 */
void Hdr::process(StatisticsPtr &stats, Metadata *imageMetadata)
{
	/* Note what HDR channel this frame will be once it comes back to us. */
	updateAgcStatus(imageMetadata);
	/*
	 * Now figure out what HDR channel this frame is. It should be available in the
	 * agc.delayed_status, unless this is an early frame after a mode switch, in which
	 * case delayedStatus_ should be right.
	 */
	AgcStatus agcStatus;
	if (!imageMetadata->get<AgcStatus>("agc.delayed_status", agcStatus))
		delayedStatus_ = agcStatus.hdr;
	auto it = config_.find(delayedStatus_.mode);
	if (it == config_.end()) {
		/* Shouldn't be possible. There would be nothing we could do. */
		LOG(RPiHdr, Warning) << "Unexpected HDR mode " << delayedStatus_.mode;
		return;
	}
	HdrConfig &config = it->second;
	/* Update the spatially varying gains. They get written in prepare(). */
	updateGains(stats, config);
	if (updateTonemap(stats, config)) {
		/* Add tonemap.status metadata. */
		TonemapStatus tonemapStatus;
		tonemapStatus.detailConstant = config.detailConstant;
		tonemapStatus.detailSlope = config.detailSlope;
		tonemapStatus.iirStrength = config.iirStrength;
		tonemapStatus.strength = config.strength;
		tonemapStatus.tonemap = tonemap_;
		imageMetadata->set("tonemap.status", tonemapStatus);
	}
	if (config.stitchEnable) {
		/* Add stitch.status metadata. */
		StitchStatus stitchStatus;
		stitchStatus.diffPower = config.diffPower;
		stitchStatus.motionThreshold = config.motionThreshold;
		stitchStatus.thresholdLo = config.thresholdLo;
		imageMetadata->set("stitch.status", stitchStatus);
	}
}
/* Register algorithm with the system. */
static Algorithm *create(Controller *controller)
{
	/* Hdr derives from Algorithm, so no cast is needed. */
	return new Hdr(controller);
}
static RegisterAlgorithm reg(NAME, &create);
|
0 | repos/libcamera/src/ipa/rpi/controller | repos/libcamera/src/ipa/rpi/controller/rpi/agc_channel.h | /* SPDX-License-Identifier: BSD-2-Clause */
/*
* Copyright (C) 2023, Raspberry Pi Ltd
*
* AGC/AEC control algorithm
*/
#pragma once
#include <map>
#include <string>
#include <vector>
#include <libcamera/base/utils.h>
#include <libipa/pwl.h>
#include "../agc_status.h"
#include "../awb_status.h"
#include "../controller.h"
/* This is our implementation of AGC. */
namespace RPiController {
using AgcChannelTotalExposures = std::vector<libcamera::utils::Duration>;
struct AgcMeteringMode {
std::vector<double> weights;
int read(const libcamera::YamlObject ¶ms);
};
struct AgcExposureMode {
std::vector<libcamera::utils::Duration> shutter;
std::vector<double> gain;
int read(const libcamera::YamlObject ¶ms);
};
struct AgcConstraint {
enum class Bound { LOWER = 0,
UPPER = 1 };
Bound bound;
double qLo;
double qHi;
libcamera::ipa::Pwl yTarget;
int read(const libcamera::YamlObject ¶ms);
};
typedef std::vector<AgcConstraint> AgcConstraintMode;
struct AgcChannelConstraint {
enum class Bound { LOWER = 0,
UPPER = 1 };
Bound bound;
unsigned int channel;
double factor;
int read(const libcamera::YamlObject ¶ms);
};
struct AgcConfig {
int read(const libcamera::YamlObject ¶ms);
std::map<std::string, AgcMeteringMode> meteringModes;
std::map<std::string, AgcExposureMode> exposureModes;
std::map<std::string, AgcConstraintMode> constraintModes;
std::vector<AgcChannelConstraint> channelConstraints;
libcamera::ipa::Pwl yTarget;
double speed;
uint16_t startupFrames;
unsigned int convergenceFrames;
double maxChange;
double minChange;
double fastReduceThreshold;
double speedUpThreshold;
std::string defaultMeteringMode;
std::string defaultExposureMode;
std::string defaultConstraintMode;
double baseEv;
libcamera::utils::Duration defaultExposureTime;
double defaultAnalogueGain;
double stableRegion;
bool desaturate;
};
class AgcChannel
{
public:
AgcChannel();
int read(const libcamera::YamlObject ¶ms,
const Controller::HardwareConfig &hardwareConfig);
unsigned int getConvergenceFrames() const;
std::vector<double> const &getWeights() const;
void setEv(double ev);
void setFlickerPeriod(libcamera::utils::Duration flickerPeriod);
void setMaxShutter(libcamera::utils::Duration maxShutter);
void setFixedShutter(libcamera::utils::Duration fixedShutter);
void setFixedAnalogueGain(double fixedAnalogueGain);
void setMeteringMode(std::string const &meteringModeName);
void setExposureMode(std::string const &exposureModeName);
void setConstraintMode(std::string const &contraintModeName);
void enableAuto();
void disableAuto();
void switchMode(CameraMode const &cameraMode, Metadata *metadata);
void prepare(Metadata *imageMetadata);
void process(StatisticsPtr &stats, DeviceStatus const &deviceStatus, Metadata *imageMetadata,
const AgcChannelTotalExposures &channelTotalExposures);
private:
bool updateLockStatus(DeviceStatus const &deviceStatus);
AgcConfig config_;
void housekeepConfig();
void fetchCurrentExposure(DeviceStatus const &deviceStatus);
void fetchAwbStatus(Metadata *imageMetadata);
void computeGain(StatisticsPtr &statistics, Metadata *imageMetadata,
double &gain, double &targetY);
void computeTargetExposure(double gain);
void filterExposure();
bool applyChannelConstraints(const AgcChannelTotalExposures &channelTotalExposures);
bool applyDigitalGain(double gain, double targetY, bool channelBound);
void divideUpExposure();
void writeAndFinish(Metadata *imageMetadata, bool desaturate);
libcamera::utils::Duration limitShutter(libcamera::utils::Duration shutter);
double limitGain(double gain) const;
AgcMeteringMode *meteringMode_;
AgcExposureMode *exposureMode_;
AgcConstraintMode *constraintMode_;
CameraMode mode_;
uint64_t frameCount_;
AwbStatus awb_;
struct ExposureValues {
ExposureValues();
libcamera::utils::Duration shutter;
double analogueGain;
libcamera::utils::Duration totalExposure;
libcamera::utils::Duration totalExposureNoDG; /* without digital gain */
};
ExposureValues current_; /* values for the current frame */
ExposureValues target_; /* calculate the values we want here */
ExposureValues filtered_; /* these values are filtered towards target */
AgcStatus status_;
int lockCount_;
DeviceStatus lastDeviceStatus_;
libcamera::utils::Duration lastTargetExposure_;
/* Below here the "settings" that applications can change. */
std::string meteringModeName_;
std::string exposureModeName_;
std::string constraintModeName_;
double ev_;
libcamera::utils::Duration flickerPeriod_;
libcamera::utils::Duration maxShutter_;
libcamera::utils::Duration fixedShutter_;
double fixedAnalogueGain_;
};
} /* namespace RPiController */
|
0 | repos/libcamera/src/ipa/rpi/controller | repos/libcamera/src/ipa/rpi/controller/rpi/focus.h | /* SPDX-License-Identifier: BSD-2-Clause */
/*
* Copyright (C) 2020, Raspberry Pi Ltd
*
* focus algorithm
*/
#pragma once
#include "../algorithm.h"
#include "../metadata.h"
/*
* The "focus" algorithm. All it does it print out a version of the
* focus contrast measure; there is no actual auto-focus mechanism to
* control.
*/
namespace RPiController {
class Focus : public Algorithm
{
public:
	Focus(Controller *controller);
	char const *name() const override;
	/* Reports the focus contrast measure from the statistics; no control output. */
	void process(StatisticsPtr &stats, Metadata *imageMetadata) override;
};
} /* namespace RPiController */
|
0 | repos/libcamera/src/ipa/rpi/controller | repos/libcamera/src/ipa/rpi/controller/rpi/ccm.h | /* SPDX-License-Identifier: BSD-2-Clause */
/*
* Copyright (C) 2019, Raspberry Pi Ltd
*
* CCM (colour correction matrix) control algorithm
*/
#pragma once
#include <vector>
#include <libipa/pwl.h>
#include "../ccm_algorithm.h"
namespace RPiController {
/* Algorithm to calculate colour matrix. Should be placed after AWB. */
struct Matrix {
Matrix(double m0, double m1, double m2, double m3, double m4, double m5,
double m6, double m7, double m8);
Matrix();
double m[3][3];
int read(const libcamera::YamlObject ¶ms);
};
/* Scale every coefficient of the matrix by d. */
static inline Matrix operator*(double d, Matrix const &m)
{
	Matrix scaled;
	for (int row = 0; row < 3; row++)
		for (int col = 0; col < 3; col++)
			scaled.m[row][col] = m.m[row][col] * d;
	return scaled;
}
/* Standard 3x3 matrix multiplication: result = m1 * m2. */
static inline Matrix operator*(Matrix const &m1, Matrix const &m2)
{
	Matrix m;
	for (int i = 0; i < 3; i++)
		for (int j = 0; j < 3; j++)
			m.m[i][j] = m1.m[i][0] * m2.m[0][j] +
				    m1.m[i][1] * m2.m[1][j] +
				    m1.m[i][2] * m2.m[2][j];
	return m;
}
/* Element-wise sum of two 3x3 matrices. */
static inline Matrix operator+(Matrix const &m1, Matrix const &m2)
{
	Matrix sum;
	for (int row = 0; row < 3; row++)
		for (int col = 0; col < 3; col++)
			sum.m[row][col] = m1.m[row][col] + m2.m[row][col];
	return sum;
}
/* A colour correction matrix associated with a colour temperature. */
struct CtCcm {
	double ct;
	Matrix ccm;
};
/* CCM configuration: CT/CCM pairs plus a saturation adjustment curve. */
struct CcmConfig {
	std::vector<CtCcm> ccms;
	libcamera::ipa::Pwl saturation;
};
class Ccm : public CcmAlgorithm
{
public:
Ccm(Controller *controller = NULL);
char const *name() const override;
int read(const libcamera::YamlObject ¶ms) override;
void setSaturation(double saturation) override;
void initialise() override;
void prepare(Metadata *imageMetadata) override;
private:
CcmConfig config_;
double saturation_;
};
} /* namespace RPiController */
|
0 | repos/libcamera/src/ipa/rpi/controller | repos/libcamera/src/ipa/rpi/controller/rpi/black_level.h | /* SPDX-License-Identifier: BSD-2-Clause */
/*
* Copyright (C) 2019, Raspberry Pi Ltd
*
* black level control algorithm
*/
#pragma once
#include "../black_level_algorithm.h"
#include "../black_level_status.h"
/* This is our implementation of the "black level algorithm". */
namespace RPiController {
class BlackLevel : public BlackLevelAlgorithm
{
public:
BlackLevel(Controller *controller);
char const *name() const override;
int read(const libcamera::YamlObject ¶ms) override;
void initialValues(uint16_t &blackLevelR, uint16_t &blackLevelG,
uint16_t &blackLevelB) override;
void prepare(Metadata *imageMetadata) override;
private:
double blackLevelR_;
double blackLevelG_;
double blackLevelB_;
};
} /* namespace RPiController */
|
0 | repos/libcamera/src/ipa/rpi/controller | repos/libcamera/src/ipa/rpi/controller/rpi/awb.h | /* SPDX-License-Identifier: BSD-2-Clause */
/*
* Copyright (C) 2019, Raspberry Pi Ltd
*
* AWB control algorithm
*/
#pragma once
#include <mutex>
#include <condition_variable>
#include <thread>
#include <libcamera/geometry.h>
#include "../awb_algorithm.h"
#include "../awb_status.h"
#include "../statistics.h"
#include "libipa/pwl.h"
namespace RPiController {
/* Control algorithm to perform AWB calculations. */
struct AwbMode {
int read(const libcamera::YamlObject ¶ms);
double ctLo; /* low CT value for search */
double ctHi; /* high CT value for search */
};
struct AwbPrior {
int read(const libcamera::YamlObject ¶ms);
double lux; /* lux level */
libcamera::ipa::Pwl prior; /* maps CT to prior log likelihood for this lux level */
};
struct AwbConfig {
AwbConfig() : defaultMode(nullptr) {}
int read(const libcamera::YamlObject ¶ms);
/* Only repeat the AWB calculation every "this many" frames */
uint16_t framePeriod;
/* number of initial frames for which speed taken as 1.0 (maximum) */
uint16_t startupFrames;
unsigned int convergenceFrames; /* approx number of frames to converge */
double speed; /* IIR filter speed applied to algorithm results */
bool fast; /* "fast" mode uses a 16x16 rather than 32x32 grid */
libcamera::ipa::Pwl ctR; /* function maps CT to r (= R/G) */
libcamera::ipa::Pwl ctB; /* function maps CT to b (= B/G) */
libcamera::ipa::Pwl ctRInverse; /* inverse of ctR */
libcamera::ipa::Pwl ctBInverse; /* inverse of ctB */
/* table of illuminant priors at different lux levels */
std::vector<AwbPrior> priors;
/* AWB "modes" (determines the search range) */
std::map<std::string, AwbMode> modes;
AwbMode *defaultMode; /* mode used if no mode selected */
/*
* minimum proportion of pixels counted within AWB region for it to be
* "useful"
*/
double minPixels;
/* minimum G value of those pixels, to be regarded a "useful" */
uint16_t minG;
/*
* number of AWB regions that must be "useful" in order to do the AWB
* calculation
*/
uint32_t minRegions;
/* clamp on colour error term (so as not to penalise non-grey excessively) */
double deltaLimit;
/* step size control in coarse search */
double coarseStep;
/* how far to wander off CT curve towards "more purple" */
double transversePos;
/* how far to wander off CT curve towards "more green" */
double transverseNeg;
/*
* red sensitivity ratio (set to canonical sensor's R/G divided by this
* sensor's R/G)
*/
double sensitivityR;
/*
* blue sensitivity ratio (set to canonical sensor's B/G divided by this
* sensor's B/G)
*/
double sensitivityB;
/* The whitepoint (which we normally "aim" for) can be moved. */
double whitepointR;
double whitepointB;
bool bayes; /* use Bayesian algorithm */
};
class Awb : public AwbAlgorithm
{
public:
Awb(Controller *controller = NULL);
~Awb();
char const *name() const override;
void initialise() override;
int read(const libcamera::YamlObject ¶ms) override;
unsigned int getConvergenceFrames() const override;
void initialValues(double &gainR, double &gainB) override;
void setMode(std::string const &name) override;
void setManualGains(double manualR, double manualB) override;
void enableAuto() override;
void disableAuto() override;
void switchMode(CameraMode const &cameraMode, Metadata *metadata) override;
void prepare(Metadata *imageMetadata) override;
void process(StatisticsPtr &stats, Metadata *imageMetadata) override;
struct RGB {
RGB(double r = 0, double g = 0, double b = 0)
: R(r), G(g), B(b)
{
}
double R, G, B;
RGB &operator+=(RGB const &other)
{
R += other.R, G += other.G, B += other.B;
return *this;
}
};
private:
bool isAutoEnabled() const;
/* configuration is read-only, and available to both threads */
AwbConfig config_;
std::thread asyncThread_;
void asyncFunc(); /* asynchronous thread function */
std::mutex mutex_;
/* condvar for async thread to wait on */
std::condition_variable asyncSignal_;
/* condvar for synchronous thread to wait on */
std::condition_variable syncSignal_;
/* for sync thread to check if async thread finished (requires mutex) */
bool asyncFinished_;
/* for async thread to check if it's been told to run (requires mutex) */
bool asyncStart_;
/* for async thread to check if it's been told to quit (requires mutex) */
bool asyncAbort_;
/*
* The following are only for the synchronous thread to use:
* for sync thread to note its has asked async thread to run
*/
bool asyncStarted_;
/* counts up to framePeriod before restarting the async thread */
int framePhase_;
int frameCount_; /* counts up to startup_frames */
AwbStatus syncResults_;
AwbStatus prevSyncResults_;
std::string modeName_;
/*
* The following are for the asynchronous thread to use, though the main
* thread can set/reset them if the async thread is known to be idle:
*/
void restartAsync(StatisticsPtr &stats, double lux);
/* copy out the results from the async thread so that it can be restarted */
void fetchAsyncResults();
StatisticsPtr statistics_;
AwbMode *mode_;
double lux_;
AwbStatus asyncResults_;
void doAwb();
void awbBayes();
void awbGrey();
void prepareStats();
double computeDelta2Sum(double gainR, double gainB);
libcamera::ipa::Pwl interpolatePrior();
double coarseSearch(libcamera::ipa::Pwl const &prior);
void fineSearch(double &t, double &r, double &b, libcamera::ipa::Pwl const &prior);
std::vector<RGB> zones_;
std::vector<libcamera::ipa::Pwl::Point> points_;
/* manual r setting */
double manualR_;
/* manual b setting */
double manualB_;
};
/* Component-wise arithmetic helpers for Awb::RGB values. */
static inline Awb::RGB operator+(Awb::RGB const &a, Awb::RGB const &b)
{
	Awb::RGB sum(a.R + b.R, a.G + b.G, a.B + b.B);
	return sum;
}
static inline Awb::RGB operator-(Awb::RGB const &a, Awb::RGB const &b)
{
	Awb::RGB diff(a.R - b.R, a.G - b.G, a.B - b.B);
	return diff;
}
static inline Awb::RGB operator*(double d, Awb::RGB const &rgb)
{
	return Awb::RGB(rgb.R * d, rgb.G * d, rgb.B * d);
}
static inline Awb::RGB operator*(Awb::RGB const &rgb, double d)
{
	/* Delegate to the (double, RGB) overload above. */
	return d * rgb;
}
} /* namespace RPiController */
|
0 | repos/libcamera/src/ipa/rpi/controller | repos/libcamera/src/ipa/rpi/controller/rpi/sharpen.h | /* SPDX-License-Identifier: BSD-2-Clause */
/*
* Copyright (C) 2019, Raspberry Pi Ltd
*
* sharpening control algorithm
*/
#pragma once
#include "../sharpen_algorithm.h"
#include "../sharpen_status.h"
/* This is our implementation of the "sharpen algorithm". */
namespace RPiController {
class Sharpen : public SharpenAlgorithm
{
public:
Sharpen(Controller *controller);
char const *name() const override;
void switchMode(CameraMode const &cameraMode, Metadata *metadata) override;
int read(const libcamera::YamlObject ¶ms) override;
void setStrength(double strength) override;
void prepare(Metadata *imageMetadata) override;
private:
double threshold_;
double strength_;
double limit_;
double modeFactor_;
double userStrength_;
};
} /* namespace RPiController */
|
0 | repos/libcamera/src/ipa/rpi/controller | repos/libcamera/src/ipa/rpi/controller/rpi/noise.cpp | /* SPDX-License-Identifier: BSD-2-Clause */
/*
* Copyright (C) 2019, Raspberry Pi Ltd
*
* Noise control algorithm
*/
#include <math.h>
#include <libcamera/base/log.h>
#include "../device_status.h"
#include "../noise_status.h"
#include "noise.h"
using namespace RPiController;
using namespace libcamera;
LOG_DEFINE_CATEGORY(RPiNoise)
#define NAME "rpi.noise"
/* Construct the noise algorithm with a neutral (1.0) mode factor. */
Noise::Noise(Controller *controller)
	: Algorithm(controller), modeFactor_(1.0)
{
}
/* Return the algorithm's registered name ("rpi.noise"). */
char const *Noise::name() const
{
	return NAME;
}
/* Capture the camera mode's noise factor, clamped to at least 1.0. */
void Noise::switchMode(CameraMode const &cameraMode,
		       [[maybe_unused]] Metadata *metadata)
{
	/*
	 * For example, we would expect a 2x2 binned mode to have a "noise
	 * factor" of sqrt(2x2) = 2. (can't be less than one, right?)
	 */
	modeFactor_ = std::max(1.0, cameraMode.noiseFactor);
}
int Noise::read(const libcamera::YamlObject ¶ms)
{
auto value = params["reference_constant"].get<double>();
if (!value)
return -EINVAL;
referenceConstant_ = *value;
value = params["reference_slope"].get<double>();
if (!value)
return -EINVAL;
referenceSlope_ = *value;
return 0;
}
/*
 * Scale the reference noise profile by sqrt(analogue gain) / mode factor
 * for this frame and publish it as "noise.status" metadata. Warns (and
 * publishes nothing) if there is no device.status available.
 */
void Noise::prepare(Metadata *imageMetadata)
{
	struct DeviceStatus deviceStatus;
	deviceStatus.analogueGain = 1.0; /* keep compiler calm */
	if (imageMetadata->get("device.status", deviceStatus) == 0) {
		/*
		 * There is a slight question as to exactly how the noise
		 * profile, specifically the constant part of it, scales. For
		 * now we assume it all scales the same, and we'll revisit this
		 * if it proves substantially wrong. NOTE: we may also want to
		 * make some adjustments based on the camera mode (such as
		 * binning), if we knew how to discover it...
		 */
		double factor = sqrt(deviceStatus.analogueGain) / modeFactor_;
		struct NoiseStatus status;
		status.noiseConstant = referenceConstant_ * factor;
		status.noiseSlope = referenceSlope_ * factor;
		imageMetadata->set("noise.status", status);
		LOG(RPiNoise, Debug)
			<< "constant " << status.noiseConstant
			<< " slope " << status.noiseSlope;
	} else
		LOG(RPiNoise, Warning) << " no metadata";
}
/* Register algorithm with the system. */
static Algorithm *create(Controller *controller)
{
	Algorithm *algorithm = new Noise(controller);
	return algorithm;
}
static RegisterAlgorithm reg(NAME, &create);
|
0 | repos/libcamera/src/ipa/rpi/controller | repos/libcamera/src/ipa/rpi/controller/rpi/agc_channel.cpp | /* SPDX-License-Identifier: BSD-2-Clause */
/*
* Copyright (C) 2023, Raspberry Pi Ltd
*
* AGC/AEC control algorithm
*/
#include "agc_channel.h"
#include <algorithm>
#include <tuple>
#include <libcamera/base/log.h>
#include "../awb_status.h"
#include "../device_status.h"
#include "../histogram.h"
#include "../lux_status.h"
#include "../metadata.h"
using namespace RPiController;
using namespace libcamera;
using libcamera::utils::Duration;
using namespace std::literals::chrono_literals;
LOG_DECLARE_CATEGORY(RPiAgc)
int AgcMeteringMode::read(const libcamera::YamlObject ¶ms)
{
const YamlObject &yamlWeights = params["weights"];
for (const auto &p : yamlWeights.asList()) {
auto value = p.get<double>();
if (!value)
return -EINVAL;
weights.push_back(*value);
}
return 0;
}
static std::tuple<int, std::string>
readMeteringModes(std::map<std::string, AgcMeteringMode> &metering_modes,
const libcamera::YamlObject ¶ms)
{
std::string first;
int ret;
for (const auto &[key, value] : params.asDict()) {
AgcMeteringMode meteringMode;
ret = meteringMode.read(value);
if (ret)
return { ret, {} };
metering_modes[key] = std::move(meteringMode);
if (first.empty())
first = key;
}
return { 0, first };
}
/*
 * Parse an exposure profile: parallel lists of shutter times (given in
 * microseconds in the tuning file) and analogue gains describing how total
 * exposure is divided between the two. Both lists must have at least two
 * entries and be the same length.
 */
int AgcExposureMode::read(const libcamera::YamlObject &params)
{
	auto value = params["shutter"].getList<double>();
	if (!value)
		return -EINVAL;
	/* Convert the raw microsecond values to Durations. */
	std::transform(value->begin(), value->end(), std::back_inserter(shutter),
		       [](double v) { return v * 1us; });
	value = params["gain"].getList<double>();
	if (!value)
		return -EINVAL;
	gain = std::move(*value);
	if (shutter.size() < 2 || gain.size() < 2) {
		LOG(RPiAgc, Error)
			<< "AgcExposureMode: must have at least two entries in exposure profile";
		return -EINVAL;
	}
	if (shutter.size() != gain.size()) {
		LOG(RPiAgc, Error)
			<< "AgcExposureMode: expect same number of exposure and gain entries in exposure profile";
		return -EINVAL;
	}
	return 0;
}
static std::tuple<int, std::string>
readExposureModes(std::map<std::string, AgcExposureMode> &exposureModes,
const libcamera::YamlObject ¶ms)
{
std::string first;
int ret;
for (const auto &[key, value] : params.asDict()) {
AgcExposureMode exposureMode;
ret = exposureMode.read(value);
if (ret)
return { ret, {} };
exposureModes[key] = std::move(exposureMode);
if (first.empty())
first = key;
}
return { 0, first };
}
/*
 * Parse a single AGC constraint: a bound type (UPPER/LOWER), the
 * inter-quantile range [q_lo, q_hi] of the luminance histogram that it
 * measures, and a lux-dependent Y-target piecewise-linear function.
 */
int AgcConstraint::read(const libcamera::YamlObject &params)
{
	std::string boundString = params["bound"].get<std::string>("");
	/* Accept the bound keyword in any case. */
	transform(boundString.begin(), boundString.end(),
		  boundString.begin(), ::toupper);
	if (boundString != "UPPER" && boundString != "LOWER") {
		LOG(RPiAgc, Error) << "AGC constraint type should be UPPER or LOWER";
		return -EINVAL;
	}
	bound = boundString == "UPPER" ? Bound::UPPER : Bound::LOWER;
	auto value = params["q_lo"].get<double>();
	if (!value)
		return -EINVAL;
	qLo = *value;
	value = params["q_hi"].get<double>();
	if (!value)
		return -EINVAL;
	qHi = *value;
	/* An empty Pwl means the y_target was missing or malformed. */
	yTarget = params["y_target"].get<ipa::Pwl>(ipa::Pwl{});
	return yTarget.empty() ? -EINVAL : 0;
}
static std::tuple<int, AgcConstraintMode>
readConstraintMode(const libcamera::YamlObject ¶ms)
{
AgcConstraintMode mode;
int ret;
for (const auto &p : params.asList()) {
AgcConstraint constraint;
ret = constraint.read(p);
if (ret)
return { ret, {} };
mode.push_back(std::move(constraint));
}
return { 0, mode };
}
static std::tuple<int, std::string>
readConstraintModes(std::map<std::string, AgcConstraintMode> &constraintModes,
const libcamera::YamlObject ¶ms)
{
std::string first;
int ret;
for (const auto &[key, value] : params.asDict()) {
std::tie(ret, constraintModes[key]) = readConstraintMode(value);
if (ret)
return { ret, {} };
if (first.empty())
first = key;
}
return { 0, first };
}
/*
 * Parse an inter-channel constraint: the index of the channel whose total
 * exposure bounds ours, whether it is an UPPER or LOWER bound, and the
 * multiplicative factor applied to that channel's exposure.
 */
int AgcChannelConstraint::read(const libcamera::YamlObject &params)
{
	auto channelValue = params["channel"].get<unsigned int>();
	if (!channelValue) {
		LOG(RPiAgc, Error) << "AGC channel constraint must have a channel";
		return -EINVAL;
	}
	channel = *channelValue;
	std::string boundString = params["bound"].get<std::string>("");
	/* Accept the bound keyword in any case. */
	transform(boundString.begin(), boundString.end(),
		  boundString.begin(), ::toupper);
	if (boundString != "UPPER" && boundString != "LOWER") {
		LOG(RPiAgc, Error) << "AGC channel constraint type should be UPPER or LOWER";
		return -EINVAL;
	}
	bound = boundString == "UPPER" ? Bound::UPPER : Bound::LOWER;
	auto factorValue = params["factor"].get<double>();
	if (!factorValue) {
		LOG(RPiAgc, Error) << "AGC channel constraint must have a factor";
		return -EINVAL;
	}
	factor = *factorValue;
	return 0;
}
static int readChannelConstraints(std::vector<AgcChannelConstraint> &channelConstraints,
const libcamera::YamlObject ¶ms)
{
for (const auto &p : params.asList()) {
AgcChannelConstraint constraint;
int ret = constraint.read(p);
if (ret)
return ret;
channelConstraints.push_back(constraint);
}
return 0;
}
/*
 * Read the whole AGC configuration: metering, exposure and constraint
 * modes (the first entry of each becomes the default), optional channel
 * constraints, the global Y target curve, and the various tuning scalars
 * with their built-in defaults.
 */
int AgcConfig::read(const libcamera::YamlObject &params)
{
	LOG(RPiAgc, Debug) << "AgcConfig";
	int ret;
	std::tie(ret, defaultMeteringMode) =
		readMeteringModes(meteringModes, params["metering_modes"]);
	if (ret)
		return ret;
	std::tie(ret, defaultExposureMode) =
		readExposureModes(exposureModes, params["exposure_modes"]);
	if (ret)
		return ret;
	std::tie(ret, defaultConstraintMode) =
		readConstraintModes(constraintModes, params["constraint_modes"]);
	if (ret)
		return ret;
	/* Channel constraints are optional. */
	if (params.contains("channel_constraints")) {
		ret = readChannelConstraints(channelConstraints, params["channel_constraints"]);
		if (ret)
			return ret;
	}
	yTarget = params["y_target"].get<ipa::Pwl>(ipa::Pwl{});
	if (yTarget.empty())
		return -EINVAL;
	speed = params["speed"].get<double>(0.2);
	startupFrames = params["startup_frames"].get<uint16_t>(10);
	convergenceFrames = params["convergence_frames"].get<unsigned int>(6);
	fastReduceThreshold = params["fast_reduce_threshold"].get<double>(0.4);
	baseEv = params["base_ev"].get<double>(1.0);
	/* Start with quite a low value as ramping up is easier than ramping down. */
	defaultExposureTime = params["default_exposure_time"].get<double>(1000) * 1us;
	defaultAnalogueGain = params["default_analogue_gain"].get<double>(1.0);
	stableRegion = params["stable_region"].get<double>(0.02);
	desaturate = params["desaturate"].get<int>(1);
	return 0;
}
/* Zero-initialise all exposure values; they are filled in as the AGC runs. */
AgcChannel::ExposureValues::ExposureValues()
	: shutter(0s), analogueGain(0),
	  totalExposure(0s), totalExposureNoDG(0s)
{
}
/*
 * Construct an AGC channel with no modes selected yet; zero fixed
 * shutter/gain values mean "auto".
 */
AgcChannel::AgcChannel()
	: meteringMode_(nullptr), exposureMode_(nullptr), constraintMode_(nullptr),
	  frameCount_(0), lockCount_(0),
	  lastTargetExposure_(0s), ev_(1.0), flickerPeriod_(0s),
	  maxShutter_(0s), fixedShutter_(0s), fixedAnalogueGain_(0.0)
{
	/* Set AWB default values in case early frames have no updates in metadata. */
	awb_.gainR = 1.0;
	awb_.gainG = 1.0;
	awb_.gainB = 1.0;
	/*
	 * Setting status_.totalExposureValue_ to zero initially tells us
	 * it's not been calculated yet (i.e. Process hasn't yet run).
	 */
	status_ = {};
	status_.ev = ev_;
}
/*
 * Read the channel configuration, validate the metering weights against the
 * hardware's AGC zone grid, and select the default modes.
 */
int AgcChannel::read(const libcamera::YamlObject &params,
		     const Controller::HardwareConfig &hardwareConfig)
{
	int ret = config_.read(params);
	if (ret)
		return ret;
	/* Every metering mode must supply one weight per hardware AGC zone. */
	const Size &size = hardwareConfig.agcZoneWeights;
	for (auto const &modes : config_.meteringModes) {
		if (modes.second.weights.size() != size.width * size.height) {
			LOG(RPiAgc, Error) << "AgcMeteringMode: Incorrect number of weights";
			return -EINVAL;
		}
	}
	/*
	 * Set the config's defaults (which are the first ones it read) as our
	 * current modes, until someone changes them. (they're all known to
	 * exist at this point)
	 */
	meteringModeName_ = config_.defaultMeteringMode;
	meteringMode_ = &config_.meteringModes[meteringModeName_];
	exposureModeName_ = config_.defaultExposureMode;
	exposureMode_ = &config_.exposureModes[exposureModeName_];
	constraintModeName_ = config_.defaultConstraintMode;
	constraintMode_ = &config_.constraintModes[constraintModeName_];
	/* Set up the "last shutter/gain" values, in case AGC starts "disabled". */
	status_.shutterTime = config_.defaultExposureTime;
	status_.analogueGain = config_.defaultAnalogueGain;
	return 0;
}
/* Freeze AGC: latch the most recently computed shutter/gain as fixed values. */
void AgcChannel::disableAuto()
{
	fixedShutter_ = status_.shutterTime;
	fixedAnalogueGain_ = status_.analogueGain;
}
/* Re-enable automatic operation: zero fixed values mean "auto". */
void AgcChannel::enableAuto()
{
	fixedShutter_ = 0s;
	fixedAnalogueGain_ = 0;
}
/*
 * Number of frames the pipeline should drop while AGC converges. When both
 * shutter and gain are explicitly fixed there is nothing to converge, so no
 * frames need to be dropped at all.
 */
unsigned int AgcChannel::getConvergenceFrames() const
{
	bool fullyManual = fixedShutter_ && fixedAnalogueGain_;
	return fullyManual ? 0 : config_.convergenceFrames;
}
/*
 * Return the metering weights for the currently requested metering mode.
 * If setMeteringMode() named a mode we don't know (the algorithm hasn't run
 * and validated it yet), fall back to the weights already in use.
 */
std::vector<double> const &AgcChannel::getWeights() const
{
	auto it = config_.meteringModes.find(meteringModeName_);
	return it != config_.meteringModes.end() ? it->second.weights
						 : meteringMode_->weights;
}
/* Set the exposure-value multiplier applied to the Y target (1.0 = neutral). */
void AgcChannel::setEv(double ev)
{
	ev_ = ev;
}
/* Set the flicker period shutter times should be quantised to (0 disables). */
void AgcChannel::setFlickerPeriod(Duration flickerPeriod)
{
	flickerPeriod_ = flickerPeriod;
}
/* Set the upper limit applied to all shutter times by limitShutter(). */
void AgcChannel::setMaxShutter(Duration maxShutter)
{
	maxShutter_ = maxShutter;
}
/* Fix the shutter time (0 returns it to auto). */
void AgcChannel::setFixedShutter(Duration fixedShutter)
{
	fixedShutter_ = fixedShutter;
	/* Set this in case someone calls disableAuto() straight after. */
	status_.shutterTime = limitShutter(fixedShutter_);
}
/* Fix the analogue gain (0.0 returns it to auto). */
void AgcChannel::setFixedAnalogueGain(double fixedAnalogueGain)
{
	fixedAnalogueGain_ = fixedAnalogueGain;
	/* Set this in case someone calls disableAuto() straight after. */
	status_.analogueGain = limitGain(fixedAnalogueGain);
}
/* Request a metering mode by name; validated later by housekeepConfig(). */
void AgcChannel::setMeteringMode(std::string const &meteringModeName)
{
	meteringModeName_ = meteringModeName;
}
/* Request an exposure profile by name; validated later by housekeepConfig(). */
void AgcChannel::setExposureMode(std::string const &exposureModeName)
{
	exposureModeName_ = exposureModeName;
}
/* Request a constraint mode by name; validated later by housekeepConfig(). */
void AgcChannel::setConstraintMode(std::string const &constraintModeName)
{
	constraintModeName_ = constraintModeName;
}
/*
 * Handle a camera mode switch: rescale any in-progress exposure targets for
 * the new mode's sensitivity (or reset them from the fixed/default values)
 * and publish the resulting AGC status immediately.
 */
void AgcChannel::switchMode(CameraMode const &cameraMode,
			    Metadata *metadata)
{
	/* AGC expects the mode sensitivity always to be non-zero. */
	ASSERT(cameraMode.sensitivity);
	housekeepConfig();
	/*
	 * Store the mode in the local state. We must cache the sensitivity
	 * of the previous mode for the calculations below.
	 */
	double lastSensitivity = mode_.sensitivity;
	mode_ = cameraMode;
	Duration fixedShutter = limitShutter(fixedShutter_);
	if (fixedShutter && fixedAnalogueGain_) {
		/* We're going to reset the algorithm here with these fixed values. */
		fetchAwbStatus(metadata);
		double minColourGain = std::min({ awb_.gainR, awb_.gainG, awb_.gainB, 1.0 });
		ASSERT(minColourGain != 0.0);
		/* This is the equivalent of computeTargetExposure and applyDigitalGain. */
		target_.totalExposureNoDG = fixedShutter_ * fixedAnalogueGain_;
		target_.totalExposure = target_.totalExposureNoDG / minColourGain;
		/* Equivalent of filterExposure. This resets any "history". */
		filtered_ = target_;
		/* Equivalent of divideUpExposure. */
		filtered_.shutter = fixedShutter;
		filtered_.analogueGain = fixedAnalogueGain_;
	} else if (status_.totalExposureValue) {
		/*
		 * On a mode switch, various things could happen:
		 * - the exposure profile might change
		 * - a fixed exposure or gain might be set
		 * - the new mode's sensitivity might be different
		 * We cope with the last of these by scaling the target values. After
		 * that we just need to re-divide the exposure/gain according to the
		 * current exposure profile, which takes care of everything else.
		 */
		double ratio = lastSensitivity / cameraMode.sensitivity;
		target_.totalExposureNoDG *= ratio;
		target_.totalExposure *= ratio;
		filtered_.totalExposureNoDG *= ratio;
		filtered_.totalExposure *= ratio;
		divideUpExposure();
	} else {
		/*
		 * We come through here on startup, when at least one of the shutter
		 * or gain has not been fixed. We must still write those values out so
		 * that they will be applied immediately. We supply some arbitrary defaults
		 * for any that weren't set.
		 */
		/* Equivalent of divideUpExposure. */
		filtered_.shutter = fixedShutter ? fixedShutter : config_.defaultExposureTime;
		filtered_.analogueGain = fixedAnalogueGain_ ? fixedAnalogueGain_ : config_.defaultAnalogueGain;
	}
	writeAndFinish(metadata, false);
}
/*
 * Per-frame preparation: work out the digital gain needed to lift the
 * frame's actual exposure up to the requested total exposure, and report
 * whether AEC/AGC has converged ("locked") via "agc.prepare_status".
 */
void AgcChannel::prepare(Metadata *imageMetadata)
{
	Duration totalExposureValue = status_.totalExposureValue;
	AgcStatus delayedStatus;
	AgcPrepareStatus prepareStatus;
	/* Fetch the AWB status now because AWB also sets it in the prepare method. */
	fetchAwbStatus(imageMetadata);
	/* Prefer the delayed status, which matches the frame actually received. */
	if (!imageMetadata->get("agc.delayed_status", delayedStatus))
		totalExposureValue = delayedStatus.totalExposureValue;
	prepareStatus.digitalGain = 1.0;
	prepareStatus.locked = false;
	if (status_.totalExposureValue) {
		/* Process has run, so we have meaningful values. */
		DeviceStatus deviceStatus;
		if (imageMetadata->get("device.status", deviceStatus) == 0) {
			Duration actualExposure = deviceStatus.shutterSpeed *
						  deviceStatus.analogueGain;
			if (actualExposure) {
				double digitalGain = totalExposureValue / actualExposure;
				LOG(RPiAgc, Debug) << "Want total exposure " << totalExposureValue;
				/*
				 * Never ask for a gain < 1.0, and also impose
				 * some upper limit. Make it customisable?
				 */
				prepareStatus.digitalGain = std::max(1.0, std::min(digitalGain, 4.0));
				LOG(RPiAgc, Debug) << "Actual exposure " << actualExposure;
				LOG(RPiAgc, Debug) << "Use digitalGain " << prepareStatus.digitalGain;
				LOG(RPiAgc, Debug) << "Effective exposure "
						   << actualExposure * prepareStatus.digitalGain;
				/* Decide whether AEC/AGC has converged. */
				prepareStatus.locked = updateLockStatus(deviceStatus);
			}
		} else
			LOG(RPiAgc, Warning) << "AgcChannel: no device metadata";
		imageMetadata->set("agc.prepare_status", prepareStatus);
	}
}
/*
 * Run the AGC calculation for a newly arrived frame: compute the required
 * gain from the statistics, derive and filter a target total exposure,
 * apply inter-channel constraints and digital gain, then split the result
 * into shutter time and analogue gain and publish it as "agc.status".
 */
void AgcChannel::process(StatisticsPtr &stats, DeviceStatus const &deviceStatus,
			 Metadata *imageMetadata,
			 const AgcChannelTotalExposures &channelTotalExposures)
{
	frameCount_++;
	/*
	 * First a little bit of housekeeping, fetching up-to-date settings and
	 * configuration, that kind of thing.
	 */
	housekeepConfig();
	/* Get the current exposure values for the frame that's just arrived. */
	fetchCurrentExposure(deviceStatus);
	/* Compute the total gain we require relative to the current exposure. */
	double gain, targetY;
	computeGain(stats, imageMetadata, gain, targetY);
	/* Now compute the target (final) exposure which we think we want. */
	computeTargetExposure(gain);
	/* The results have to be filtered so as not to change too rapidly. */
	filterExposure();
	/*
	 * We may be asked to limit the exposure using other channels. If another channel
	 * determines our upper bound we may want to know this later.
	 */
	bool channelBound = applyChannelConstraints(channelTotalExposures);
	/*
	 * Some of the exposure has to be applied as digital gain, so work out
	 * what that is. It also tells us whether it's trying to desaturate the image
	 * more quickly, which can only happen when another channel is not limiting us.
	 */
	bool desaturate = applyDigitalGain(gain, targetY, channelBound);
	/*
	 * The last thing is to divide up the exposure value into a shutter time
	 * and analogue gain, according to the current exposure mode.
	 */
	divideUpExposure();
	/* Finally advertise what we've done. */
	writeAndFinish(imageMetadata, desaturate);
}
/*
 * Decide whether AEC/AGC has converged. The lock count increases while the
 * device's shutter/gain and our target exposure stay within a small error
 * band of the previous frame's values, and resets to zero if any of them
 * moves well outside that band. Returns true once the count saturates.
 */
bool AgcChannel::updateLockStatus(DeviceStatus const &deviceStatus)
{
	const double errorFactor = 0.10; /* make these customisable? */
	const int maxLockCount = 5;
	/* Reset "lock count" when we exceed this multiple of errorFactor */
	const double resetMargin = 1.5;
	/* Add 200us to the exposure time error to allow for line quantisation. */
	Duration exposureError = lastDeviceStatus_.shutterSpeed * errorFactor + 200us;
	double gainError = lastDeviceStatus_.analogueGain * errorFactor;
	Duration targetError = lastTargetExposure_ * errorFactor;
	/*
	 * Note that we don't know the exposure/gain limits of the sensor, so
	 * the values we keep requesting may be unachievable. For this reason
	 * we only insist that we're close to values in the past few frames.
	 */
	if (deviceStatus.shutterSpeed > lastDeviceStatus_.shutterSpeed - exposureError &&
	    deviceStatus.shutterSpeed < lastDeviceStatus_.shutterSpeed + exposureError &&
	    deviceStatus.analogueGain > lastDeviceStatus_.analogueGain - gainError &&
	    deviceStatus.analogueGain < lastDeviceStatus_.analogueGain + gainError &&
	    status_.targetExposureValue > lastTargetExposure_ - targetError &&
	    status_.targetExposureValue < lastTargetExposure_ + targetError)
		lockCount_ = std::min(lockCount_ + 1, maxLockCount);
	else if (deviceStatus.shutterSpeed < lastDeviceStatus_.shutterSpeed - resetMargin * exposureError ||
		 deviceStatus.shutterSpeed > lastDeviceStatus_.shutterSpeed + resetMargin * exposureError ||
		 deviceStatus.analogueGain < lastDeviceStatus_.analogueGain - resetMargin * gainError ||
		 deviceStatus.analogueGain > lastDeviceStatus_.analogueGain + resetMargin * gainError ||
		 status_.targetExposureValue < lastTargetExposure_ - resetMargin * targetError ||
		 status_.targetExposureValue > lastTargetExposure_ + resetMargin * targetError)
		lockCount_ = 0;
	lastDeviceStatus_ = deviceStatus;
	lastTargetExposure_ = status_.targetExposureValue;
	LOG(RPiAgc, Debug) << "Lock count updated to " << lockCount_;
	return lockCount_ == maxLockCount;
}
/*
 * Refresh status_ from the externally-set controls, and resolve the
 * requested mode names to configuration pointers. Unknown mode names are
 * rejected (with a warning) and the previous mode kept.
 */
void AgcChannel::housekeepConfig()
{
	/* First fetch all the up-to-date settings, so no one else has to do it. */
	status_.ev = ev_;
	status_.fixedShutter = limitShutter(fixedShutter_);
	status_.fixedAnalogueGain = fixedAnalogueGain_;
	status_.flickerPeriod = flickerPeriod_;
	LOG(RPiAgc, Debug) << "ev " << status_.ev << " fixedShutter "
			   << status_.fixedShutter << " fixedAnalogueGain "
			   << status_.fixedAnalogueGain;
	/*
	 * Make sure the "mode" pointers point to the up-to-date things, if
	 * they've changed.
	 */
	if (meteringModeName_ != status_.meteringMode) {
		auto it = config_.meteringModes.find(meteringModeName_);
		if (it == config_.meteringModes.end()) {
			/* Unknown name: revert to the mode already in use. */
			LOG(RPiAgc, Warning) << "No metering mode " << meteringModeName_;
			meteringModeName_ = status_.meteringMode;
		} else {
			meteringMode_ = &it->second;
			status_.meteringMode = meteringModeName_;
		}
	}
	if (exposureModeName_ != status_.exposureMode) {
		auto it = config_.exposureModes.find(exposureModeName_);
		if (it == config_.exposureModes.end()) {
			LOG(RPiAgc, Warning) << "No exposure profile " << exposureModeName_;
			exposureModeName_ = status_.exposureMode;
		} else {
			exposureMode_ = &it->second;
			status_.exposureMode = exposureModeName_;
		}
	}
	if (constraintModeName_ != status_.constraintMode) {
		auto it = config_.constraintModes.find(constraintModeName_);
		if (it == config_.constraintModes.end()) {
			LOG(RPiAgc, Warning) << "No constraint list " << constraintModeName_;
			constraintModeName_ = status_.constraintMode;
		} else {
			constraintMode_ = &it->second;
			status_.constraintMode = constraintModeName_;
		}
	}
	LOG(RPiAgc, Debug) << "exposureMode "
			   << exposureModeName_ << " constraintMode "
			   << constraintModeName_ << " meteringMode "
			   << meteringModeName_;
}
/* Snapshot the exposure actually used for the frame that just arrived. */
void AgcChannel::fetchCurrentExposure(DeviceStatus const &deviceStatus)
{
	current_.shutter = deviceStatus.shutterSpeed;
	current_.analogueGain = deviceStatus.analogueGain;
	current_.totalExposureNoDG = deviceStatus.shutterSpeed * deviceStatus.analogueGain;
	current_.totalExposure = 0s; /* this value is unused */
}
/* Refresh awb_ from metadata; keep the last known gains if none is present. */
void AgcChannel::fetchAwbStatus(Metadata *imageMetadata)
{
	int ret = imageMetadata->get("awb.status", awb_);
	if (ret != 0)
		LOG(RPiAgc, Debug) << "No AWB status found";
}
/*
 * Estimate the mean luminance (Y, normalised to [0, 1]) the image would
 * have if the given extra gain were applied, using either the Y histogram
 * or the per-region AGC statistics, and modelling pixel saturation.
 */
static double computeInitialY(StatisticsPtr &stats, AwbStatus const &awb,
			      std::vector<double> &weights, double gain)
{
	constexpr uint64_t maxVal = 1 << Statistics::NormalisationFactorPow2;
	/*
	 * If we have no AGC region stats, but do have a Y histogram, use that
	 * directly to calculate the mean Y value of the image.
	 */
	if (!stats->agcRegions.numRegions() && stats->yHist.bins()) {
		/*
		 * When the gain is applied to the histogram, anything below minBin
		 * will scale up directly with the gain, but anything above that
		 * will saturate into the top bin.
		 */
		auto &hist = stats->yHist;
		double minBin = std::min(1.0, 1.0 / gain) * hist.bins();
		double binMean = hist.interBinMean(0.0, minBin);
		double numUnsaturated = hist.cumulativeFreq(minBin);
		/* This term is from all the pixels that won't saturate. */
		double ySum = binMean * gain * numUnsaturated;
		/* And add the ones that will saturate. */
		ySum += (hist.total() - numUnsaturated) * hist.bins();
		return ySum / hist.total() / hist.bins();
	}
	ASSERT(weights.size() == stats->agcRegions.numRegions());
	/*
	 * Note that the weights are applied by the IPA to the statistics directly,
	 * before they are given to us here.
	 */
	double rSum = 0, gSum = 0, bSum = 0, pixelSum = 0;
	for (unsigned int i = 0; i < stats->agcRegions.numRegions(); i++) {
		auto &region = stats->agcRegions.get(i);
		/* Clip each channel to the saturation level after applying the gain. */
		rSum += std::min<double>(region.val.rSum * gain, (maxVal - 1) * region.counted);
		gSum += std::min<double>(region.val.gSum * gain, (maxVal - 1) * region.counted);
		bSum += std::min<double>(region.val.bSum * gain, (maxVal - 1) * region.counted);
		pixelSum += region.counted;
	}
	if (pixelSum == 0.0) {
		LOG(RPiAgc, Warning) << "computeInitialY: pixelSum is zero";
		return 0;
	}
	double ySum;
	/* Factor in the AWB correction if needed. */
	if (stats->agcStatsPos == Statistics::AgcStatsPos::PreWb) {
		/* Rec. 601 luma weights applied to the white-balanced sums. */
		ySum = rSum * awb.gainR * .299 +
		       gSum * awb.gainG * .587 +
		       bSum * awb.gainB * .114;
	} else
		ySum = rSum * .299 + gSum * .587 + bSum * .114;
	return ySum / pixelSum / (1 << 16);
}
/*
 * We handle extra gain through EV by adjusting our Y targets. However, you
 * simply can't monitor histograms once they get very close to (or beyond!)
 * saturation, so we clamp the Y targets to this value. It does mean that EV
 * increases don't necessarily do quite what you might expect in certain
 * (contrived) cases. The same limit is applied to the constraint-mode
 * targets in constraintComputeGain() below.
 */
static constexpr double EvGainYTargetLimit = 0.9;
/*
 * Compute the gain a constraint demands: evaluate its lux-dependent Y
 * target (scaled by EV but clamped), then compare it with the measured
 * inter-quantile mean of the luminance histogram.
 */
static double constraintComputeGain(AgcConstraint &c, const Histogram &h, double lux,
				    double evGain, double &targetY)
{
	double rawTarget = c.yTarget.eval(c.yTarget.domain().clamp(lux));
	targetY = std::min(EvGainYTargetLimit, rawTarget * evGain);
	double measured = h.interQuantileMean(c.qLo, c.qHi);
	return (targetY * h.bins()) / measured;
}
/*
 * Compute the total gain we need relative to the current exposure, plus the
 * Y target it corresponds to: start from the lux-dependent global target,
 * iterate to allow for saturating regions, then let each constraint in the
 * current constraint mode raise (LOWER bound) or cap (UPPER bound) it.
 */
void AgcChannel::computeGain(StatisticsPtr &statistics, Metadata *imageMetadata,
			     double &gain, double &targetY)
{
	struct LuxStatus lux = {};
	lux.lux = 400; /* default lux level to 400 in case no metadata found */
	if (imageMetadata->get("lux.status", lux) != 0)
		LOG(RPiAgc, Warning) << "No lux level found";
	const Histogram &h = statistics->yHist;
	double evGain = status_.ev * config_.baseEv;
	/*
	 * The initial gain and target_Y come from some of the regions. After
	 * that we consider the histogram constraints.
	 */
	targetY = config_.yTarget.eval(config_.yTarget.domain().clamp(lux.lux));
	targetY = std::min(EvGainYTargetLimit, targetY * evGain);
	/*
	 * Do this calculation a few times as brightness increase can be
	 * non-linear when there are saturated regions.
	 */
	gain = 1.0;
	for (int i = 0; i < 8; i++) {
		double initialY = computeInitialY(statistics, awb_, meteringMode_->weights, gain);
		/* The +.001 guards against division by zero on black frames. */
		double extraGain = std::min(10.0, targetY / (initialY + .001));
		gain *= extraGain;
		LOG(RPiAgc, Debug) << "Initial Y " << initialY << " target " << targetY
				   << " gives gain " << gain;
		if (extraGain < 1.01) /* close enough */
			break;
	}
	for (auto &c : *constraintMode_) {
		double newTargetY;
		double newGain = constraintComputeGain(c, h, lux.lux, evGain, newTargetY);
		LOG(RPiAgc, Debug) << "Constraint has target_Y "
				   << newTargetY << " giving gain " << newGain;
		if (c.bound == AgcConstraint::Bound::LOWER && newGain > gain) {
			LOG(RPiAgc, Debug) << "Lower bound constraint adopted";
			gain = newGain;
			targetY = newTargetY;
		} else if (c.bound == AgcConstraint::Bound::UPPER && newGain < gain) {
			LOG(RPiAgc, Debug) << "Upper bound constraint adopted";
			gain = newGain;
			targetY = newTargetY;
		}
	}
	LOG(RPiAgc, Debug) << "Final gain " << gain << " (target_Y " << targetY << " ev "
			   << status_.ev << " base_ev " << config_.baseEv
			   << ")";
}
/*
 * Turn the required gain into a target total exposure, limited by what the
 * current exposure profile (or any fixed shutter/gain) allows.
 */
void AgcChannel::computeTargetExposure(double gain)
{
	if (status_.fixedShutter && status_.fixedAnalogueGain) {
		/*
		 * When ag and shutter are both fixed, we need to drive the
		 * total exposure so that we end up with a digital gain of at least
		 * 1/minColourGain. Otherwise we'd desaturate channels causing
		 * white to go cyan or magenta.
		 */
		double minColourGain = std::min({ awb_.gainR, awb_.gainG, awb_.gainB, 1.0 });
		ASSERT(minColourGain != 0.0);
		target_.totalExposure =
			status_.fixedShutter * status_.fixedAnalogueGain / minColourGain;
	} else {
		/*
		 * The statistics reflect the image without digital gain, so the final
		 * total exposure we're aiming for is:
		 */
		target_.totalExposure = current_.totalExposureNoDG * gain;
		/* The final target exposure is also limited to what the exposure mode allows. */
		Duration maxShutter = status_.fixedShutter
					      ? status_.fixedShutter
					      : exposureMode_->shutter.back();
		maxShutter = limitShutter(maxShutter);
		Duration maxTotalExposure =
			maxShutter *
			(status_.fixedAnalogueGain != 0.0
				 ? status_.fixedAnalogueGain
				 : exposureMode_->gain.back());
		target_.totalExposure = std::min(target_.totalExposure, maxTotalExposure);
	}
	LOG(RPiAgc, Debug) << "Target totalExposure " << target_.totalExposure;
}
/*
 * Clamp our filtered total exposure to the bounds derived from the other
 * channels' total exposures. Returns true if any constraint actually
 * limited us (used later to suppress desaturation).
 */
bool AgcChannel::applyChannelConstraints(const AgcChannelTotalExposures &channelTotalExposures)
{
	bool channelBound = false;
	LOG(RPiAgc, Debug)
		<< "Total exposure before channel constraints " << filtered_.totalExposure;
	for (const auto &constraint : config_.channelConstraints) {
		LOG(RPiAgc, Debug)
			<< "Check constraint: channel " << constraint.channel << " bound "
			<< (constraint.bound == AgcChannelConstraint::Bound::UPPER ? "UPPER" : "LOWER")
			<< " factor " << constraint.factor;
		/* Skip constraints whose reference channel has no exposure yet. */
		if (constraint.channel >= channelTotalExposures.size() ||
		    !channelTotalExposures[constraint.channel]) {
			LOG(RPiAgc, Debug) << "no such channel or no exposure available- skipped";
			continue;
		}
		libcamera::utils::Duration limitExposure =
			channelTotalExposures[constraint.channel] * constraint.factor;
		LOG(RPiAgc, Debug) << "Limit exposure " << limitExposure;
		if ((constraint.bound == AgcChannelConstraint::Bound::UPPER &&
		     filtered_.totalExposure > limitExposure) ||
		    (constraint.bound == AgcChannelConstraint::Bound::LOWER &&
		     filtered_.totalExposure < limitExposure)) {
			filtered_.totalExposure = limitExposure;
			LOG(RPiAgc, Debug) << "Constraint applies";
			channelBound = true;
		} else
			LOG(RPiAgc, Debug) << "Constraint does not apply";
	}
	LOG(RPiAgc, Debug)
		<< "Total exposure after channel constraints " << filtered_.totalExposure;
	return channelBound;
}
/*
 * Decide how much of the filtered total exposure to supply as digital gain
 * (at least 1/minColourGain so white balance can't push channels above
 * white). Returns true when extra digital gain is demanded to desaturate an
 * over-exposed image faster.
 */
bool AgcChannel::applyDigitalGain(double gain, double targetY, bool channelBound)
{
	double minColourGain = std::min({ awb_.gainR, awb_.gainG, awb_.gainB, 1.0 });
	ASSERT(minColourGain != 0.0);
	double dg = 1.0 / minColourGain;
	/*
	 * I think this pipeline subtracts black level and rescales before we
	 * get the stats, so no need to worry about it.
	 */
	LOG(RPiAgc, Debug) << "after AWB, target dg " << dg << " gain " << gain
			   << " target_Y " << targetY;
	/*
	 * Finally, if we're trying to reduce exposure but the target_Y is
	 * "close" to 1.0, then the gain computed for that constraint will be
	 * only slightly less than one, because the measured Y can never be
	 * larger than 1.0. When this happens, demand a large digital gain so
	 * that the exposure can be reduced, de-saturating the image much more
	 * quickly (and we then approach the correct value more quickly from
	 * below).
	 */
	bool desaturate = false;
	if (config_.desaturate)
		desaturate = !channelBound &&
			     targetY > config_.fastReduceThreshold && gain < sqrt(targetY);
	if (desaturate)
		dg /= config_.fastReduceThreshold;
	LOG(RPiAgc, Debug) << "Digital gain " << dg << " desaturate? " << desaturate;
	filtered_.totalExposureNoDG = filtered_.totalExposure / dg;
	LOG(RPiAgc, Debug) << "Target totalExposureNoDG " << filtered_.totalExposureNoDG;
	return desaturate;
}
/*
 * Low-pass filter the target total exposure into filtered_ so it doesn't
 * change too abruptly; adapt instantly during startup or fully manual
 * operation, and leave small changes within the stable region alone.
 */
void AgcChannel::filterExposure()
{
	double speed = config_.speed;
	double stableRegion = config_.stableRegion;
	/*
	 * AGC adapts instantly if both shutter and gain are directly specified
	 * or we're in the startup phase.
	 */
	if ((status_.fixedShutter && status_.fixedAnalogueGain) ||
	    frameCount_ <= config_.startupFrames)
		speed = 1.0;
	if (!filtered_.totalExposure) {
		filtered_.totalExposure = target_.totalExposure;
	} else if (filtered_.totalExposure * (1.0 - stableRegion) < target_.totalExposure &&
		   filtered_.totalExposure * (1.0 + stableRegion) > target_.totalExposure) {
		/* Total exposure must change by more than this or we leave it alone. */
	} else {
		/*
		 * If close to the result go faster, to save making so many
		 * micro-adjustments on the way. (Make this customisable?)
		 */
		if (filtered_.totalExposure < 1.2 * target_.totalExposure &&
		    filtered_.totalExposure > 0.8 * target_.totalExposure)
			speed = sqrt(speed);
		filtered_.totalExposure = speed * target_.totalExposure +
					  filtered_.totalExposure * (1.0 - speed);
	}
	LOG(RPiAgc, Debug) << "After filtering, totalExposure " << filtered_.totalExposure
			   << " no dg " << filtered_.totalExposureNoDG;
}
/*
 * Split the filtered total exposure into a shutter time and analogue gain
 * by walking the stages of the current exposure profile, then quantise the
 * shutter to the flicker period (compensating with analogue gain) when
 * flicker avoidance is active.
 */
void AgcChannel::divideUpExposure()
{
	/*
	 * Sending the fixed shutter/gain cases through the same code may seem
	 * unnecessary, but it will make more sense when we extend this to cover
	 * variable aperture.
	 */
	Duration exposureValue = filtered_.totalExposureNoDG;
	Duration shutterTime;
	double analogueGain;
	shutterTime = status_.fixedShutter ? status_.fixedShutter
					   : exposureMode_->shutter[0];
	shutterTime = limitShutter(shutterTime);
	analogueGain = status_.fixedAnalogueGain != 0.0 ? status_.fixedAnalogueGain
							: exposureMode_->gain[0];
	analogueGain = limitGain(analogueGain);
	if (shutterTime * analogueGain < exposureValue) {
		/* Raise shutter then gain stage by stage until the target is met. */
		for (unsigned int stage = 1;
		     stage < exposureMode_->gain.size(); stage++) {
			if (!status_.fixedShutter) {
				Duration stageShutter =
					limitShutter(exposureMode_->shutter[stage]);
				if (stageShutter * analogueGain >= exposureValue) {
					shutterTime = exposureValue / analogueGain;
					break;
				}
				shutterTime = stageShutter;
			}
			if (status_.fixedAnalogueGain == 0.0) {
				if (exposureMode_->gain[stage] * shutterTime >= exposureValue) {
					analogueGain = exposureValue / shutterTime;
					break;
				}
				analogueGain = exposureMode_->gain[stage];
				analogueGain = limitGain(analogueGain);
			}
		}
	}
	LOG(RPiAgc, Debug) << "Divided up shutter and gain are " << shutterTime << " and "
			   << analogueGain;
	/*
	 * Finally adjust shutter time for flicker avoidance (require both
	 * shutter and gain not to be fixed).
	 */
	if (!status_.fixedShutter && !status_.fixedAnalogueGain &&
	    status_.flickerPeriod) {
		int flickerPeriods = shutterTime / status_.flickerPeriod;
		if (flickerPeriods) {
			Duration newShutterTime = flickerPeriods * status_.flickerPeriod;
			analogueGain *= shutterTime / newShutterTime;
			/*
			 * We should still not allow the ag to go over the
			 * largest value in the exposure mode. Note that this
			 * may force more of the total exposure into the digital
			 * gain as a side-effect.
			 */
			analogueGain = std::min(analogueGain, exposureMode_->gain.back());
			analogueGain = limitGain(analogueGain);
			shutterTime = newShutterTime;
		}
		LOG(RPiAgc, Debug) << "After flicker avoidance, shutter "
				   << shutterTime << " gain " << analogueGain;
	}
	filtered_.shutter = shutterTime;
	filtered_.analogueGain = analogueGain;
}
/*
 * Copy the computed results into status_ and publish them as "agc.status".
 * When desaturating, the target exposure is reported as zero so the lock
 * detection won't treat the transient as convergence.
 */
void AgcChannel::writeAndFinish(Metadata *imageMetadata, bool desaturate)
{
	status_.totalExposureValue = filtered_.totalExposure;
	status_.targetExposureValue = desaturate ? 0s : target_.totalExposure;
	status_.shutterTime = filtered_.shutter;
	status_.analogueGain = filtered_.analogueGain;
	/*
	 * Write to metadata as well, in case anyone wants to update the camera
	 * immediately.
	 */
	imageMetadata->set("agc.status", status_);
	LOG(RPiAgc, Debug) << "Output written, total exposure requested is "
			   << filtered_.totalExposure;
	LOG(RPiAgc, Debug) << "Camera exposure update: shutter time " << filtered_.shutter
			   << " analogue gain " << filtered_.analogueGain;
}
/*
 * Clamp a shutter time to the sensor mode's minimum and the configured
 * maximum. Returns the clamped value.
 */
Duration AgcChannel::limitShutter(Duration shutter)
{
	/*
	 * shutter == 0 is a special case for fixed shutter values, and must pass
	 * through unchanged
	 */
	if (!shutter)
		return shutter;
	/*
	 * Apply the two bounds separately rather than with std::clamp():
	 * maxShutter_ is zero until setMaxShutter() has been called, and
	 * std::clamp() has undefined behaviour when hi < lo.
	 */
	shutter = std::max(shutter, mode_.minShutter);
	if (maxShutter_)
		shutter = std::min(shutter, maxShutter_);
	return shutter;
}
/*
 * Constrain an analogue gain to what the sensor supports. Only the lower
 * bound is enforced here: any shortfall above the sensor's maximum analogue
 * gain is made up later with digital gain in the ISP.
 *
 * A gain of 0.0 is the "fixed gain" sentinel and passes through untouched.
 */
double AgcChannel::limitGain(double gain) const
{
	if (gain)
		gain = std::max(gain, mode_.minAnalogueGain);
	return gain;
}
|
0 | repos/libcamera/src/ipa/rpi/controller | repos/libcamera/src/ipa/rpi/controller/rpi/sharpen.cpp | /* SPDX-License-Identifier: BSD-2-Clause */
/*
* Copyright (C) 2019, Raspberry Pi Ltd
*
* sharpening control algorithm
*/
#include <math.h>
#include <libcamera/base/log.h>
#include "../sharpen_status.h"
#include "sharpen.h"
using namespace RPiController;
using namespace libcamera;
LOG_DEFINE_CATEGORY(RPiSharpen)
#define NAME "rpi.sharpen"
/* Construct with the default (neutral) user strength of 1.0. */
Sharpen::Sharpen(Controller *controller)
	: SharpenAlgorithm(controller), userStrength_(1.0)
{
}
/* Return the algorithm's registered name ("rpi.sharpen"). */
char const *Sharpen::name() const
{
	return NAME;
}
/* Cache the camera mode's noise factor, used to tone sharpening up/down. */
void Sharpen::switchMode(CameraMode const &cameraMode,
			 [[maybe_unused]] Metadata *metadata)
{
	/* can't be less than one, right? */
	modeFactor_ = std::max(1.0, cameraMode.noiseFactor);
}
/*
 * Read tuning parameters. All three (threshold, strength, limit) default
 * to 1.0 when absent from the tuning file. Always succeeds.
 */
int Sharpen::read(const libcamera::YamlObject ¶ms)
{
	threshold_ = params["threshold"].get<double>(1.0);
	strength_ = params["strength"].get<double>(1.0);
	limit_ = params["limit"].get<double>(1.0);
	LOG(RPiSharpen, Debug)
		<< "Read threshold " << threshold_
		<< " strength " << strength_
		<< " limit " << limit_;
	return 0;
}
/* Application-facing control of the overall sharpening amount. */
void Sharpen::setStrength(double strength)
{
	/*
	 * Note that this function is how an application sets the overall
	 * sharpening "strength". We call this the "user strength" field
	 * as there already is a strength_ field - being an internal gain
	 * parameter that gets passed to the ISP control code. Negative
	 * values are not allowed - coerce them to zero (no sharpening).
	 */
	userStrength_ = std::max(0.0, strength);
}
/*
 * Publish the per-frame sharpening settings ("sharpen.status"), combining
 * the tuning values, the mode factor and the application's user strength.
 */
void Sharpen::prepare(Metadata *imageMetadata)
{
	/*
	 * The user strength scales the internal gain directly, but the limit
	 * and threshold are adjusted more gently through its square root -
	 * an arbitrary but mild mapping.
	 */
	double strengthSqrt = sqrt(userStrength_);
	/* Guard against divide-by-zero when the user strength is zero. */
	double thresholdDivisor = std::max(0.01, strengthSqrt);

	/*
	 * Binned modes seem to need the sharpening toned down with this
	 * pipeline, hence the modeFactor_ scaling below.
	 */
	struct SharpenStatus status;
	status.strength = strength_ / modeFactor_ * userStrength_;
	status.limit = limit_ / modeFactor_ * strengthSqrt;
	status.threshold = threshold_ * modeFactor_ / thresholdDivisor;
	/* Finally, report any application-supplied parameters that were used. */
	status.userStrength = userStrength_;
	imageMetadata->set("sharpen.status", status);
}
/* Register algorithm with the system. */
static Algorithm *create(Controller *controller)
{
	return new Sharpen(controller);
}

/* Static registration: adds "rpi.sharpen" to the algorithm factory table. */
static RegisterAlgorithm reg(NAME, &create);
|
0 | repos/libcamera/src/ipa/rpi/controller | repos/libcamera/src/ipa/rpi/controller/rpi/agc.h | /* SPDX-License-Identifier: BSD-2-Clause */
/*
* Copyright (C) 2019, Raspberry Pi Ltd
*
* AGC/AEC control algorithm
*/
#pragma once
#include <optional>
#include <string>
#include <vector>
#include "../agc_algorithm.h"
#include "agc_channel.h"
namespace RPiController {

/* Per-channel bookkeeping: the channel plus its most recently seen inputs. */
struct AgcChannelData {
	AgcChannel channel;
	/* DeviceStatus of the last frame captured for this channel, if any. */
	std::optional<DeviceStatus> deviceStatus;
	/* Statistics of the last frame captured for this channel. */
	StatisticsPtr statistics;
};

/*
 * Top-level AGC/AEC algorithm. Wraps one or more AgcChannels and cycles
 * through the active ones in round-robin fashion (multiple channels are
 * used e.g. for multi-exposure HDR).
 */
class Agc : public AgcAlgorithm
{
public:
	Agc(Controller *controller);
	char const *name() const override;
	int read(const libcamera::YamlObject ¶ms) override;
	unsigned int getConvergenceFrames() const override;
	std::vector<double> const &getWeights() const override;
	void setEv(unsigned int channel, double ev) override;
	void setFlickerPeriod(libcamera::utils::Duration flickerPeriod) override;
	void setMaxShutter(libcamera::utils::Duration maxShutter) override;
	void setFixedShutter(unsigned int channelIndex,
			     libcamera::utils::Duration fixedShutter) override;
	void setFixedAnalogueGain(unsigned int channelIndex,
				  double fixedAnalogueGain) override;
	void setMeteringMode(std::string const &meteringModeName) override;
	void setExposureMode(std::string const &exposureModeName) override;
	void setConstraintMode(std::string const &contraintModeName) override;
	void enableAuto() override;
	void disableAuto() override;
	void switchMode(CameraMode const &cameraMode, Metadata *metadata) override;
	void prepare(Metadata *imageMetadata) override;
	void process(StatisticsPtr &stats, Metadata *imageMetadata) override;
	void setActiveChannels(const std::vector<unsigned int> &activeChannels) override;

private:
	/* Returns 0 if channel is a valid index, -1 (with a warning) otherwise. */
	int checkChannel(unsigned int channel) const;
	/* One entry per channel configured in the tuning file. */
	std::vector<AgcChannelData> channelData_;
	/* The channels currently being cycled through. */
	std::vector<unsigned int> activeChannels_;
	unsigned int index_; /* index into the activeChannels_ */
	/* Latest known total exposure for each channel. */
	AgcChannelTotalExposures channelTotalExposures_;
};

} /* namespace RPiController */
|
0 | repos/libcamera/src/ipa/rpi/controller | repos/libcamera/src/ipa/rpi/controller/rpi/geq.h | /* SPDX-License-Identifier: BSD-2-Clause */
/*
* Copyright (C) 2019, Raspberry Pi Ltd
*
* GEQ (green equalisation) control algorithm
*/
#pragma once
#include <libipa/pwl.h>
#include "../algorithm.h"
#include "../geq_status.h"
namespace RPiController {

/* Back End algorithm to apply appropriate GEQ settings. */

/* Tuning parameters for green equalisation. */
struct GeqConfig {
	uint16_t offset;
	double slope;
	libcamera::ipa::Pwl strength; /* lux to strength factor */
};

/* Green equalisation: computes per-frame GEQ settings for the ISP. */
class Geq : public Algorithm
{
public:
	Geq(Controller *controller);
	char const *name() const override;
	int read(const libcamera::YamlObject ¶ms) override;
	void prepare(Metadata *imageMetadata) override;

private:
	GeqConfig config_;
};

} /* namespace RPiController */
|
0 | repos/libcamera/src/ipa/rpi/controller | repos/libcamera/src/ipa/rpi/controller/rpi/tonemap.cpp | /* SPDX-License-Identifier: BSD-2-Clause */
/*
* Copyright (C) 2022 Raspberry Pi Ltd
*
* Tonemap control algorithm
*/
#include "tonemap.h"
#include <libcamera/base/log.h>
#include "tonemap_status.h"
using namespace RPiController;
using namespace libcamera;
LOG_DEFINE_CATEGORY(RPiTonemap)
#define NAME "rpi.tonemap"
/* Trivial constructor; all configuration arrives later via read(). */
Tonemap::Tonemap(Controller *controller)
	: Algorithm(controller)
{
}
/* Return the algorithm's registered name ("rpi.tonemap"). */
char const *Tonemap::name() const
{
	return NAME;
}
/*
 * Read the tonemap tuning parameters. Every field has a default, so this
 * always succeeds even on an empty parameter block.
 */
int Tonemap::read(const libcamera::YamlObject ¶ms)
{
	/* The tone curve itself; empty Pwl if the tuning file omits it. */
	config_.tonemap = params["tone_curve"].get<ipa::Pwl>(ipa::Pwl{});
	/* Detail enhancement and filter controls. */
	config_.detailConstant = params["detail_constant"].get<uint16_t>(0);
	config_.detailSlope = params["detail_slope"].get<double>(0.1);
	config_.iirStrength = params["iir_strength"].get<double>(1.0);
	config_.strength = params["strength"].get<double>(1.0);
	return 0;
}
/* Nothing to initialise: the configuration read in read() is used as-is. */
void Tonemap::initialise()
{
}
/* Publish the (static) tonemap settings for this frame ("tonemap.status"). */
void Tonemap::prepare(Metadata *imageMetadata)
{
	TonemapStatus status;
	status.tonemap = config_.tonemap;
	status.strength = config_.strength;
	status.iirStrength = config_.iirStrength;
	status.detailSlope = config_.detailSlope;
	status.detailConstant = config_.detailConstant;
	imageMetadata->set("tonemap.status", status);
}
// Register algorithm with the system.
/*
 * Factory used by the registration below. The C-style cast to Algorithm*
 * was redundant (Tonemap publicly derives from Algorithm), so rely on the
 * implicit upcast as the other algorithms (e.g. rpi.sharpen) do.
 */
static Algorithm *Create(Controller *controller)
{
	return new Tonemap(controller);
}

/* Static registration: adds "rpi.tonemap" to the algorithm factory table. */
static RegisterAlgorithm reg(NAME, &Create);
|
0 | repos/libcamera/src/ipa/rpi/controller | repos/libcamera/src/ipa/rpi/controller/rpi/dpc.cpp | /* SPDX-License-Identifier: BSD-2-Clause */
/*
* Copyright (C) 2019, Raspberry Pi Ltd
*
* DPC (defective pixel correction) control algorithm
*/
#include <libcamera/base/log.h>
#include "dpc.h"
using namespace RPiController;
using namespace libcamera;
LOG_DEFINE_CATEGORY(RPiDpc)
/*
* We use the lux status so that we can apply stronger settings in darkness (if
* necessary).
*/
#define NAME "rpi.dpc"
/* Trivial constructor; the strength setting arrives later via read(). */
Dpc::Dpc(Controller *controller)
	: Algorithm(controller)
{
}
/* Return the algorithm's registered name ("rpi.dpc"). */
char const *Dpc::name() const
{
	return NAME;
}
/*
 * Read the DPC strength (default 1). Only 0, 1 and 2 are valid;
 * anything else is rejected with -EINVAL.
 */
int Dpc::read(const libcamera::YamlObject ¶ms)
{
	config_.strength = params["strength"].get<int>(1);
	bool valid = config_.strength >= 0 && config_.strength <= 2;
	if (!valid) {
		LOG(RPiDpc, Error) << "Bad strength value";
		return -EINVAL;
	}
	return 0;
}
/* Publish the DPC settings for this frame ("dpc.status"). */
void Dpc::prepare(Metadata *imageMetadata)
{
	/* Should we vary this with lux level or analogue gain? TBD. */
	DpcStatus status = {};
	status.strength = config_.strength;
	LOG(RPiDpc, Debug) << "strength " << status.strength;
	imageMetadata->set("dpc.status", status);
}
/* Register algorithm with the system. */
/*
 * Factory used by the registration below. The C-style cast to Algorithm*
 * was redundant (Dpc publicly derives from Algorithm, see dpc.h), so rely
 * on the implicit upcast as sharpen.cpp and black_level.cpp do.
 */
static Algorithm *create(Controller *controller)
{
	return new Dpc(controller);
}

/* Static registration: adds "rpi.dpc" to the algorithm factory table. */
static RegisterAlgorithm reg(NAME, &create);
|
0 | repos/libcamera/src/ipa/rpi/controller | repos/libcamera/src/ipa/rpi/controller/rpi/agc.cpp | /* SPDX-License-Identifier: BSD-2-Clause */
/*
* Copyright (C) 2019, Raspberry Pi Ltd
*
* AGC/AEC control algorithm
*/
#include "agc.h"
#include <libcamera/base/log.h>
#include "../metadata.h"
using namespace RPiController;
using namespace libcamera;
using libcamera::utils::Duration;
using namespace std::literals::chrono_literals;
LOG_DEFINE_CATEGORY(RPiAgc)
#define NAME "rpi.agc"
/* Start with a single active channel (index 0) until told otherwise. */
Agc::Agc(Controller *controller)
	: AgcAlgorithm(controller),
	  activeChannels_({ 0 }), index_(0)
{
}
/* Return the algorithm's registered name ("rpi.agc"). */
char const *Agc::name() const
{
	return NAME;
}
/*
 * Read the AGC configuration. Two syntaxes are accepted: the legacy
 * single-channel form (parameters at the top level), and a "channels" list
 * where each entry configures one AgcChannel.
 */
int Agc::read(const libcamera::YamlObject ¶ms)
{
	/*
	 * When there is only a single channel we can read the old style syntax.
	 * Otherwise we expect a "channels" keyword followed by a list of configurations.
	 */
	if (!params.contains("channels")) {
		LOG(RPiAgc, Debug) << "Single channel only";
		channelTotalExposures_.resize(1, 0s);
		channelData_.emplace_back();
		return channelData_.back().channel.read(params, getHardwareConfig());
	}

	for (const auto &channelParams : params["channels"].asList()) {
		LOG(RPiAgc, Debug) << "Read AGC channel";
		AgcChannelData &data = channelData_.emplace_back();
		int ret = data.channel.read(channelParams, getHardwareConfig());
		if (ret)
			return ret;
	}

	LOG(RPiAgc, Debug) << "Read " << channelData_.size() << " channel(s)";
	if (channelData_.empty()) {
		LOG(RPiAgc, Error) << "No AGC channels provided";
		return -1;
	}

	channelTotalExposures_.resize(channelData_.size(), 0s);

	return 0;
}
/* Validate a channel index: 0 if usable, -1 (with a warning) if out of range. */
int Agc::checkChannel(unsigned int channelIndex) const
{
	if (channelIndex < channelData_.size())
		return 0;

	LOG(RPiAgc, Warning) << "AGC channel " << channelIndex << " not available";
	return -1;
}
/* Freeze automatic exposure/gain updates on every channel. */
void Agc::disableAuto()
{
	LOG(RPiAgc, Debug) << "disableAuto";

	/* All channels are enabled/disabled together. */
	for (auto &data : channelData_)
		data.channel.disableAuto();
}
/* Resume automatic exposure/gain updates on every channel. */
void Agc::enableAuto()
{
	LOG(RPiAgc, Debug) << "enableAuto";

	/* All channels are enabled/disabled together. */
	for (auto &data : channelData_)
		data.channel.enableAuto();
}
/* Frames needed for the AGC to settle, scaled by the number of active channels. */
unsigned int Agc::getConvergenceFrames() const
{
	/* If there are n channels, it presumably takes n times as long to converge. */
	return channelData_[0].channel.getConvergenceFrames() * activeChannels_.size();
}
/* Return the metering weights, taken from channel 0 (shared by all channels). */
std::vector<double> const &Agc::getWeights() const
{
	/*
	 * In future the metering weights may be determined differently, making it
	 * difficult to associate different sets of weight with different channels.
	 * Therefore we shall impose a limitation, at least for now, that all
	 * channels will use the same weights.
	 */
	return channelData_[0].channel.getWeights();
}
/* Set the EV compensation for one specific channel (ignored if invalid). */
void Agc::setEv(unsigned int channelIndex, double ev)
{
	if (checkChannel(channelIndex))
		return;

	LOG(RPiAgc, Debug) << "setEv " << ev << " for channel " << channelIndex;
	channelData_[channelIndex].channel.setEv(ev);
}
/* Set the anti-flicker period; applied identically to every channel. */
void Agc::setFlickerPeriod(Duration flickerPeriod)
{
	LOG(RPiAgc, Debug) << "setFlickerPeriod " << flickerPeriod;

	/* Flicker period will be the same across all channels. */
	for (auto &data : channelData_)
		data.channel.setFlickerPeriod(flickerPeriod);
}
/* Set the maximum allowed shutter time; applied identically to every channel. */
void Agc::setMaxShutter(Duration maxShutter)
{
	/* Frame durations will be the same across all channels too. */
	for (auto &data : channelData_)
		data.channel.setMaxShutter(maxShutter);
}
/* Pin one channel's shutter time (0 releases it back to auto). */
void Agc::setFixedShutter(unsigned int channelIndex, Duration fixedShutter)
{
	if (checkChannel(channelIndex))
		return;

	LOG(RPiAgc, Debug) << "setFixedShutter " << fixedShutter
			   << " for channel " << channelIndex;
	channelData_[channelIndex].channel.setFixedShutter(fixedShutter);
}
/* Pin one channel's analogue gain (0 releases it back to auto). */
void Agc::setFixedAnalogueGain(unsigned int channelIndex, double fixedAnalogueGain)
{
	if (checkChannel(channelIndex))
		return;

	LOG(RPiAgc, Debug) << "setFixedAnalogueGain " << fixedAnalogueGain
			   << " for channel " << channelIndex;
	channelData_[channelIndex].channel.setFixedAnalogueGain(fixedAnalogueGain);
}
/* Select the metering mode by name; applied identically to every channel. */
void Agc::setMeteringMode(std::string const &meteringModeName)
{
	/* Metering modes will be the same across all channels too. */
	for (auto &data : channelData_)
		data.channel.setMeteringMode(meteringModeName);
}
/* Select the exposure mode by name; applied identically to every channel. */
void Agc::setExposureMode(std::string const &exposureModeName)
{
	LOG(RPiAgc, Debug) << "setExposureMode " << exposureModeName;

	/* Exposure mode will be the same across all channels. */
	for (auto &data : channelData_)
		data.channel.setExposureMode(exposureModeName);
}
/* Select the constraint mode by name; applied identically to every channel. */
void Agc::setConstraintMode(std::string const &constraintModeName)
{
	LOG(RPiAgc, Debug) << "setConstraintMode " << constraintModeName;

	/* Constraint mode will be the same across all channels. */
	for (auto &data : channelData_)
		data.channel.setConstraintMode(constraintModeName);
}
template<typename T>
std::ostream &operator<<(std::ostream &os, const std::vector<T> &v)
{
os << "{";
for (const auto &e : v)
os << " " << e;
os << " }";
return os;
}
/*
 * Replace the set of channels cycled through by process(). An empty list or
 * any out-of-range index leaves the current set unchanged (with a warning).
 * The round-robin position is reset to the first entry.
 */
void Agc::setActiveChannels(const std::vector<unsigned int> &activeChannels)
{
	if (activeChannels.empty()) {
		LOG(RPiAgc, Warning) << "No active AGC channels supplied";
		return;
	}

	for (auto index : activeChannels)
		if (checkChannel(index))
			return;

	LOG(RPiAgc, Debug) << "setActiveChannels " << activeChannels;
	activeChannels_ = activeChannels;
	index_ = 0;
}
/*
 * Propagate a camera mode switch to every channel, then ensure the metadata
 * carries the agc.status of the first *active* channel, since that is the
 * channel the round robin restarts from.
 */
void Agc::switchMode(CameraMode const &cameraMode,
		     Metadata *metadata)
{
	/*
	 * We run switchMode on every channel, and then we're going to start over
	 * with the first active channel again which means that this channel's
	 * status needs to be the one we leave in the metadata.
	 */
	AgcStatus status;

	for (unsigned int channelIndex = 0; channelIndex < channelData_.size(); channelIndex++) {
		LOG(RPiAgc, Debug) << "switchMode for channel " << channelIndex;
		channelData_[channelIndex].channel.switchMode(cameraMode, metadata);
		if (channelIndex == activeChannels_[0])
			metadata->get("agc.status", status);
	}

	status.channel = activeChannels_[0];
	metadata->set("agc.status", status);

	index_ = 0;
}
/*
 * Fetch the channel index recorded in "agc.delayed_status", i.e. the channel
 * that the current frame was actually exposed for. channelIndex is left
 * untouched if no delayed status exists (normal during startup).
 */
static void getDelayedChannelIndex(Metadata *metadata, const char *message, unsigned int &channelIndex)
{
	std::unique_lock<RPiController::Metadata> lock(*metadata);
	AgcStatus *status = metadata->getLocked<AgcStatus>("agc.delayed_status");
	if (status)
		channelIndex = status->channel;
	else {
		/* This does happen at startup, otherwise it would be a Warning or Error. */
		LOG(RPiAgc, Debug) << message;
	}
}
/*
 * Stamp the given channel index into the frame's "agc.status" and return the
 * total exposure it requested. Returns 0s if no status is present (normal
 * during startup).
 */
static libcamera::utils::Duration
setCurrentChannelIndexGetExposure(Metadata *metadata, const char *message, unsigned int channelIndex)
{
	std::unique_lock<RPiController::Metadata> lock(*metadata);
	AgcStatus *status = metadata->getLocked<AgcStatus>("agc.status");
	libcamera::utils::Duration dur = 0s;

	if (status) {
		status->channel = channelIndex;
		dur = status->totalExposureValue;
	} else {
		/* This does happen at startup, otherwise it would be a Warning or Error. */
		LOG(RPiAgc, Debug) << message;
	}

	return dur;
}
/*
 * Run prepare() on the channel that this frame actually belongs to, as
 * identified by the delayed status (falling back to the first active channel
 * when no delayed status exists yet).
 */
void Agc::prepare(Metadata *imageMetadata)
{
	/*
	 * The DeviceStatus in the metadata should be correct for the image we
	 * are processing. The delayed status should tell us what channel this frame
	 * was from, so we will use that channel's prepare method.
	 *
	 * \todo To be honest, there's not much that's stateful in the prepare methods
	 * so we should perhaps re-evaluate whether prepare even needs to be done
	 * "per channel".
	 */
	unsigned int channelIndex = activeChannels_[0];
	getDelayedChannelIndex(imageMetadata, "prepare: no delayed status", channelIndex);

	LOG(RPiAgc, Debug) << "prepare for channel " << channelIndex;
	channelData_[channelIndex].channel.prepare(imageMetadata);
}
/*
 * Advance the round robin: cache this frame's stats/DeviceStatus under the
 * channel they belong to, then run process() for the *next* channel using
 * the freshest inputs we have for it.
 */
void Agc::process(StatisticsPtr &stats, Metadata *imageMetadata)
{
	/*
	 * We want to generate values for the next channel in round robin fashion
	 * (i.e. the channel at location index_ in the activeChannel list), even though
	 * the statistics we have will be for a different channel (which we find
	 * again from the delayed status).
	 */

	/* Generate updated AGC values for channel for new channel that we are requesting. */
	unsigned int channelIndex = activeChannels_[index_];
	AgcChannelData &channelData = channelData_[channelIndex];
	/* The stats that arrived with this image correspond to the following channel. */
	unsigned int statsIndex = 0;
	getDelayedChannelIndex(imageMetadata, "process: no delayed status for stats", statsIndex);
	LOG(RPiAgc, Debug) << "process for channel " << channelIndex;

	/*
	 * We keep a cache of the most recent DeviceStatus and stats for each channel,
	 * so that we can invoke the next channel's process method with the most up to date
	 * values.
	 */
	LOG(RPiAgc, Debug) << "Save DeviceStatus and stats for channel " << statsIndex;
	DeviceStatus deviceStatus;
	if (imageMetadata->get<DeviceStatus>("device.status", deviceStatus) == 0)
		channelData_[statsIndex].deviceStatus = deviceStatus;
	else
		/* Every frame should have a DeviceStatus. */
		LOG(RPiAgc, Error) << "process: no device status found";
	channelData_[statsIndex].statistics = stats;

	/*
	 * Finally fetch the most recent DeviceStatus and stats for the new channel, if both
	 * exist, and call process(). We must make the agc.status metadata record correctly
	 * which channel this is.
	 */
	StatisticsPtr *statsPtr = &stats;
	if (channelData.statistics && channelData.deviceStatus) {
		deviceStatus = *channelData.deviceStatus;
		statsPtr = &channelData.statistics;
	} else {
		/*
		 * Can also happen when new channels start. In that case we fall
		 * back on this frame's stats and DeviceStatus (deviceStatus is
		 * default-constructed if even that was missing).
		 */
		LOG(RPiAgc, Debug) << "process: channel " << channelIndex << " not seen yet";
	}

	channelData.channel.process(*statsPtr, deviceStatus, imageMetadata, channelTotalExposures_);
	auto dur = setCurrentChannelIndexGetExposure(imageMetadata, "process: no AGC status found",
						     channelIndex);
	if (dur)
		channelTotalExposures_[channelIndex] = dur;

	/* And onto the next channel for the next call. */
	index_ = (index_ + 1) % activeChannels_.size();
}
/* Register algorithm with the system. */
static Algorithm *create(Controller *controller)
{
	return (Algorithm *)new Agc(controller);
}

/* Static registration: adds "rpi.agc" to the algorithm factory table. */
static RegisterAlgorithm reg(NAME, &create);
|
0 | repos/libcamera/src/ipa/rpi/controller | repos/libcamera/src/ipa/rpi/controller/rpi/dpc.h | /* SPDX-License-Identifier: BSD-2-Clause */
/*
* Copyright (C) 2019, Raspberry Pi Ltd
*
* DPC (defective pixel correction) control algorithm
*/
#pragma once
#include "../algorithm.h"
#include "../dpc_status.h"
namespace RPiController {

/* Back End algorithm to apply appropriate DPC (defective pixel correction) settings. */

/* Tuning parameters: strength must be 0, 1 or 2 (validated in read()). */
struct DpcConfig {
	int strength;
};

class Dpc : public Algorithm
{
public:
	Dpc(Controller *controller);
	char const *name() const override;
	int read(const libcamera::YamlObject ¶ms) override;
	void prepare(Metadata *imageMetadata) override;

private:
	DpcConfig config_;
};

} /* namespace RPiController */
|
0 | repos/libcamera/src/ipa/rpi/controller | repos/libcamera/src/ipa/rpi/controller/rpi/alsc.h | /* SPDX-License-Identifier: BSD-2-Clause */
/*
* Copyright (C) 2019, Raspberry Pi Ltd
*
* ALSC (auto lens shading correction) control algorithm
*/
#pragma once
#include <array>
#include <mutex>
#include <condition_variable>
#include <thread>
#include <vector>
#include <libcamera/geometry.h>
#include "../algorithm.h"
#include "../alsc_status.h"
#include "../statistics.h"
namespace RPiController {
/* Algorithm to generate automagic LSC (Lens Shading Correction) tables. */
/*
* The Array2D class is a very thin wrapper round std::vector so that it can
* be used in exactly the same way in the code but carries its correct width
* and height ("dimensions") with it.
*/
/* 2D array backed by a flat std::vector; indexed linearly (row-major). */
template<typename T>
class Array2D
{
public:
	using Size = libcamera::Size;

	const Size &dimensions() const { return dimensions_; }

	size_t size() const { return data_.size(); }

	const std::vector<T> &data() const { return data_; }

	/* Resize to dims.width * dims.height elements; contents unspecified. */
	void resize(const Size &dims)
	{
		dimensions_ = dims;
		data_.resize(dims.width * dims.height);
	}

	/* Resize and fill every element with value. */
	void resize(const Size &dims, const T &value)
	{
		resize(dims);
		std::fill(data_.begin(), data_.end(), value);
	}

	T &operator[](int index) { return data_[index]; }

	const T &operator[](int index) const { return data_[index]; }

	T *ptr() { return data_.data(); }

	const T *ptr() const { return data_.data(); }

	auto begin() { return data_.begin(); }
	auto end() { return data_.end(); }

private:
	Size dimensions_;
	std::vector<T> data_;
};
/*
* We'll use the term SparseArray for the large sparse matrices that are
* XY tall but have only 4 non-zero elements on each row.
*/
template<typename T>
using SparseArray = std::vector<std::array<T, 4>>;

/* One calibrated shading table, valid at colour temperature ct. */
struct AlscCalibration {
	double ct;
	Array2D<double> table;
};

/* ALSC tuning parameters, read from the tuning file. */
struct AlscConfig {
	/* Only repeat the ALSC calculation every "this many" frames */
	uint16_t framePeriod;
	/* number of initial frames for which speed taken as 1.0 (maximum) */
	uint16_t startupFrames;
	/* IIR filter speed applied to algorithm results */
	double speed;
	double sigmaCr;
	double sigmaCb;
	double minCount;
	uint16_t minG;
	double omega;
	uint32_t nIter;
	Array2D<double> luminanceLut;
	double luminanceStrength;
	/* Calibrated tables per colour temperature, for each colour channel. */
	std::vector<AlscCalibration> calibrationsCr;
	std::vector<AlscCalibration> calibrationsCb;
	double defaultCt; /* colour temperature if no metadata found */
	double threshold; /* iteration termination threshold */
	double lambdaBound; /* upper/lower bound for lambda from a value of 1 */
	libcamera::Size tableSize;
};
/*
 * Auto lens shading correction. The heavy table computation runs on a
 * private worker thread; the main thread hands over statistics and picks up
 * the results when ready, smoothing them with an IIR filter.
 */
class Alsc : public Algorithm
{
public:
	Alsc(Controller *controller = NULL);
	~Alsc();
	char const *name() const override;
	void initialise() override;
	void switchMode(CameraMode const &cameraMode, Metadata *metadata) override;
	int read(const libcamera::YamlObject ¶ms) override;
	void prepare(Metadata *imageMetadata) override;
	void process(StatisticsPtr &stats, Metadata *imageMetadata) override;

private:
	/* configuration is read-only, and available to both threads */
	AlscConfig config_;
	bool firstTime_;
	CameraMode cameraMode_;
	Array2D<double> luminanceTable_;
	std::thread asyncThread_;
	void asyncFunc(); /* asynchronous thread function */
	std::mutex mutex_;
	/* condvar for async thread to wait on */
	std::condition_variable asyncSignal_;
	/* condvar for synchronous thread to wait on */
	std::condition_variable syncSignal_;
	/* for sync thread to check if async thread finished (requires mutex) */
	bool asyncFinished_;
	/* for async thread to check if it's been told to run (requires mutex) */
	bool asyncStart_;
	/* for async thread to check if it's been told to quit (requires mutex) */
	bool asyncAbort_;

	/*
	 * The following are only for the synchronous thread to use:
	 * for sync thread to note its has asked async thread to run
	 */
	bool asyncStarted_;
	/* counts up to framePeriod before restarting the async thread */
	int framePhase_;
	/* counts up to startupFrames */
	int frameCount_;
	/* counts up to startupFrames for Process function */
	int frameCount2_;
	std::array<Array2D<double>, 3> syncResults_;
	std::array<Array2D<double>, 3> prevSyncResults_;
	void waitForAysncThread();
	/*
	 * The following are for the asynchronous thread to use, though the main
	 * thread can set/reset them if the async thread is known to be idle:
	 */
	void restartAsync(StatisticsPtr &stats, Metadata *imageMetadata);
	/* copy out the results from the async thread so that it can be restarted */
	void fetchAsyncResults();
	double ct_;
	RgbyRegions statistics_;
	std::array<Array2D<double>, 3> asyncResults_;
	Array2D<double> asyncLambdaR_;
	Array2D<double> asyncLambdaB_;
	void doAlsc();
	Array2D<double> lambdaR_;
	Array2D<double> lambdaB_;

	/* Temporaries for the computations */
	std::array<Array2D<double>, 5> tmpC_;
	std::array<SparseArray<double>, 3> tmpM_;
};
} /* namespace RPiController */
|
0 | repos/libcamera/src/ipa/rpi/controller | repos/libcamera/src/ipa/rpi/controller/rpi/lux.cpp | /* SPDX-License-Identifier: BSD-2-Clause */
/*
* Copyright (C) 2019, Raspberry Pi Ltd
*
* Lux control algorithm
*/
#include <math.h>
#include <libcamera/base/log.h>
#include "../device_status.h"
#include "lux.h"
using namespace RPiController;
using namespace libcamera;
using namespace std::literals::chrono_literals;
LOG_DEFINE_CATEGORY(RPiLux)
#define NAME "rpi.lux"
/* Construct with placeholder status values until the first process() runs. */
Lux::Lux(Controller *controller)
	: Algorithm(controller)
{
	/*
	 * Put in some defaults as there will be no meaningful values until
	 * Process has run.
	 */
	status_.aperture = 1.0;
	status_.lux = 400;
}
/* Return the algorithm's registered name ("rpi.lux"). */
char const *Lux::name() const
{
	return NAME;
}
/*
 * Read the reference-frame calibration. reference_shutter_speed (us),
 * reference_gain, reference_Y and reference_lux are mandatory (-EINVAL if
 * any is missing); reference_aperture defaults to 1.0.
 */
int Lux::read(const libcamera::YamlObject ¶ms)
{
	/* Helper: fetch a mandatory double, writing it only on success. */
	auto getRequired = [¶ms](const char *key, double &out) {
		auto v = params[key].get<double>();
		if (!v)
			return false;
		out = *v;
		return true;
	};

	double shutter;
	if (!getRequired("reference_shutter_speed", shutter))
		return -EINVAL;
	referenceShutterSpeed_ = shutter * 1.0us;

	if (!getRequired("reference_gain", referenceGain_))
		return -EINVAL;

	referenceAperture_ = params["reference_aperture"].get<double>(1.0);

	if (!getRequired("reference_Y", referenceY_))
		return -EINVAL;

	if (!getRequired("reference_lux", referenceLux_))
		return -EINVAL;

	currentAperture_ = referenceAperture_;
	return 0;
}
/* Record the current aperture, used if DeviceStatus doesn't supply one. */
void Lux::setCurrentAperture(double aperture)
{
	currentAperture_ = aperture;
}
/* Publish the most recent lux estimate (mutex guards updates from process()). */
void Lux::prepare(Metadata *imageMetadata)
{
	std::unique_lock<std::mutex> lock(mutex_);
	imageMetadata->set("lux.status", status_);
}
/*
 * Estimate the scene illuminance by comparing this frame's exposure settings
 * and mean luma against the calibrated reference frame, then store/publish
 * the result.
 */
void Lux::process(StatisticsPtr &stats, Metadata *imageMetadata)
{
	DeviceStatus deviceStatus;
	if (imageMetadata->get("device.status", deviceStatus) == 0) {
		double currentGain = deviceStatus.analogueGain;
		/* Prefer the sensor-reported aperture, else the cached one. */
		double currentAperture = deviceStatus.aperture.value_or(currentAperture_);
		/* Mean luma over the full histogram (all quantiles). */
		double currentY = stats->yHist.interQuantileMean(0, 1);
		double gainRatio = referenceGain_ / currentGain;
		double shutterSpeedRatio =
			referenceShutterSpeed_ / deviceStatus.shutterSpeed;
		double apertureRatio = referenceAperture_ / currentAperture;
		/* Rescale luma to a 16-bit range before comparing to the reference. */
		double yRatio = currentY * (65536 / stats->yHist.bins()) / referenceY_;
		/* Aperture enters squared: illuminance goes as the area of the pupil. */
		double estimatedLux = shutterSpeedRatio * gainRatio *
				      apertureRatio * apertureRatio *
				      yRatio * referenceLux_;
		LuxStatus status;
		status.lux = estimatedLux;
		status.aperture = currentAperture;
		LOG(RPiLux, Debug) << ": estimated lux " << estimatedLux;
		{
			std::unique_lock<std::mutex> lock(mutex_);
			status_ = status;
		}
		/*
		 * Overwrite the metadata here as well, so that downstream
		 * algorithms get the latest value.
		 */
		imageMetadata->set("lux.status", status);
	} else
		LOG(RPiLux, Warning) << ": no device metadata";
}
/* Register algorithm with the system. */
static Algorithm *create(Controller *controller)
{
	return (Algorithm *)new Lux(controller);
}

/* Static registration: adds "rpi.lux" to the algorithm factory table. */
static RegisterAlgorithm reg(NAME, &create);
|
0 | repos/libcamera/src/ipa/rpi/controller | repos/libcamera/src/ipa/rpi/controller/rpi/contrast.h | /* SPDX-License-Identifier: BSD-2-Clause */
/*
* Copyright (C) 2019, Raspberry Pi Ltd
*
* contrast (gamma) control algorithm
*/
#pragma once
#include <mutex>
#include <libipa/pwl.h>
#include "../contrast_algorithm.h"
namespace RPiController {

/*
 * Back End algorithm to apply correct digital gain. Should be placed after
 * Back End AWB.
 */

/* Tuning parameters for contrast/gamma and contrast enhancement (CE). */
struct ContrastConfig {
	bool ceEnable;
	double loHistogram;
	double loLevel;
	double loMax;
	double hiHistogram;
	double hiLevel;
	double hiMax;
	libcamera::ipa::Pwl gammaCurve;
};

class Contrast : public ContrastAlgorithm
{
public:
	Contrast(Controller *controller = NULL);
	char const *name() const override;
	int read(const libcamera::YamlObject ¶ms) override;
	void setBrightness(double brightness) override;
	void setContrast(double contrast) override;
	void enableCe(bool enable) override;
	void restoreCe() override;
	void initialise() override;
	void prepare(Metadata *imageMetadata) override;
	void process(StatisticsPtr &stats, Metadata *imageMetadata) override;

private:
	ContrastConfig config_;
	double brightness_;
	double contrast_;
	ContrastStatus status_;
	/*
	 * NOTE(review): declared double but enableCe()/restoreCe() take bool
	 * flags - presumably this should be bool; confirm against contrast.cpp.
	 */
	double ceEnable_;
};

} /* namespace RPiController */
|
0 | repos/libcamera/src/ipa/rpi/controller | repos/libcamera/src/ipa/rpi/controller/rpi/hdr.h | /* SPDX-License-Identifier: BSD-2-Clause */
/*
* Copyright (C) 2023, Raspberry Pi Ltd
*
* HDR control algorithm
*/
#pragma once
#include <map>
#include <string>
#include <vector>
#include <libcamera/geometry.h>
#include <libipa/pwl.h>
#include "../hdr_algorithm.h"
#include "../hdr_status.h"
/* This is our implementation of an HDR algorithm. */
namespace RPiController {

/* Tuning/configuration for a single named HDR mode. */
struct HdrConfig {
	std::string name;
	/* Repeating sequence of channel indexes to expose, in order. */
	std::vector<unsigned int> cadence;
	/* Maps a channel index to the AGC channel name handling it. */
	std::map<unsigned int, std::string> channelMap;

	/* Lens shading related parameters. */
	libcamera::ipa::Pwl spatialGainCurve; /* Brightness to gain curve for different image regions. */
	unsigned int diffusion; /* How much to diffuse the gain spatially. */

	/* Tonemap related parameters. */
	bool tonemapEnable;
	uint16_t detailConstant;
	double detailSlope;
	double iirStrength;
	double strength;
	libcamera::ipa::Pwl tonemap;
	/* These relate to adaptive tonemap calculation. */
	double speed;
	std::vector<double> hiQuantileTargets; /* quantiles to check for unsaturated images */
	double hiQuantileMaxGain; /* the max gain we'll apply when unsaturated */
	std::vector<double> quantileTargets; /* target values for histogram quantiles */
	double powerMin; /* minimum tonemap power */
	double powerMax; /* maximum tonemap power */
	std::vector<double> contrastAdjustments; /* any contrast adjustment factors */

	/* Stitch related parameters. */
	bool stitchEnable;
	uint16_t thresholdLo;
	uint8_t diffPower;
	double motionThreshold;

	void read(const libcamera::YamlObject ¶ms, const std::string &name);
};

/* Multi-exposure HDR control: per-mode gain, tonemap and stitch handling. */
class Hdr : public HdrAlgorithm
{
public:
	Hdr(Controller *controller);
	char const *name() const override;
	void switchMode(CameraMode const &cameraMode, Metadata *metadata) override;
	int read(const libcamera::YamlObject ¶ms) override;
	void prepare(Metadata *imageMetadata) override;
	void process(StatisticsPtr &stats, Metadata *imageMetadata) override;
	int setMode(std::string const &mode) override;
	std::vector<unsigned int> getChannels() const override;

private:
	void updateAgcStatus(Metadata *metadata);
	void updateGains(StatisticsPtr &stats, HdrConfig &config);
	bool updateTonemap(StatisticsPtr &stats, HdrConfig &config);

	/* All configured HDR modes, keyed by mode name. */
	std::map<std::string, HdrConfig> config_;
	HdrStatus status_; /* track the current HDR mode and channel */
	HdrStatus delayedStatus_; /* track the delayed HDR mode and channel */
	std::string previousMode_;
	libcamera::ipa::Pwl tonemap_;
	libcamera::Size regions_; /* stats regions */
	unsigned int numRegions_; /* total number of stats regions */
	std::vector<double> gains_[2];
};

} /* namespace RPiController */
|
0 | repos/libcamera/src/ipa/rpi/controller | repos/libcamera/src/ipa/rpi/controller/rpi/black_level.cpp | /* SPDX-License-Identifier: BSD-2-Clause */
/*
* Copyright (C) 2019, Raspberry Pi Ltd
*
* black level control algorithm
*/
#include <math.h>
#include <stdint.h>
#include <libcamera/base/log.h>
#include "../black_level_status.h"
#include "black_level.h"
using namespace RPiController;
using namespace libcamera;
LOG_DEFINE_CATEGORY(RPiBlackLevel)
#define NAME "rpi.black_level"
/* Trivial constructor; the levels arrive later via read(). */
BlackLevel::BlackLevel(Controller *controller)
	: BlackLevelAlgorithm(controller)
{
}
/* Return the algorithm's registered name ("rpi.black_level"). */
char const *BlackLevel::name() const
{
	return NAME;
}
/*
 * Read the black levels, on a 16-bit scale. Per-channel values fall back to
 * the common "black_level", which itself defaults to 4096 (i.e. 64 on the
 * sensor's 10-bit scale). Always succeeds.
 */
int BlackLevel::read(const libcamera::YamlObject ¶ms)
{
	/* 64 in 10 bits scaled to 16 bits */
	const uint16_t defaultLevel = params["black_level"].get<uint16_t>(4096);
	blackLevelR_ = params["black_level_r"].get<uint16_t>(defaultLevel);
	blackLevelG_ = params["black_level_g"].get<uint16_t>(defaultLevel);
	blackLevelB_ = params["black_level_b"].get<uint16_t>(defaultLevel);
	LOG(RPiBlackLevel, Debug)
		<< " Read black levels red " << blackLevelR_
		<< " green " << blackLevelG_
		<< " blue " << blackLevelB_;
	return 0;
}
/* Report the configured black levels, e.g. for initial pipeline setup. */
void BlackLevel::initialValues(uint16_t &blackLevelR, uint16_t &blackLevelG,
			       uint16_t &blackLevelB)
{
	blackLevelR = blackLevelR_;
	blackLevelG = blackLevelG_;
	blackLevelB = blackLevelB_;
}
/* Publish the black levels for this frame ("black_level.status"). */
void BlackLevel::prepare(Metadata *imageMetadata)
{
	/*
	 * The levels never change after read(), so arguably this could be
	 * published once at switchMode time instead of every frame.
	 */
	BlackLevelStatus status;
	status.blackLevelB = blackLevelB_;
	status.blackLevelG = blackLevelG_;
	status.blackLevelR = blackLevelR_;
	imageMetadata->set("black_level.status", status);
}
/* Register algorithm with the system. */
/* Factory used by the controller to instantiate this algorithm by name. */
static Algorithm *create(Controller *controller)
{
	return new BlackLevel(controller);
}
/* Static registration: binds NAME to the factory at program load time. */
static RegisterAlgorithm reg(NAME, &create);
|
0 | repos/libcamera/src/ipa/rpi/controller | repos/libcamera/src/ipa/rpi/controller/rpi/saturation.h | /* SPDX-License-Identifier: BSD-2-Clause */
/*
* Copyright (C) 2022, Raspberry Pi Ltd
*
 * saturation.h - Saturation control algorithm
*/
#pragma once
#include "algorithm.h"
namespace RPiController {
/*
 * Per-channel saturation shift values read from the tuning file and
 * forwarded each frame in the SaturationStatus metadata.
 */
struct SaturationConfig {
	uint8_t shiftR;
	uint8_t shiftG;
	uint8_t shiftB;
};
/*
 * Algorithm that publishes the tuned per-channel saturation shifts as a
 * "saturation.status" metadata entry on every frame.
 */
class Saturation : public Algorithm
{
public:
	Saturation(Controller *controller = NULL);
	char const *name() const override;
	/* Parse shift_r / shift_g / shift_b from the tuning file (default 0). */
	int read(const libcamera::YamlObject &params) override;
	void initialise() override;
	/* Copy the configured shifts into the frame metadata. */
	void prepare(Metadata *imageMetadata) override;

private:
	SaturationConfig config_;
};
} // namespace RPiController
|
0 | repos/libcamera/src/ipa/rpi/controller | repos/libcamera/src/ipa/rpi/controller/rpi/tonemap.h | /* SPDX-License-Identifier: BSD-2-Clause */
/*
* Copyright (C) 2022, Raspberry Pi Ltd
*
 * tonemap.h - Tonemap control algorithm
*/
#pragma once
#include <libipa/pwl.h>
#include "algorithm.h"
namespace RPiController {
/*
 * Tuning parameters for the tonemap algorithm.
 *
 * NOTE(review): the precise semantics of the detail/IIR/strength fields are
 * defined by the platform tonemap hardware — confirm against tonemap.cpp
 * and the ISP documentation.
 */
struct TonemapConfig {
	uint16_t detailConstant;
	double detailSlope;
	double iirStrength;
	double strength;
	/* The tonemap curve itself, as a piecewise-linear function. */
	libcamera::ipa::Pwl tonemap;
};
/*
 * Algorithm that reads the tonemap tuning parameters and publishes them in
 * the frame metadata for the platform ISP code to apply.
 */
class Tonemap : public Algorithm
{
public:
	Tonemap(Controller *controller = NULL);
	char const *name() const override;
	/* Parse the TonemapConfig fields from the tuning file. */
	int read(const libcamera::YamlObject &params) override;
	void initialise() override;
	void prepare(Metadata *imageMetadata) override;

private:
	TonemapConfig config_;
};
} // namespace RPiController
|
0 | repos/libcamera/src/ipa/rpi | repos/libcamera/src/ipa/rpi/common/ipa_base.h | /* SPDX-License-Identifier: BSD-2-Clause */
/*
* Copyright (C) 2023, Raspberry Pi Ltd
*
* Raspberry Pi IPA base class
*/
#pragma once
#include <array>
#include <deque>
#include <map>
#include <stdint.h>
#include <libcamera/base/utils.h>
#include <libcamera/controls.h>
#include <libcamera/ipa/raspberrypi_ipa_interface.h>
#include "libcamera/internal/mapped_framebuffer.h"
#include "cam_helper/cam_helper.h"
#include "controller/agc_status.h"
#include "controller/camera_mode.h"
#include "controller/controller.h"
#include "controller/hdr_status.h"
#include "controller/metadata.h"
namespace libcamera {
namespace ipa::RPi {
/*
 * Base class for the Raspberry Pi IPA implementations.
 *
 * Implements the IPA interface logic shared by all platforms (camera mode
 * handling, control application, startup AGC/AWB convergence accounting,
 * metadata reporting) and defers platform-specific ISP programming to the
 * pure-virtual platform*() hooks implemented by derived classes.
 */
class IpaBase : public IPARPiInterface
{
public:
	IpaBase();
	~IpaBase();

	/* IPARPiInterface entry points, invoked by the pipeline handler. */
	int32_t init(const IPASettings &settings, const InitParams &params, InitResult *result) override;
	int32_t configure(const IPACameraSensorInfo &sensorInfo, const ConfigParams &params,
			  ConfigResult *result) override;

	void start(const ControlList &controls, StartResult *result) override;
	void stop() override {}

	void mapBuffers(const std::vector<IPABuffer> &buffers) override;
	void unmapBuffers(const std::vector<unsigned int> &ids) override;

	void prepareIsp(const PrepareParams &params) override;
	void processStats(const ProcessParams &params) override;

protected:
	/* True when the sensor reports a MONO colour filter arrangement. */
	bool monoSensor() const
	{
		return monoSensor_;
	}

	/* Raspberry Pi controller specific defines. */
	std::unique_ptr<RPiController::CamHelper> helper_;
	RPiController::Controller controller_;

	ControlInfoMap sensorCtrls_;
	ControlInfoMap lensCtrls_;

	/* Camera sensor params. */
	CameraMode mode_;

	/* Track the frame length times over FrameLengthsQueueSize frames. */
	std::deque<utils::Duration> frameLengths_;
	utils::Duration lastTimeout_;
	ControlList libcameraMetadata_;
	bool statsMetadataOutput_;

	/* Remember the HDR status after a mode switch. */
	HdrStatus hdrStatus_;

	/* Whether the stitch block (if available) needs to swap buffers. */
	bool stitchSwapBuffers_;

private:
	/* Number of metadata objects available in the context list. */
	static constexpr unsigned int numMetadataContexts = 16;

	/* Hooks implemented by the platform-specific derived classes. */
	virtual int32_t platformInit(const InitParams &params, InitResult *result) = 0;
	virtual int32_t platformStart(const ControlList &controls, StartResult *result) = 0;
	virtual int32_t platformConfigure(const ConfigParams &params, ConfigResult *result) = 0;

	virtual void platformPrepareIsp(const PrepareParams &params,
					RPiController::Metadata &rpiMetadata) = 0;
	virtual RPiController::StatisticsPtr platformProcessStats(Span<uint8_t> mem) = 0;

	void setMode(const IPACameraSensorInfo &sensorInfo);
	void setCameraTimeoutValue();
	bool validateSensorControls();
	bool validateLensControls();

	void applyControls(const ControlList &controls);
	virtual void handleControls(const ControlList &controls) = 0;

	void fillDeviceStatus(const ControlList &sensorControls, unsigned int ipaContext);
	void reportMetadata(unsigned int ipaContext);
	void applyFrameDurations(utils::Duration minFrameDuration, utils::Duration maxFrameDuration);
	void applyAGC(const struct AgcStatus *agcStatus, ControlList &ctrls);

	/* CPU mappings of the ISP/stats buffers, indexed by buffer id. */
	std::map<unsigned int, MappedFrameBuffer> buffers_;

	bool lensPresent_;
	bool monoSensor_;

	std::array<RPiController::Metadata, numMetadataContexts> rpiMetadata_;

	/*
	 * We count frames to decide if the frame must be hidden (e.g. from
	 * display) or mistrusted (i.e. not given to the control algos).
	 */
	uint64_t frameCount_;

	/* How many frames we should avoid running control algos on. */
	unsigned int mistrustCount_;

	/* Number of frames that need to be dropped on startup. */
	unsigned int dropFrameCount_;

	/* Frame timestamp for the last run of the controller. */
	uint64_t lastRunTimestamp_;

	/* Do we run a Controller::process() for this frame? */
	bool processPending_;

	/* Distinguish the first camera start from others. */
	bool firstStart_;

	/* Frame duration (1/fps) limits. */
	utils::Duration minFrameDuration_;
	utils::Duration maxFrameDuration_;

	/* The current state of flicker avoidance. */
	struct FlickerState {
		int32_t mode;
		utils::Duration manualPeriod;
	} flickerState_;
};
} /* namespace ipa::RPi */
} /* namespace libcamera */
|
0 | repos/libcamera/src/ipa/rpi | repos/libcamera/src/ipa/rpi/common/ipa_base.cpp | /* SPDX-License-Identifier: BSD-2-Clause */
/*
* Copyright (C) 2019-2023, Raspberry Pi Ltd
*
* Raspberry Pi IPA base class
*/
#include "ipa_base.h"
#include <cmath>
#include <libcamera/base/log.h>
#include <libcamera/base/span.h>
#include <libcamera/control_ids.h>
#include <libcamera/property_ids.h>
#include "controller/af_algorithm.h"
#include "controller/af_status.h"
#include "controller/agc_algorithm.h"
#include "controller/awb_algorithm.h"
#include "controller/awb_status.h"
#include "controller/black_level_status.h"
#include "controller/ccm_algorithm.h"
#include "controller/ccm_status.h"
#include "controller/contrast_algorithm.h"
#include "controller/denoise_algorithm.h"
#include "controller/hdr_algorithm.h"
#include "controller/lux_status.h"
#include "controller/sharpen_algorithm.h"
#include "controller/statistics.h"
namespace libcamera {
using namespace std::literals::chrono_literals;
using utils::Duration;
namespace {

/* Number of frame length times to hold in the queue. */
constexpr unsigned int FrameLengthsQueueSize = 10;

/* Configure the sensor with these values initially. */
constexpr double defaultAnalogueGain = 1.0;
constexpr Duration defaultExposureTime = 20.0ms;
constexpr Duration defaultMinFrameDuration = 1.0s / 30.0;
constexpr Duration defaultMaxFrameDuration = 250.0s;

/*
 * Determine the minimum allowable inter-frame duration to run the controller
 * algorithms. If the pipeline handler provides frames at a rate higher than this,
 * we rate-limit the controller Prepare() and Process() calls to lower than or
 * equal to this rate.
 */
constexpr Duration controllerMinFrameDuration = 1.0s / 30.0;

/*
 * List of controls handled by the Raspberry Pi IPA.
 *
 * Note the ExposureTime, AnalogueGain and FrameDurationLimits ranges given
 * here are placeholders only: configure() replaces them with the real
 * limits of the selected sensor mode.
 */
const ControlInfoMap::Map ipaControls{
	{ &controls::AeEnable, ControlInfo(false, true) },
	{ &controls::ExposureTime, ControlInfo(0, 66666) },
	{ &controls::AnalogueGain, ControlInfo(1.0f, 16.0f) },
	{ &controls::AeMeteringMode, ControlInfo(controls::AeMeteringModeValues) },
	{ &controls::AeConstraintMode, ControlInfo(controls::AeConstraintModeValues) },
	{ &controls::AeExposureMode, ControlInfo(controls::AeExposureModeValues) },
	{ &controls::ExposureValue, ControlInfo(-8.0f, 8.0f, 0.0f) },
	{ &controls::AeFlickerMode, ControlInfo(static_cast<int>(controls::FlickerOff),
						static_cast<int>(controls::FlickerManual),
						static_cast<int>(controls::FlickerOff)) },
	{ &controls::AeFlickerPeriod, ControlInfo(100, 1000000) },
	{ &controls::Brightness, ControlInfo(-1.0f, 1.0f, 0.0f) },
	{ &controls::Contrast, ControlInfo(0.0f, 32.0f, 1.0f) },
	{ &controls::HdrMode, ControlInfo(controls::HdrModeValues) },
	{ &controls::Sharpness, ControlInfo(0.0f, 16.0f, 1.0f) },
	{ &controls::ScalerCrop, ControlInfo(Rectangle{}, Rectangle(65535, 65535, 65535, 65535), Rectangle{}) },
	{ &controls::FrameDurationLimits, ControlInfo(INT64_C(33333), INT64_C(120000)) },
	{ &controls::draft::NoiseReductionMode, ControlInfo(controls::draft::NoiseReductionModeValues) },
	{ &controls::rpi::StatsOutputEnable, ControlInfo(false, true, false) },
};

/* IPA controls handled conditionally, if the sensor is not mono */
const ControlInfoMap::Map ipaColourControls{
	{ &controls::AwbEnable, ControlInfo(false, true) },
	{ &controls::AwbMode, ControlInfo(controls::AwbModeValues) },
	{ &controls::ColourGains, ControlInfo(0.0f, 32.0f) },
	{ &controls::Saturation, ControlInfo(0.0f, 32.0f, 1.0f) },
};

/* IPA controls handled conditionally, if the lens has a focus control */
const ControlInfoMap::Map ipaAfControls{
	{ &controls::AfMode, ControlInfo(controls::AfModeValues) },
	{ &controls::AfRange, ControlInfo(controls::AfRangeValues) },
	{ &controls::AfSpeed, ControlInfo(controls::AfSpeedValues) },
	{ &controls::AfMetering, ControlInfo(controls::AfMeteringValues) },
	{ &controls::AfWindows, ControlInfo(Rectangle{}, Rectangle(65535, 65535, 65535, 65535), Rectangle{}) },
	{ &controls::AfTrigger, ControlInfo(controls::AfTriggerValues) },
	{ &controls::AfPause, ControlInfo(controls::AfPauseValues) },
	{ &controls::LensPosition, ControlInfo(0.0f, 32.0f, 1.0f) }
};

} /* namespace */
LOG_DEFINE_CATEGORY(IPARPI)
namespace ipa::RPi {
/* Initialise state common to all platforms; real setup happens in init(). */
IpaBase::IpaBase()
	: controller_(), frameLengths_(FrameLengthsQueueSize, 0s), statsMetadataOutput_(false),
	  stitchSwapBuffers_(false), frameCount_(0), mistrustCount_(0), lastRunTimestamp_(0),
	  firstStart_(true), flickerState_({ 0, 0s })
{
}
/* Defaulted out-of-line so the unique_ptr member types are complete here. */
IpaBase::~IpaBase()
{
}
/*
 * Perform one-time IPA initialisation.
 *
 * Creates the sensor-specific CamHelper, reports the sensor delay/metadata
 * configuration back to the pipeline handler, loads the tuning file into
 * the controller and returns the set of controls handled by this IPA,
 * before handing off to the platform-specific initialisation hook.
 *
 * Returns 0 on success or a negative error code.
 */
int32_t IpaBase::init(const IPASettings &settings, const InitParams &params, InitResult *result)
{
	/*
	 * Load the "helper" for this sensor. This tells us all the device specific stuff
	 * that the kernel driver doesn't. We only do this the first time; we don't need
	 * to re-parse the metadata after a simple mode-switch for no reason.
	 */
	helper_ = std::unique_ptr<RPiController::CamHelper>(RPiController::CamHelper::create(settings.sensorModel));
	if (!helper_) {
		LOG(IPARPI, Error) << "Could not create camera helper for "
				   << settings.sensorModel;
		return -EINVAL;
	}

	/*
	 * Pass out the sensor config to the pipeline handler in order
	 * to setup the staggered writer class.
	 */
	int gainDelay, exposureDelay, vblankDelay, hblankDelay, sensorMetadata;
	helper_->getDelays(exposureDelay, gainDelay, vblankDelay, hblankDelay);
	sensorMetadata = helper_->sensorEmbeddedDataPresent();

	result->sensorConfig.gainDelay = gainDelay;
	result->sensorConfig.exposureDelay = exposureDelay;
	result->sensorConfig.vblankDelay = vblankDelay;
	result->sensorConfig.hblankDelay = hblankDelay;
	result->sensorConfig.sensorMetadata = sensorMetadata;

	/* Load the tuning file for this sensor. */
	int ret = controller_.read(settings.configurationFile.c_str());
	if (ret) {
		LOG(IPARPI, Error)
			<< "Failed to load tuning data file "
			<< settings.configurationFile;
		return ret;
	}

	lensPresent_ = params.lensPresent;

	controller_.initialise();

	/* Return the controls handled by the IPA */
	ControlInfoMap::Map ctrlMap = ipaControls;
	if (lensPresent_)
		ctrlMap.merge(ControlInfoMap::Map(ipaAfControls));

	/* Colour controls are only advertised for non-mono sensors. */
	monoSensor_ = params.sensorInfo.cfaPattern == properties::draft::ColorFilterArrangementEnum::MONO;
	if (!monoSensor_)
		ctrlMap.merge(ControlInfoMap::Map(ipaColourControls));

	result->controlInfo = ControlInfoMap(std::move(ctrlMap), controls::controls);

	return platformInit(params, result);
}
/*
 * Configure the IPA for a (possibly new) camera mode.
 *
 * Validates the sensor (and optional lens) controls, rebuilds the camera
 * mode from the sensor info, supplies startup exposure/gain and default
 * lens position on first start, and republishes the control limits clamped
 * to the new mode before calling the platform-specific configure hook.
 *
 * Returns 0 on success or a negative value on validation failure.
 */
int32_t IpaBase::configure(const IPACameraSensorInfo &sensorInfo, const ConfigParams &params,
			   ConfigResult *result)
{
	sensorCtrls_ = params.sensorControls;

	if (!validateSensorControls()) {
		LOG(IPARPI, Error) << "Sensor control validation failed.";
		return -1;
	}

	if (lensPresent_) {
		lensCtrls_ = params.lensControls;
		/* A failed lens validation is not fatal: just drop lens support. */
		if (!validateLensControls()) {
			LOG(IPARPI, Warning) << "Lens validation failed, "
					     << "no lens control will be available.";
			lensPresent_ = false;
		}
	}

	/* Setup a metadata ControlList to output metadata. */
	libcameraMetadata_ = ControlList(controls::controls);

	/* Re-assemble camera mode using the sensor info. */
	setMode(sensorInfo);

	mode_.transform = static_cast<libcamera::Transform>(params.transform);

	/* Pass the camera mode to the CamHelper to setup algorithms. */
	helper_->setCameraMode(mode_);

	/*
	 * Initialise this ControlList correctly, even if empty, in case the IPA is
	 * running in isolation mode (passing the ControlList through the IPC layer).
	 */
	ControlList ctrls(sensorCtrls_);

	/* The pipeline handler passes out the mode's sensitivity. */
	result->modeSensitivity = mode_.sensitivity;

	if (firstStart_) {
		/* Supply initial values for frame durations. */
		applyFrameDurations(defaultMinFrameDuration, defaultMaxFrameDuration);

		/* Supply initial values for gain and exposure. */
		AgcStatus agcStatus;
		agcStatus.shutterTime = defaultExposureTime;
		agcStatus.analogueGain = defaultAnalogueGain;
		applyAGC(&agcStatus, ctrls);

		/*
		 * Set the lens to the default (typically hyperfocal) position
		 * on first start.
		 */
		if (lensPresent_) {
			RPiController::AfAlgorithm *af =
				dynamic_cast<RPiController::AfAlgorithm *>(controller_.getAlgorithm("af"));

			if (af) {
				float defaultPos =
					ipaAfControls.at(&controls::LensPosition).def().get<float>();
				ControlList lensCtrl(lensCtrls_);
				int32_t hwpos;

				af->setLensPosition(defaultPos, &hwpos);
				lensCtrl.set(V4L2_CID_FOCUS_ABSOLUTE, hwpos);
				result->lensControls = std::move(lensCtrl);
			}
		}
	}

	result->sensorControls = std::move(ctrls);

	/*
	 * Apply the correct limits to the exposure, gain and frame duration controls
	 * based on the current sensor mode.
	 */
	ControlInfoMap::Map ctrlMap = ipaControls;
	ctrlMap[&controls::FrameDurationLimits] =
		ControlInfo(static_cast<int64_t>(mode_.minFrameDuration.get<std::micro>()),
			    static_cast<int64_t>(mode_.maxFrameDuration.get<std::micro>()));

	ctrlMap[&controls::AnalogueGain] =
		ControlInfo(static_cast<float>(mode_.minAnalogueGain),
			    static_cast<float>(mode_.maxAnalogueGain));

	ctrlMap[&controls::ExposureTime] =
		ControlInfo(static_cast<int32_t>(mode_.minShutter.get<std::micro>()),
			    static_cast<int32_t>(mode_.maxShutter.get<std::micro>()));

	/* Declare colour processing related controls for non-mono sensors. */
	if (!monoSensor_)
		ctrlMap.merge(ControlInfoMap::Map(ipaColourControls));

	/* Declare Autofocus controls, only if we have a controllable lens */
	if (lensPresent_)
		ctrlMap.merge(ControlInfoMap::Map(ipaAfControls));

	result->controlInfo = ControlInfoMap(std::move(ctrlMap), controls::controls);

	return platformConfigure(params, result);
}
/*
 * Start streaming.
 *
 * Actions any pre-start controls, switches the controller into the new
 * camera mode, pushes any updated exposure/gain out to the sensor and
 * computes how many startup frames must be dropped or mistrusted before
 * calling the platform-specific start hook.
 */
void IpaBase::start(const ControlList &controls, StartResult *result)
{
	RPiController::Metadata metadata;

	if (!controls.empty()) {
		/* We have been given some controls to action before start. */
		applyControls(controls);
	}

	controller_.switchMode(mode_, &metadata);

	/* Reset the frame lengths queue state. */
	lastTimeout_ = 0s;
	frameLengths_.clear();
	frameLengths_.resize(FrameLengthsQueueSize, 0s);

	/* SwitchMode may supply updated exposure/gain values to use. */
	AgcStatus agcStatus;
	agcStatus.shutterTime = 0.0s;
	agcStatus.analogueGain = 0.0;

	metadata.get("agc.status", agcStatus);
	if (agcStatus.shutterTime && agcStatus.analogueGain) {
		ControlList ctrls(sensorCtrls_);
		applyAGC(&agcStatus, ctrls);
		result->controls = std::move(ctrls);
		setCameraTimeoutValue();
	}

	/* Make a note of this as it tells us the HDR status of the first few frames. */
	hdrStatus_ = agcStatus.hdr;

	/*
	 * Initialise frame counts, and decide how many frames must be hidden or
	 * "mistrusted", which depends on whether this is a startup from cold,
	 * or merely a mode switch in a running system.
	 */
	frameCount_ = 0;
	if (firstStart_) {
		dropFrameCount_ = helper_->hideFramesStartup();
		mistrustCount_ = helper_->mistrustFramesStartup();

		/*
		 * Query the AGC/AWB for how many frames they may take to
		 * converge sufficiently. Where these numbers are non-zero
		 * we must allow for the frames with bad statistics
		 * (mistrustCount_) that they won't see. But if zero (i.e.
		 * no convergence necessary), no frames need to be dropped.
		 */
		unsigned int agcConvergenceFrames = 0;
		RPiController::AgcAlgorithm *agc = dynamic_cast<RPiController::AgcAlgorithm *>(
			controller_.getAlgorithm("agc"));
		if (agc) {
			agcConvergenceFrames = agc->getConvergenceFrames();
			if (agcConvergenceFrames)
				agcConvergenceFrames += mistrustCount_;
		}

		unsigned int awbConvergenceFrames = 0;
		RPiController::AwbAlgorithm *awb = dynamic_cast<RPiController::AwbAlgorithm *>(
			controller_.getAlgorithm("awb"));
		if (awb) {
			awbConvergenceFrames = awb->getConvergenceFrames();
			if (awbConvergenceFrames)
				awbConvergenceFrames += mistrustCount_;
		}

		/* The overall drop count must cover the slowest algorithm. */
		dropFrameCount_ = std::max({ dropFrameCount_, agcConvergenceFrames, awbConvergenceFrames });
		LOG(IPARPI, Debug) << "Drop " << dropFrameCount_ << " frames on startup";
	} else {
		dropFrameCount_ = helper_->hideFramesModeSwitch();
		mistrustCount_ = helper_->mistrustFramesModeSwitch();
	}

	result->dropFrameCount = dropFrameCount_;

	firstStart_ = false;
	lastRunTimestamp_ = 0;

	platformStart(controls, result);
}
/*
 * Map the given buffers into the IPA's address space, indexed by buffer id.
 *
 * A temporary FrameBuffer is constructed purely to hand the plane list to
 * MappedFrameBuffer. NOTE(review): this relies on MappedFrameBuffer keeping
 * its mapping valid after the temporary FrameBuffer is destroyed — confirm
 * against the MappedFrameBuffer implementation.
 */
void IpaBase::mapBuffers(const std::vector<IPABuffer> &buffers)
{
	for (const IPABuffer &buffer : buffers) {
		const FrameBuffer fb(buffer.planes);
		buffers_.emplace(buffer.id,
				 MappedFrameBuffer(&fb, MappedFrameBuffer::MapFlag::ReadWrite));
	}
}
/*
 * Remove (and unmap, via MappedFrameBuffer's destructor) the buffers with
 * the given ids. Ids that were never mapped are silently ignored.
 */
void IpaBase::unmapBuffers(const std::vector<unsigned int> &ids)
{
	for (unsigned int id : ids) {
		auto it = buffers_.find(id);
		if (it == buffers_.end())
			continue;

		/* Erase via the iterator to avoid a redundant second lookup. */
		buffers_.erase(it);
	}
}
/*
 * Prepare the ISP parameters for an incoming frame.
 *
 * Applies per-request controls, assembles the frame's metadata context
 * (device status, delayed AGC status, parsed embedded data), rate-limits
 * the controller to controllerMinFrameDuration, optionally runs inline
 * statistics processing, and finally signals prepareIspComplete.
 */
void IpaBase::prepareIsp(const PrepareParams &params)
{
	applyControls(params.requestControls);

	/*
	 * At start-up, or after a mode-switch, we may want to
	 * avoid running the control algos for a few frames in case
	 * they are "unreliable".
	 */
	int64_t frameTimestamp = params.sensorControls.get(controls::SensorTimestamp).value_or(0);
	unsigned int ipaContext = params.ipaContext % rpiMetadata_.size();
	RPiController::Metadata &rpiMetadata = rpiMetadata_[ipaContext];
	Span<uint8_t> embeddedBuffer;

	rpiMetadata.clear();
	fillDeviceStatus(params.sensorControls, ipaContext);

	if (params.buffers.embedded) {
		/*
		 * Pipeline handler has supplied us with an embedded data buffer,
		 * we must pass it to the CamHelper for parsing.
		 */
		auto it = buffers_.find(params.buffers.embedded);
		ASSERT(it != buffers_.end());
		embeddedBuffer = it->second.planes()[0];
	}

	/*
	 * AGC wants to know the algorithm status from the time it actioned the
	 * sensor exposure/gain changes. So fetch it from the metadata list
	 * indexed by the IPA cookie returned, and put it in the current frame
	 * metadata.
	 *
	 * Note if the HDR mode has changed, as things like tonemaps may need updating.
	 */
	AgcStatus agcStatus;
	bool hdrChange = false;
	RPiController::Metadata &delayedMetadata = rpiMetadata_[params.delayContext];
	if (!delayedMetadata.get<AgcStatus>("agc.status", agcStatus)) {
		rpiMetadata.set("agc.delayed_status", agcStatus);
		hdrChange = agcStatus.hdr.mode != hdrStatus_.mode;
		hdrStatus_ = agcStatus.hdr;
	}

	/*
	 * This may overwrite the DeviceStatus using values from the sensor
	 * metadata, and may also do additional custom processing.
	 */
	helper_->prepare(embeddedBuffer, rpiMetadata);

	/* Allow a 10% margin on the comparison below. */
	Duration delta = (frameTimestamp - lastRunTimestamp_) * 1.0ns;
	if (lastRunTimestamp_ && frameCount_ > dropFrameCount_ &&
	    delta < controllerMinFrameDuration * 0.9 && !hdrChange) {
		/*
		 * Ensure we merge the previous frame's metadata with the current
		 * frame. This will not overwrite exposure/gain values for the
		 * current frame, or any other bits of metadata that were added
		 * in helper_->Prepare().
		 */
		RPiController::Metadata &lastMetadata =
			rpiMetadata_[(ipaContext ? ipaContext : rpiMetadata_.size()) - 1];
		rpiMetadata.mergeCopy(lastMetadata);
		processPending_ = false;
	} else {
		processPending_ = true;
		lastRunTimestamp_ = frameTimestamp;
	}

	/*
	 * If the statistics are inline (i.e. already available with the Bayer
	 * frame), call processStats() now before prepare().
	 */
	if (controller_.getHardwareConfig().statsInline)
		processStats({ params.buffers, params.ipaContext });

	/* Do we need/want to call prepare? */
	if (processPending_) {
		controller_.prepare(&rpiMetadata);
		/* Actually prepare the ISP parameters for the frame. */
		platformPrepareIsp(params, rpiMetadata);
	}

	frameCount_++;

	/* If the statistics are inline the metadata can be returned early. */
	if (controller_.getHardwareConfig().statsInline)
		reportMetadata(ipaContext);

	/* Ready to push the input buffer into the ISP. */
	prepareIspComplete.emit(params.buffers, stitchSwapBuffers_);
}
/*
 * Run the control algorithms on a completed statistics buffer.
 *
 * Skipped while frames are mistrusted or when prepareIsp() decided no
 * processing is pending. Any resulting AGC update is pushed to the sensor
 * via the setDelayedControls signal. Ends by signalling
 * processStatsComplete.
 */
void IpaBase::processStats(const ProcessParams &params)
{
	unsigned int ipaContext = params.ipaContext % rpiMetadata_.size();

	if (processPending_ && frameCount_ >= mistrustCount_) {
		RPiController::Metadata &rpiMetadata = rpiMetadata_[ipaContext];

		auto it = buffers_.find(params.buffers.stats);
		if (it == buffers_.end()) {
			LOG(IPARPI, Error) << "Could not find stats buffer!";
			return;
		}

		RPiController::StatisticsPtr statistics = platformProcessStats(it->second.planes()[0]);

		/* reportMetadata() will pick this up and set the FocusFoM metadata */
		rpiMetadata.set("focus.status", statistics->focusRegions);

		helper_->process(statistics, rpiMetadata);
		controller_.process(statistics, &rpiMetadata);

		struct AgcStatus agcStatus;
		if (rpiMetadata.get("agc.status", agcStatus) == 0) {
			ControlList ctrls(sensorCtrls_);
			applyAGC(&agcStatus, ctrls);
			setDelayedControls.emit(ctrls, ipaContext);
			setCameraTimeoutValue();
		}
	}

	/*
	 * If the statistics are not inline the metadata must be returned now,
	 * before the processStatsComplete signal.
	 */
	if (!controller_.getHardwareConfig().statsInline)
		reportMetadata(ipaContext);

	processStatsComplete.emit(params.buffers);
}
/*
 * Rebuild the CameraMode (mode_) from the sensor info.
 *
 * Derives the geometry, scaling/binning estimates, line/frame timing and
 * exposure/gain limits for the selected sensor mode, checking the timing
 * against the ISP's pixel processing rate limit.
 */
void IpaBase::setMode(const IPACameraSensorInfo &sensorInfo)
{
	mode_.bitdepth = sensorInfo.bitsPerPixel;
	mode_.width = sensorInfo.outputSize.width;
	mode_.height = sensorInfo.outputSize.height;
	mode_.sensorWidth = sensorInfo.activeAreaSize.width;
	mode_.sensorHeight = sensorInfo.activeAreaSize.height;
	mode_.cropX = sensorInfo.analogCrop.x;
	mode_.cropY = sensorInfo.analogCrop.y;
	mode_.pixelRate = sensorInfo.pixelRate;

	/*
	 * Calculate scaling parameters. The scale_[xy] factors are determined
	 * by the ratio between the crop rectangle size and the output size.
	 */
	mode_.scaleX = sensorInfo.analogCrop.width / sensorInfo.outputSize.width;
	mode_.scaleY = sensorInfo.analogCrop.height / sensorInfo.outputSize.height;

	/*
	 * We're not told by the pipeline handler how scaling is split between
	 * binning and digital scaling. For now, as a heuristic, assume that
	 * downscaling up to 2 is achieved through binning, and that any
	 * additional scaling is achieved through digital scaling.
	 *
	 * \todo Get the pipeline handle to provide the full data
	 */
	mode_.binX = std::min(2, static_cast<int>(mode_.scaleX));
	mode_.binY = std::min(2, static_cast<int>(mode_.scaleY));

	/* The noise factor is the square root of the total binning factor. */
	mode_.noiseFactor = std::sqrt(mode_.binX * mode_.binY);

	/*
	 * Calculate the line length as the ratio between the line length in
	 * pixels and the pixel rate.
	 */
	mode_.minLineLength = sensorInfo.minLineLength * (1.0s / sensorInfo.pixelRate);
	mode_.maxLineLength = sensorInfo.maxLineLength * (1.0s / sensorInfo.pixelRate);

	/*
	 * Ensure that the maximum pixel processing rate does not exceed the ISP
	 * hardware capabilities. If it does, try adjusting the minimum line
	 * length to compensate if possible.
	 */
	Duration minPixelTime = controller_.getHardwareConfig().minPixelProcessingTime;
	Duration pixelTime = mode_.minLineLength / mode_.width;
	if (minPixelTime && pixelTime < minPixelTime) {
		Duration adjustedLineLength = minPixelTime * mode_.width;
		if (adjustedLineLength <= mode_.maxLineLength) {
			LOG(IPARPI, Info)
				<< "Adjusting mode minimum line length from " << mode_.minLineLength
				<< " to " << adjustedLineLength << " because of ISP constraints.";
			mode_.minLineLength = adjustedLineLength;
		} else {
			LOG(IPARPI, Error)
				<< "Sensor minimum line length of " << pixelTime * mode_.width
				<< " (" << 1us / pixelTime << " MPix/s)"
				<< " is below the minimum allowable ISP limit of "
				<< adjustedLineLength
				<< " (" << 1us / minPixelTime << " MPix/s) ";
			LOG(IPARPI, Error)
				<< "THIS WILL CAUSE IMAGE CORRUPTION!!! "
				<< "Please update the camera sensor driver to allow more horizontal blanking control.";
		}
	}

	/*
	 * Set the frame length limits for the mode to ensure exposure and
	 * framerate calculations are clipped appropriately.
	 */
	mode_.minFrameLength = sensorInfo.minFrameLength;
	mode_.maxFrameLength = sensorInfo.maxFrameLength;

	/* Store these for convenience. */
	mode_.minFrameDuration = mode_.minFrameLength * mode_.minLineLength;
	mode_.maxFrameDuration = mode_.maxFrameLength * mode_.maxLineLength;

	/*
	 * Some sensors may have different sensitivities in different modes;
	 * the CamHelper will know the correct value.
	 */
	mode_.sensitivity = helper_->getModeSensitivity(mode_);

	const ControlInfo &gainCtrl = sensorCtrls_.at(V4L2_CID_ANALOGUE_GAIN);
	const ControlInfo &shutterCtrl = sensorCtrls_.at(V4L2_CID_EXPOSURE);

	mode_.minAnalogueGain = helper_->gain(gainCtrl.min().get<int32_t>());
	mode_.maxAnalogueGain = helper_->gain(gainCtrl.max().get<int32_t>());

	/*
	 * We need to give the helper the min/max frame durations so it can calculate
	 * the correct exposure limits below.
	 */
	helper_->setCameraMode(mode_);

	/* Shutter speed is calculated based on the limits of the frame durations. */
	mode_.minShutter = helper_->exposure(shutterCtrl.min().get<int32_t>(), mode_.minLineLength);
	mode_.maxShutter = Duration::max();
	helper_->getBlanking(mode_.maxShutter,
			     mode_.minFrameDuration, mode_.maxFrameDuration);
}
void IpaBase::setCameraTimeoutValue()
{
/*
* Take the maximum value of the exposure queue as the camera timeout
* value to pass back to the pipeline handler. Only signal if it has changed
* from the last set value.
*/
auto max = std::max_element(frameLengths_.begin(), frameLengths_.end());
if (*max != lastTimeout_) {
setCameraTimeout.emit(max->get<std::milli>());
lastTimeout_ = *max;
}
}
/*
 * Check that the sensor exposes every V4L2 control the IPA depends on.
 * Returns false (after logging the first missing control) if any is absent.
 */
bool IpaBase::validateSensorControls()
{
	static const uint32_t required[] = {
		V4L2_CID_ANALOGUE_GAIN,
		V4L2_CID_EXPOSURE,
		V4L2_CID_VBLANK,
		V4L2_CID_HBLANK,
	};

	for (uint32_t id : required) {
		if (sensorCtrls_.find(id) != sensorCtrls_.end())
			continue;

		LOG(IPARPI, Error) << "Unable to find sensor control "
				   << utils::hex(id);
		return false;
	}

	return true;
}
/*
 * Check that the lens device exposes the absolute focus control, which is
 * the only control the IPA uses to drive the lens.
 */
bool IpaBase::validateLensControls()
{
	const bool haveFocus = lensCtrls_.find(V4L2_CID_FOCUS_ABSOLUTE) != lensCtrls_.end();

	if (!haveFocus)
		LOG(IPARPI, Error) << "Unable to find Lens control V4L2_CID_FOCUS_ABSOLUTE";

	return haveFocus;
}
/*
 * Converting between enums (used in the libcamera API) and the names that
 * we use to identify different modes. Unfortunately, the conversion tables
 * must be kept up-to-date by hand.
 */
static const std::map<int32_t, std::string> MeteringModeTable = {
	{ controls::MeteringCentreWeighted, "centre-weighted" },
	{ controls::MeteringSpot, "spot" },
	{ controls::MeteringMatrix, "matrix" },
	{ controls::MeteringCustom, "custom" },
};

static const std::map<int32_t, std::string> ConstraintModeTable = {
	{ controls::ConstraintNormal, "normal" },
	{ controls::ConstraintHighlight, "highlight" },
	{ controls::ConstraintShadows, "shadows" },
	{ controls::ConstraintCustom, "custom" },
};

static const std::map<int32_t, std::string> ExposureModeTable = {
	{ controls::ExposureNormal, "normal" },
	{ controls::ExposureShort, "short" },
	{ controls::ExposureLong, "long" },
	{ controls::ExposureCustom, "custom" },
};

static const std::map<int32_t, std::string> AwbModeTable = {
	{ controls::AwbAuto, "auto" },
	{ controls::AwbIncandescent, "incandescent" },
	{ controls::AwbTungsten, "tungsten" },
	{ controls::AwbFluorescent, "fluorescent" },
	{ controls::AwbIndoor, "indoor" },
	{ controls::AwbDaylight, "daylight" },
	{ controls::AwbCloudy, "cloudy" },
	{ controls::AwbCustom, "custom" },
};

/* The AF tables map directly to the controller's AfAlgorithm enums. */
static const std::map<int32_t, RPiController::AfAlgorithm::AfMode> AfModeTable = {
	{ controls::AfModeManual, RPiController::AfAlgorithm::AfModeManual },
	{ controls::AfModeAuto, RPiController::AfAlgorithm::AfModeAuto },
	{ controls::AfModeContinuous, RPiController::AfAlgorithm::AfModeContinuous },
};

static const std::map<int32_t, RPiController::AfAlgorithm::AfRange> AfRangeTable = {
	{ controls::AfRangeNormal, RPiController::AfAlgorithm::AfRangeNormal },
	{ controls::AfRangeMacro, RPiController::AfAlgorithm::AfRangeMacro },
	{ controls::AfRangeFull, RPiController::AfAlgorithm::AfRangeFull },
};

static const std::map<int32_t, RPiController::AfAlgorithm::AfPause> AfPauseTable = {
	{ controls::AfPauseImmediate, RPiController::AfAlgorithm::AfPauseImmediate },
	{ controls::AfPauseDeferred, RPiController::AfAlgorithm::AfPauseDeferred },
	{ controls::AfPauseResume, RPiController::AfAlgorithm::AfPauseResume },
};

/* HDR mode names must match the mode names used in the tuning file. */
static const std::map<int32_t, std::string> HdrModeTable = {
	{ controls::HdrModeOff, "Off" },
	{ controls::HdrModeMultiExposureUnmerged, "MultiExposureUnmerged" },
	{ controls::HdrModeMultiExposure, "MultiExposure" },
	{ controls::HdrModeSingleExposure, "SingleExposure" },
	{ controls::HdrModeNight, "Night" },
};
/*
 * Apply an incoming libcamera control list to the controller algorithms.
 *
 * Each recognised control is translated into a call on the corresponding
 * algorithm, looked up by name from the controller. Where an algorithm is
 * missing, a warning is logged and the control is ignored. Accepted values
 * are echoed back through libcameraMetadata_ so the application can observe
 * what was actually applied.
 *
 * \param[in] controls The control list supplied with the request
 */
void IpaBase::applyControls(const ControlList &controls)
{
	using RPiController::AgcAlgorithm;
	using RPiController::AfAlgorithm;
	using RPiController::ContrastAlgorithm;
	using RPiController::DenoiseAlgorithm;
	using RPiController::HdrAlgorithm;

	/* Clear the return metadata buffer. */
	libcameraMetadata_.clear();

	/* Because some AF controls are mode-specific, handle AF mode change first. */
	if (controls.contains(controls::AF_MODE)) {
		AfAlgorithm *af = dynamic_cast<AfAlgorithm *>(controller_.getAlgorithm("af"));
		if (!af) {
			LOG(IPARPI, Warning)
				<< "Could not set AF_MODE - no AF algorithm";
		}

		int32_t idx = controls.get(controls::AF_MODE).get<int32_t>();
		auto mode = AfModeTable.find(idx);
		if (mode == AfModeTable.end()) {
			LOG(IPARPI, Error) << "AF mode " << idx
					   << " not recognised";
		} else if (af)
			af->setMode(mode->second);
	}

	/* Iterate over controls */
	for (auto const &ctrl : controls) {
		LOG(IPARPI, Debug) << "Request ctrl: "
				   << controls::controls.at(ctrl.first)->name()
				   << " = " << ctrl.second.toString();

		switch (ctrl.first) {
		case controls::AE_ENABLE: {
			RPiController::AgcAlgorithm *agc = dynamic_cast<RPiController::AgcAlgorithm *>(
				controller_.getAlgorithm("agc"));
			if (!agc) {
				LOG(IPARPI, Warning)
					<< "Could not set AE_ENABLE - no AGC algorithm";
				break;
			}

			if (ctrl.second.get<bool>() == false)
				agc->disableAuto();
			else
				agc->enableAuto();

			libcameraMetadata_.set(controls::AeEnable, ctrl.second.get<bool>());
			break;
		}

		case controls::EXPOSURE_TIME: {
			RPiController::AgcAlgorithm *agc = dynamic_cast<RPiController::AgcAlgorithm *>(
				controller_.getAlgorithm("agc"));
			if (!agc) {
				LOG(IPARPI, Warning)
					<< "Could not set EXPOSURE_TIME - no AGC algorithm";
				break;
			}

			/* The control provides units of microseconds. */
			agc->setFixedShutter(0, ctrl.second.get<int32_t>() * 1.0us);

			libcameraMetadata_.set(controls::ExposureTime, ctrl.second.get<int32_t>());
			break;
		}

		case controls::ANALOGUE_GAIN: {
			RPiController::AgcAlgorithm *agc = dynamic_cast<RPiController::AgcAlgorithm *>(
				controller_.getAlgorithm("agc"));
			if (!agc) {
				LOG(IPARPI, Warning)
					<< "Could not set ANALOGUE_GAIN - no AGC algorithm";
				break;
			}

			/* A fixed gain of zero would switch the AGC back to auto. */
			agc->setFixedAnalogueGain(0, ctrl.second.get<float>());

			libcameraMetadata_.set(controls::AnalogueGain,
					       ctrl.second.get<float>());
			break;
		}

		case controls::AE_METERING_MODE: {
			RPiController::AgcAlgorithm *agc = dynamic_cast<RPiController::AgcAlgorithm *>(
				controller_.getAlgorithm("agc"));
			if (!agc) {
				LOG(IPARPI, Warning)
					<< "Could not set AE_METERING_MODE - no AGC algorithm";
				break;
			}

			int32_t idx = ctrl.second.get<int32_t>();
			if (MeteringModeTable.count(idx)) {
				agc->setMeteringMode(MeteringModeTable.at(idx));
				libcameraMetadata_.set(controls::AeMeteringMode, idx);
			} else {
				LOG(IPARPI, Error) << "Metering mode " << idx
						   << " not recognised";
			}
			break;
		}

		case controls::AE_CONSTRAINT_MODE: {
			RPiController::AgcAlgorithm *agc = dynamic_cast<RPiController::AgcAlgorithm *>(
				controller_.getAlgorithm("agc"));
			if (!agc) {
				LOG(IPARPI, Warning)
					<< "Could not set AE_CONSTRAINT_MODE - no AGC algorithm";
				break;
			}

			int32_t idx = ctrl.second.get<int32_t>();
			if (ConstraintModeTable.count(idx)) {
				agc->setConstraintMode(ConstraintModeTable.at(idx));
				libcameraMetadata_.set(controls::AeConstraintMode, idx);
			} else {
				LOG(IPARPI, Error) << "Constraint mode " << idx
						   << " not recognised";
			}
			break;
		}

		case controls::AE_EXPOSURE_MODE: {
			RPiController::AgcAlgorithm *agc = dynamic_cast<RPiController::AgcAlgorithm *>(
				controller_.getAlgorithm("agc"));
			if (!agc) {
				LOG(IPARPI, Warning)
					<< "Could not set AE_EXPOSURE_MODE - no AGC algorithm";
				break;
			}

			int32_t idx = ctrl.second.get<int32_t>();
			if (ExposureModeTable.count(idx)) {
				agc->setExposureMode(ExposureModeTable.at(idx));
				libcameraMetadata_.set(controls::AeExposureMode, idx);
			} else {
				LOG(IPARPI, Error) << "Exposure mode " << idx
						   << " not recognised";
			}
			break;
		}

		case controls::EXPOSURE_VALUE: {
			RPiController::AgcAlgorithm *agc = dynamic_cast<RPiController::AgcAlgorithm *>(
				controller_.getAlgorithm("agc"));
			if (!agc) {
				LOG(IPARPI, Warning)
					<< "Could not set EXPOSURE_VALUE - no AGC algorithm";
				break;
			}

			/*
			 * The SetEv() function takes in a direct exposure multiplier.
			 * So convert to 2^EV
			 */
			double ev = pow(2.0, ctrl.second.get<float>());
			agc->setEv(0, ev);
			libcameraMetadata_.set(controls::ExposureValue,
					       ctrl.second.get<float>());
			break;
		}

		case controls::AE_FLICKER_MODE: {
			RPiController::AgcAlgorithm *agc = dynamic_cast<RPiController::AgcAlgorithm *>(
				controller_.getAlgorithm("agc"));
			if (!agc) {
				LOG(IPARPI, Warning)
					<< "Could not set AeFlickerMode - no AGC algorithm";
				break;
			}

			int32_t mode = ctrl.second.get<int32_t>();
			bool modeValid = true;

			switch (mode) {
			case controls::FlickerOff:
				/* A flicker period of zero disables avoidance. */
				agc->setFlickerPeriod(0us);

				break;

			case controls::FlickerManual:
				agc->setFlickerPeriod(flickerState_.manualPeriod);

				break;

			default:
				LOG(IPARPI, Error) << "Flicker mode " << mode << " is not supported";
				modeValid = false;

				break;
			}

			if (modeValid)
				flickerState_.mode = mode;

			break;
		}

		case controls::AE_FLICKER_PERIOD: {
			RPiController::AgcAlgorithm *agc = dynamic_cast<RPiController::AgcAlgorithm *>(
				controller_.getAlgorithm("agc"));
			if (!agc) {
				LOG(IPARPI, Warning)
					<< "Could not set AeFlickerPeriod - no AGC algorithm";
				break;
			}

			uint32_t manualPeriod = ctrl.second.get<int32_t>();
			flickerState_.manualPeriod = manualPeriod * 1.0us;

			/*
			 * We note that it makes no difference if the mode gets set to "manual"
			 * first, and the period updated after, or vice versa.
			 */
			if (flickerState_.mode == controls::FlickerManual)
				agc->setFlickerPeriod(flickerState_.manualPeriod);

			break;
		}

		case controls::AWB_ENABLE: {
			/* Silently ignore this control for a mono sensor. */
			if (monoSensor_)
				break;

			RPiController::AwbAlgorithm *awb = dynamic_cast<RPiController::AwbAlgorithm *>(
				controller_.getAlgorithm("awb"));
			if (!awb) {
				LOG(IPARPI, Warning)
					<< "Could not set AWB_ENABLE - no AWB algorithm";
				break;
			}

			if (ctrl.second.get<bool>() == false)
				awb->disableAuto();
			else
				awb->enableAuto();

			libcameraMetadata_.set(controls::AwbEnable,
					       ctrl.second.get<bool>());
			break;
		}

		case controls::AWB_MODE: {
			/* Silently ignore this control for a mono sensor. */
			if (monoSensor_)
				break;

			RPiController::AwbAlgorithm *awb = dynamic_cast<RPiController::AwbAlgorithm *>(
				controller_.getAlgorithm("awb"));
			if (!awb) {
				LOG(IPARPI, Warning)
					<< "Could not set AWB_MODE - no AWB algorithm";
				break;
			}

			int32_t idx = ctrl.second.get<int32_t>();
			if (AwbModeTable.count(idx)) {
				awb->setMode(AwbModeTable.at(idx));
				libcameraMetadata_.set(controls::AwbMode, idx);
			} else {
				LOG(IPARPI, Error) << "AWB mode " << idx
						   << " not recognised";
			}
			break;
		}

		case controls::COLOUR_GAINS: {
			/* Silently ignore this control for a mono sensor. */
			if (monoSensor_)
				break;

			auto gains = ctrl.second.get<Span<const float>>();
			RPiController::AwbAlgorithm *awb = dynamic_cast<RPiController::AwbAlgorithm *>(
				controller_.getAlgorithm("awb"));
			if (!awb) {
				LOG(IPARPI, Warning)
					<< "Could not set COLOUR_GAINS - no AWB algorithm";
				break;
			}

			awb->setManualGains(gains[0], gains[1]);
			if (gains[0] != 0.0f && gains[1] != 0.0f)
				/* A gain of 0.0f will switch back to auto mode. */
				libcameraMetadata_.set(controls::ColourGains,
						       { gains[0], gains[1] });
			break;
		}

		case controls::BRIGHTNESS: {
			RPiController::ContrastAlgorithm *contrast = dynamic_cast<RPiController::ContrastAlgorithm *>(
				controller_.getAlgorithm("contrast"));
			if (!contrast) {
				LOG(IPARPI, Warning)
					<< "Could not set BRIGHTNESS - no contrast algorithm";
				break;
			}

			/* The contrast algorithm works on a 16-bit scale internally. */
			contrast->setBrightness(ctrl.second.get<float>() * 65536);
			libcameraMetadata_.set(controls::Brightness,
					       ctrl.second.get<float>());
			break;
		}

		case controls::CONTRAST: {
			RPiController::ContrastAlgorithm *contrast = dynamic_cast<RPiController::ContrastAlgorithm *>(
				controller_.getAlgorithm("contrast"));
			if (!contrast) {
				LOG(IPARPI, Warning)
					<< "Could not set CONTRAST - no contrast algorithm";
				break;
			}

			contrast->setContrast(ctrl.second.get<float>());
			libcameraMetadata_.set(controls::Contrast,
					       ctrl.second.get<float>());
			break;
		}

		case controls::SATURATION: {
			/* Silently ignore this control for a mono sensor. */
			if (monoSensor_)
				break;

			RPiController::CcmAlgorithm *ccm = dynamic_cast<RPiController::CcmAlgorithm *>(
				controller_.getAlgorithm("ccm"));
			if (!ccm) {
				LOG(IPARPI, Warning)
					<< "Could not set SATURATION - no ccm algorithm";
				break;
			}

			ccm->setSaturation(ctrl.second.get<float>());
			libcameraMetadata_.set(controls::Saturation,
					       ctrl.second.get<float>());
			break;
		}

		case controls::SHARPNESS: {
			RPiController::SharpenAlgorithm *sharpen = dynamic_cast<RPiController::SharpenAlgorithm *>(
				controller_.getAlgorithm("sharpen"));
			if (!sharpen) {
				LOG(IPARPI, Warning)
					<< "Could not set SHARPNESS - no sharpen algorithm";
				break;
			}

			sharpen->setStrength(ctrl.second.get<float>());
			libcameraMetadata_.set(controls::Sharpness,
					       ctrl.second.get<float>());
			break;
		}

		case controls::SCALER_CROP: {
			/* We do nothing with this, but should avoid the warning below. */
			break;
		}

		case controls::FRAME_DURATION_LIMITS: {
			auto frameDurations = ctrl.second.get<Span<const int64_t>>();
			applyFrameDurations(frameDurations[0] * 1.0us, frameDurations[1] * 1.0us);
			break;
		}

		case controls::draft::NOISE_REDUCTION_MODE:
			/* Handled below in handleControls() */
			libcameraMetadata_.set(controls::draft::NoiseReductionMode,
					       ctrl.second.get<int32_t>());
			break;

		case controls::AF_MODE:
			break; /* We already handled this one above */

		case controls::AF_RANGE: {
			AfAlgorithm *af = dynamic_cast<AfAlgorithm *>(controller_.getAlgorithm("af"));
			if (!af) {
				LOG(IPARPI, Warning)
					<< "Could not set AF_RANGE - no focus algorithm";
				break;
			}

			auto range = AfRangeTable.find(ctrl.second.get<int32_t>());
			if (range == AfRangeTable.end()) {
				LOG(IPARPI, Error) << "AF range " << ctrl.second.get<int32_t>()
						   << " not recognised";
				break;
			}
			af->setRange(range->second);
			break;
		}

		case controls::AF_SPEED: {
			AfAlgorithm *af = dynamic_cast<AfAlgorithm *>(controller_.getAlgorithm("af"));
			if (!af) {
				LOG(IPARPI, Warning)
					<< "Could not set AF_SPEED - no focus algorithm";
				break;
			}

			AfAlgorithm::AfSpeed speed = ctrl.second.get<int32_t>() == controls::AfSpeedFast ?
						     AfAlgorithm::AfSpeedFast : AfAlgorithm::AfSpeedNormal;
			af->setSpeed(speed);
			break;
		}

		case controls::AF_METERING: {
			AfAlgorithm *af = dynamic_cast<AfAlgorithm *>(controller_.getAlgorithm("af"));
			if (!af) {
				LOG(IPARPI, Warning)
					<< "Could not set AF_METERING - no AF algorithm";
				break;
			}
			af->setMetering(ctrl.second.get<int32_t>() == controls::AfMeteringWindows);
			break;
		}

		case controls::AF_WINDOWS: {
			AfAlgorithm *af = dynamic_cast<AfAlgorithm *>(controller_.getAlgorithm("af"));
			if (!af) {
				LOG(IPARPI, Warning)
					<< "Could not set AF_WINDOWS - no AF algorithm";
				break;
			}
			af->setWindows(ctrl.second.get<Span<const Rectangle>>());
			break;
		}

		case controls::AF_PAUSE: {
			/* Pausing is only meaningful in continuous AF mode. */
			AfAlgorithm *af = dynamic_cast<AfAlgorithm *>(controller_.getAlgorithm("af"));
			if (!af || af->getMode() != AfAlgorithm::AfModeContinuous) {
				LOG(IPARPI, Warning)
					<< "Could not set AF_PAUSE - no AF algorithm or not Continuous";
				break;
			}

			auto pause = AfPauseTable.find(ctrl.second.get<int32_t>());
			if (pause == AfPauseTable.end()) {
				LOG(IPARPI, Error) << "AF pause " << ctrl.second.get<int32_t>()
						   << " not recognised";
				break;
			}
			af->pause(pause->second);
			break;
		}

		case controls::AF_TRIGGER: {
			/* Scan triggering is only meaningful in auto AF mode. */
			AfAlgorithm *af = dynamic_cast<AfAlgorithm *>(controller_.getAlgorithm("af"));
			if (!af || af->getMode() != AfAlgorithm::AfModeAuto) {
				LOG(IPARPI, Warning)
					<< "Could not set AF_TRIGGER - no AF algorithm or not Auto";
				break;
			} else {
				if (ctrl.second.get<int32_t>() == controls::AfTriggerStart)
					af->triggerScan();
				else
					af->cancelScan();
			}
			break;
		}

		case controls::LENS_POSITION: {
			AfAlgorithm *af = dynamic_cast<AfAlgorithm *>(controller_.getAlgorithm("af"));
			if (af) {
				int32_t hwpos;
				if (af->setLensPosition(ctrl.second.get<float>(), &hwpos)) {
					/* Drive the lens directly to the requested position. */
					ControlList lensCtrls(lensCtrls_);
					lensCtrls.set(V4L2_CID_FOCUS_ABSOLUTE, hwpos);
					setLensControls.emit(lensCtrls);
				}
			} else {
				LOG(IPARPI, Warning)
					<< "Could not set LENS_POSITION - no AF algorithm";
			}
			break;
		}

		case controls::HDR_MODE: {
			HdrAlgorithm *hdr = dynamic_cast<HdrAlgorithm *>(controller_.getAlgorithm("hdr"));
			if (!hdr) {
				LOG(IPARPI, Warning) << "No HDR algorithm available";
				break;
			}

			auto mode = HdrModeTable.find(ctrl.second.get<int32_t>());
			if (mode == HdrModeTable.end()) {
				LOG(IPARPI, Warning) << "Unrecognised HDR mode";
				break;
			}

			AgcAlgorithm *agc = dynamic_cast<AgcAlgorithm *>(controller_.getAlgorithm("agc"));
			if (!agc) {
				LOG(IPARPI, Warning) << "HDR requires an AGC algorithm";
				break;
			}

			if (hdr->setMode(mode->second) == 0) {
				agc->setActiveChannels(hdr->getChannels());

				/* We also disable adaptive contrast enhancement if HDR is running. */
				ContrastAlgorithm *contrast =
					dynamic_cast<ContrastAlgorithm *>(controller_.getAlgorithm("contrast"));
				if (contrast) {
					if (mode->second == "Off")
						contrast->restoreCe();
					else
						contrast->enableCe(false);
				}

				DenoiseAlgorithm *denoise =
					dynamic_cast<DenoiseAlgorithm *>(controller_.getAlgorithm("denoise"));
				if (denoise) {
					/* \todo - make the HDR mode say what denoise it wants? */
					if (mode->second == "Night")
						denoise->setConfig("night");
					else if (mode->second == "SingleExposure")
						denoise->setConfig("hdr");
					/* MultiExposure doesn't need extra denoise. */
					else
						denoise->setConfig("normal");
				}
			} else
				LOG(IPARPI, Warning)
					<< "HDR mode " << mode->second << " not supported";

			break;
		}

		case controls::rpi::STATS_OUTPUT_ENABLE:
			statsMetadataOutput_ = ctrl.second.get<bool>();
			break;

		default:
			LOG(IPARPI, Warning)
				<< "Ctrl " << controls::controls.at(ctrl.first)->name()
				<< " is not handled.";
			break;
		}
	}

	/* Give derived classes a chance to examine the new controls. */
	handleControls(controls);
}
/*
 * Translate the sensor V4L2 controls for a frame into a DeviceStatus and
 * store it in the Raspberry Pi metadata for the given IPA context slot.
 */
void IpaBase::fillDeviceStatus(const ControlList &sensorControls, unsigned int ipaContext)
{
	/* Extract the raw sensor settings that were applied for this frame. */
	const int32_t exposureLines = sensorControls.get(V4L2_CID_EXPOSURE).get<int32_t>();
	const int32_t gainCode = sensorControls.get(V4L2_CID_ANALOGUE_GAIN).get<int32_t>();
	const int32_t vblank = sensorControls.get(V4L2_CID_VBLANK).get<int32_t>();
	const int32_t hblank = sensorControls.get(V4L2_CID_HBLANK).get<int32_t>();

	/* Convert them into the units used by the controller algorithms. */
	DeviceStatus deviceStatus = {};
	deviceStatus.lineLength = helper_->hblankToLineLength(hblank);
	deviceStatus.shutterSpeed = helper_->exposure(exposureLines, deviceStatus.lineLength);
	deviceStatus.analogueGain = helper_->gain(gainCode);
	deviceStatus.frameLength = mode_.height + vblank;

	/* Include the lens position if an AF algorithm can report one. */
	auto *af = dynamic_cast<RPiController::AfAlgorithm *>(controller_.getAlgorithm("af"));
	if (af)
		deviceStatus.lensPosition = af->getLensPosition();

	LOG(IPARPI, Debug) << "Metadata - " << deviceStatus;

	rpiMetadata_[ipaContext].set("device.status", deviceStatus);
}
/*
 * Populate the libcamera metadata buffer from the controller algorithm
 * outputs stored in the Raspberry Pi metadata for this IPA context, and
 * emit it to the pipeline handler via the metadataReady signal.
 */
void IpaBase::reportMetadata(unsigned int ipaContext)
{
	RPiController::Metadata &rpiMetadata = rpiMetadata_[ipaContext];
	std::unique_lock<RPiController::Metadata> lock(rpiMetadata);

	/*
	 * Certain information about the current frame and how it will be
	 * processed can be extracted and placed into the libcamera metadata
	 * buffer, where an application could query it.
	 */
	DeviceStatus *deviceStatus = rpiMetadata.getLocked<DeviceStatus>("device.status");
	if (deviceStatus) {
		libcameraMetadata_.set(controls::ExposureTime,
				       deviceStatus->shutterSpeed.get<std::micro>());
		libcameraMetadata_.set(controls::AnalogueGain, deviceStatus->analogueGain);
		/* The frame duration is the frame length expressed as a time. */
		libcameraMetadata_.set(controls::FrameDuration,
				       helper_->exposure(deviceStatus->frameLength, deviceStatus->lineLength).get<std::micro>());
		if (deviceStatus->sensorTemperature)
			libcameraMetadata_.set(controls::SensorTemperature, *deviceStatus->sensorTemperature);
		if (deviceStatus->lensPosition)
			libcameraMetadata_.set(controls::LensPosition, *deviceStatus->lensPosition);
	}

	AgcPrepareStatus *agcPrepareStatus = rpiMetadata.getLocked<AgcPrepareStatus>("agc.prepare_status");
	if (agcPrepareStatus) {
		libcameraMetadata_.set(controls::AeLocked, agcPrepareStatus->locked);
		libcameraMetadata_.set(controls::DigitalGain, agcPrepareStatus->digitalGain);
	}

	LuxStatus *luxStatus = rpiMetadata.getLocked<LuxStatus>("lux.status");
	if (luxStatus)
		libcameraMetadata_.set(controls::Lux, luxStatus->lux);

	AwbStatus *awbStatus = rpiMetadata.getLocked<AwbStatus>("awb.status");
	if (awbStatus) {
		libcameraMetadata_.set(controls::ColourGains, { static_cast<float>(awbStatus->gainR),
								static_cast<float>(awbStatus->gainB) });
		libcameraMetadata_.set(controls::ColourTemperature, awbStatus->temperatureK);
	}

	BlackLevelStatus *blackLevelStatus = rpiMetadata.getLocked<BlackLevelStatus>("black_level.status");
	if (blackLevelStatus)
		/* SensorBlackLevels reports four values (R, Gr, Gb, B); the single
		 * green level is used for both green channels. */
		libcameraMetadata_.set(controls::SensorBlackLevels,
				       { static_cast<int32_t>(blackLevelStatus->blackLevelR),
					 static_cast<int32_t>(blackLevelStatus->blackLevelG),
					 static_cast<int32_t>(blackLevelStatus->blackLevelG),
					 static_cast<int32_t>(blackLevelStatus->blackLevelB) });

	RPiController::FocusRegions *focusStatus =
		rpiMetadata.getLocked<RPiController::FocusRegions>("focus.status");
	if (focusStatus) {
		/*
		 * Calculate the average FoM over the central (symmetric) positions
		 * to give an overall scene FoM. This can change later if it is
		 * not deemed suitable.
		 */
		libcamera::Size size = focusStatus->size();
		unsigned rows = size.height;
		unsigned cols = size.width;

		uint64_t sum = 0;
		unsigned int numRegions = 0;
		for (unsigned r = rows / 3; r < rows - rows / 3; ++r) {
			for (unsigned c = cols / 4; c < cols - cols / 4; ++c) {
				sum += focusStatus->get({ (int)c, (int)r }).val;
				numRegions++;
			}
		}

		uint32_t focusFoM = sum / numRegions;
		libcameraMetadata_.set(controls::FocusFoM, focusFoM);
	}

	CcmStatus *ccmStatus = rpiMetadata.getLocked<CcmStatus>("ccm.status");
	if (ccmStatus) {
		float m[9];
		for (unsigned int i = 0; i < 9; i++)
			m[i] = ccmStatus->matrix[i];
		libcameraMetadata_.set(controls::ColourCorrectionMatrix, m);
	}

	const AfStatus *afStatus = rpiMetadata.getLocked<AfStatus>("af.status");
	if (afStatus) {
		/* Translate the internal AF state/pause enums to control values. */
		int32_t s, p;
		switch (afStatus->state) {
		case AfState::Scanning:
			s = controls::AfStateScanning;
			break;
		case AfState::Focused:
			s = controls::AfStateFocused;
			break;
		case AfState::Failed:
			s = controls::AfStateFailed;
			break;
		default:
			s = controls::AfStateIdle;
		}
		switch (afStatus->pauseState) {
		case AfPauseState::Pausing:
			p = controls::AfPauseStatePausing;
			break;
		case AfPauseState::Paused:
			p = controls::AfPauseStatePaused;
			break;
		default:
			p = controls::AfPauseStateRunning;
		}
		libcameraMetadata_.set(controls::AfState, s);
		libcameraMetadata_.set(controls::AfPauseState, p);
	}

	/*
	 * The HDR algorithm sets the HDR channel into the agc.status at the time that those
	 * AGC parameters were calculated several frames ago, so it comes back to us now in
	 * the delayed_status. If this frame is too soon after a mode switch for the
	 * delayed_status to be available, we use the HDR status that came out of the
	 * switchMode call.
	 */
	const AgcStatus *agcStatus = rpiMetadata.getLocked<AgcStatus>("agc.delayed_status");
	const HdrStatus &hdrStatus = agcStatus ? agcStatus->hdr : hdrStatus_;
	if (!hdrStatus.mode.empty() && hdrStatus.mode != "Off") {
		/* Map the mode name back to its control enum value. */
		int32_t hdrMode = controls::HdrModeOff;
		for (auto const &[mode, name] : HdrModeTable) {
			if (hdrStatus.mode == name) {
				hdrMode = mode;
				break;
			}
		}
		libcameraMetadata_.set(controls::HdrMode, hdrMode);

		if (hdrStatus.channel == "short")
			libcameraMetadata_.set(controls::HdrChannel, controls::HdrChannelShort);
		else if (hdrStatus.channel == "long")
			libcameraMetadata_.set(controls::HdrChannel, controls::HdrChannelLong);
		else if (hdrStatus.channel == "medium")
			libcameraMetadata_.set(controls::HdrChannel, controls::HdrChannelMedium);
		else
			libcameraMetadata_.set(controls::HdrChannel, controls::HdrChannelNone);
	}

	metadataReady.emit(libcameraMetadata_);
}
/*
 * Validate and store the requested frame duration limits, report them back
 * via metadata, and update the AGC's maximum shutter time accordingly.
 *
 * \param[in] minFrameDuration Requested minimum duration (0 selects the default)
 * \param[in] maxFrameDuration Requested maximum duration (0 selects the default)
 */
void IpaBase::applyFrameDurations(Duration minFrameDuration, Duration maxFrameDuration)
{
	/*
	 * This will only be applied once AGC recalculations occur.
	 * The values may be clamped based on the sensor mode capabilities as well.
	 */
	minFrameDuration_ = minFrameDuration ? minFrameDuration : defaultMinFrameDuration;
	maxFrameDuration_ = maxFrameDuration ? maxFrameDuration : defaultMaxFrameDuration;
	minFrameDuration_ = std::clamp(minFrameDuration_,
				       mode_.minFrameDuration, mode_.maxFrameDuration);
	maxFrameDuration_ = std::clamp(maxFrameDuration_,
				       mode_.minFrameDuration, mode_.maxFrameDuration);
	/* Guarantee max >= min even if the request was inverted. */
	maxFrameDuration_ = std::max(maxFrameDuration_, minFrameDuration_);

	/* Return the validated limits via metadata. */
	libcameraMetadata_.set(controls::FrameDurationLimits,
			       { static_cast<int64_t>(minFrameDuration_.get<std::micro>()),
				 static_cast<int64_t>(maxFrameDuration_.get<std::micro>()) });

	/*
	 * Calculate the maximum exposure time possible for the AGC to use.
	 * getBlanking() will update maxShutter with the largest exposure
	 * value possible.
	 */
	Duration maxShutter = Duration::max();
	helper_->getBlanking(maxShutter, minFrameDuration_, maxFrameDuration_);

	/*
	 * Guard the AGC lookup like every other call site in this file: a
	 * missing AGC algorithm must not cause a null dereference here.
	 */
	RPiController::AgcAlgorithm *agc = dynamic_cast<RPiController::AgcAlgorithm *>(
		controller_.getAlgorithm("agc"));
	if (agc)
		agc->setMaxShutter(maxShutter);
	else
		LOG(IPARPI, Warning)
			<< "Could not set maximum shutter - no AGC algorithm";
}
/*
 * Convert the AGC's requested shutter time and analogue gain into sensor
 * V4L2 controls (exposure lines, gain code and blanking), clamped to what
 * the sensor mode and frame duration limits allow.
 *
 * \param[in] agcStatus The AGC output to apply
 * \param[out] ctrls The control list to populate with sensor controls
 */
void IpaBase::applyAGC(const struct AgcStatus *agcStatus, ControlList &ctrls)
{
	const int32_t minGainCode = helper_->gainCode(mode_.minAnalogueGain);
	const int32_t maxGainCode = helper_->gainCode(mode_.maxAnalogueGain);
	int32_t gainCode = helper_->gainCode(agcStatus->analogueGain);

	/*
	 * Ensure anything larger than the max gain code will not be passed to
	 * DelayedControls. The AGC will correctly handle a lower gain returned
	 * by the sensor, provided it knows the actual gain used.
	 */
	gainCode = std::clamp<int32_t>(gainCode, minGainCode, maxGainCode);

	/* getBlanking might clip exposure time to the fps limits. */
	Duration exposure = agcStatus->shutterTime;
	auto [vblank, hblank] = helper_->getBlanking(exposure, minFrameDuration_, maxFrameDuration_);
	int32_t exposureLines = helper_->exposureLines(exposure,
						       helper_->hblankToLineLength(hblank));

	LOG(IPARPI, Debug) << "Applying AGC Exposure: " << exposure
			   << " (Shutter lines: " << exposureLines << ", AGC requested "
			   << agcStatus->shutterTime << ") Gain: "
			   << agcStatus->analogueGain << " (Gain Code: "
			   << gainCode << ")";

	ctrls.set(V4L2_CID_VBLANK, static_cast<int32_t>(vblank));
	ctrls.set(V4L2_CID_EXPOSURE, exposureLines);
	ctrls.set(V4L2_CID_ANALOGUE_GAIN, gainCode);

	/*
	 * At present, there is no way of knowing if a control is read-only.
	 * As a workaround, assume that if the minimum and maximum values of
	 * the V4L2_CID_HBLANK control are the same, it implies the control
	 * is read-only. This seems to be the case for all the cameras our IPA
	 * works with.
	 *
	 * \todo The control API ought to have a flag to specify if a control
	 * is read-only which could be used below.
	 */
	if (mode_.minLineLength != mode_.maxLineLength)
		ctrls.set(V4L2_CID_HBLANK, static_cast<int32_t>(hblank));

	/*
	 * Store the frame length times in a circular queue, up-to FrameLengthsQueueSize
	 * elements. This will be used to advertise a camera timeout value to the
	 * pipeline handler.
	 */
	frameLengths_.pop_front();
	frameLengths_.push_back(helper_->exposure(vblank + mode_.height,
						  helper_->hblankToLineLength(hblank)));
}
} /* namespace ipa::RPi */
} /* namespace libcamera */
/* SPDX-License-Identifier: BSD-2-Clause */
/*
* Copyright (C) 2019-2021, Raspberry Pi Ltd
*
* Raspberry Pi VC4/BCM2835 ISP IPA.
*/
#include <string.h>
#include <sys/mman.h>
#include <linux/bcm2835-isp.h>
#include <libcamera/base/log.h>
#include <libcamera/base/span.h>
#include <libcamera/control_ids.h>
#include <libcamera/ipa/ipa_module_info.h>
#include "common/ipa_base.h"
#include "controller/af_status.h"
#include "controller/agc_algorithm.h"
#include "controller/alsc_status.h"
#include "controller/awb_status.h"
#include "controller/black_level_status.h"
#include "controller/ccm_status.h"
#include "controller/contrast_status.h"
#include "controller/denoise_algorithm.h"
#include "controller/denoise_status.h"
#include "controller/dpc_status.h"
#include "controller/geq_status.h"
#include "controller/lux_status.h"
#include "controller/noise_status.h"
#include "controller/sharpen_status.h"
namespace libcamera {
LOG_DECLARE_CATEGORY(IPARPI)
namespace ipa::RPi {
/*
 * IPA implementation for the VC4/BCM2835 ISP, providing the platform
 * specific hooks on top of the common IpaBase logic.
 */
class IpaVc4 final : public IpaBase
{
public:
	IpaVc4()
		: IpaBase(), lsTable_(nullptr)
	{
	}

	~IpaVc4()
	{
		/* Unmap the lens shading table if it was mapped in. */
		if (lsTable_)
			munmap(lsTable_, MaxLsGridSize);
	}

private:
	/* IpaBase platform hooks. */
	int32_t platformInit(const InitParams &params, InitResult *result) override;
	int32_t platformStart(const ControlList &controls, StartResult *result) override;
	int32_t platformConfigure(const ConfigParams &params, ConfigResult *result) override;

	void platformPrepareIsp(const PrepareParams &params, RPiController::Metadata &rpiMetadata) override;
	RPiController::StatisticsPtr platformProcessStats(Span<uint8_t> mem) override;

	void handleControls(const ControlList &controls) override;
	bool validateIspControls();

	/* Helpers translating algorithm status structures into ISP controls. */
	void applyAWB(const struct AwbStatus *awbStatus, ControlList &ctrls);
	void applyDG(const struct AgcPrepareStatus *dgStatus, ControlList &ctrls);
	void applyCCM(const struct CcmStatus *ccmStatus, ControlList &ctrls);
	void applyBlackLevel(const struct BlackLevelStatus *blackLevelStatus, ControlList &ctrls);
	void applyGamma(const struct ContrastStatus *contrastStatus, ControlList &ctrls);
	void applyGEQ(const struct GeqStatus *geqStatus, ControlList &ctrls);
	void applyDenoise(const struct DenoiseStatus *denoiseStatus, ControlList &ctrls);
	void applySharpen(const struct SharpenStatus *sharpenStatus, ControlList &ctrls);
	void applyDPC(const struct DpcStatus *dpcStatus, ControlList &ctrls);
	void applyLS(const struct AlscStatus *lsStatus, ControlList &ctrls);
	void applyAF(const struct AfStatus *afStatus, ControlList &lensCtrls);
	void resampleTable(uint16_t dest[], const std::vector<double> &src, int destW, int destH);

	/* VC4 ISP controls. */
	ControlInfoMap ispCtrls_;

	/* LS table allocation passed in from the pipeline handler. */
	SharedFD lsTableHandle_;
	void *lsTable_;
};
/*
 * Perform VC4 specific initialisation: verify the tuning data was
 * generated for the bcm2835 ISP target.
 */
int32_t IpaVc4::platformInit([[maybe_unused]] const InitParams &params, [[maybe_unused]] InitResult *result)
{
	const std::string &target = controller_.getTarget();

	if (target == "bcm2835")
		return 0;

	/* Reject tuning files written for a different ISP. */
	LOG(IPARPI, Error)
		<< "Tuning data file target returned \"" << target << "\""
		<< ", expected \"bcm2835\"";
	return -EINVAL;
}
/* No VC4 specific start-up actions are required. */
int32_t IpaVc4::platformStart([[maybe_unused]] const ControlList &controls,
			      [[maybe_unused]] StartResult *result)
{
	return 0;
}
/*
 * Perform VC4 specific configuration: validate the ISP control set and
 * map the lens shading table buffer (shared with the pipeline handler)
 * into this process.
 */
int32_t IpaVc4::platformConfigure(const ConfigParams &params, [[maybe_unused]] ConfigResult *result)
{
	ispCtrls_ = params.ispControls;
	if (!validateIspControls()) {
		LOG(IPARPI, Error) << "ISP control validation failed.";
		return -1;
	}

	/* Store the lens shading table pointer and handle if available. */
	if (params.lsTableHandle.isValid()) {
		/* Remove any previous table, if there was one. */
		if (lsTable_) {
			munmap(lsTable_, MaxLsGridSize);
			lsTable_ = nullptr;
		}

		/* Map the LS table buffer into user space. */
		lsTableHandle_ = std::move(params.lsTableHandle);
		if (lsTableHandle_.isValid()) {
			lsTable_ = mmap(nullptr, MaxLsGridSize, PROT_READ | PROT_WRITE,
					MAP_SHARED, lsTableHandle_.get(), 0);

			if (lsTable_ == MAP_FAILED) {
				LOG(IPARPI, Error) << "dmaHeap mmap failure for LS table.";
				/* Leave lsTable_ null so applyLS is skipped rather than crashing. */
				lsTable_ = nullptr;
			}
		}
	}

	return 0;
}
/*
 * Translate each algorithm's status (if present in the Raspberry Pi
 * metadata) into VC4 ISP controls, and emit them together with any lens
 * controls produced by the AF algorithm.
 */
void IpaVc4::platformPrepareIsp([[maybe_unused]] const PrepareParams &params,
				RPiController::Metadata &rpiMetadata)
{
	ControlList ctrls(ispCtrls_);

	/* Lock the metadata buffer to avoid constant locks/unlocks. */
	std::unique_lock<RPiController::Metadata> lock(rpiMetadata);

	AwbStatus *awbStatus = rpiMetadata.getLocked<AwbStatus>("awb.status");
	if (awbStatus)
		applyAWB(awbStatus, ctrls);

	CcmStatus *ccmStatus = rpiMetadata.getLocked<CcmStatus>("ccm.status");
	if (ccmStatus)
		applyCCM(ccmStatus, ctrls);

	AgcPrepareStatus *dgStatus = rpiMetadata.getLocked<AgcPrepareStatus>("agc.prepare_status");
	if (dgStatus)
		applyDG(dgStatus, ctrls);

	AlscStatus *lsStatus = rpiMetadata.getLocked<AlscStatus>("alsc.status");
	if (lsStatus)
		applyLS(lsStatus, ctrls);

	ContrastStatus *contrastStatus = rpiMetadata.getLocked<ContrastStatus>("contrast.status");
	if (contrastStatus)
		applyGamma(contrastStatus, ctrls);

	BlackLevelStatus *blackLevelStatus = rpiMetadata.getLocked<BlackLevelStatus>("black_level.status");
	if (blackLevelStatus)
		applyBlackLevel(blackLevelStatus, ctrls);

	GeqStatus *geqStatus = rpiMetadata.getLocked<GeqStatus>("geq.status");
	if (geqStatus)
		applyGEQ(geqStatus, ctrls);

	DenoiseStatus *denoiseStatus = rpiMetadata.getLocked<DenoiseStatus>("denoise.status");
	if (denoiseStatus)
		applyDenoise(denoiseStatus, ctrls);

	SharpenStatus *sharpenStatus = rpiMetadata.getLocked<SharpenStatus>("sharpen.status");
	if (sharpenStatus)
		applySharpen(sharpenStatus, ctrls);

	DpcStatus *dpcStatus = rpiMetadata.getLocked<DpcStatus>("dpc.status");
	if (dpcStatus)
		applyDPC(dpcStatus, ctrls);

	const AfStatus *afStatus = rpiMetadata.getLocked<AfStatus>("af.status");
	if (afStatus) {
		/* Lens controls go to the lens device, not the ISP. */
		ControlList lensctrls(lensCtrls_);
		applyAF(afStatus, lensctrls);
		if (!lensctrls.empty())
			setLensControls.emit(lensctrls);
	}

	if (!ctrls.empty())
		setIspControls.emit(ctrls);
}
/*
 * Convert the raw bcm2835 ISP statistics buffer into the controller's
 * generic Statistics structure, normalising all region sums to the
 * controller's common bit depth.
 */
RPiController::StatisticsPtr IpaVc4::platformProcessStats(Span<uint8_t> mem)
{
	using namespace RPiController;

	const bcm2835_isp_stats *stats = reinterpret_cast<bcm2835_isp_stats *>(mem.data());
	StatisticsPtr statistics = std::make_shared<Statistics>(Statistics::AgcStatsPos::PreWb,
								Statistics::ColourStatsPos::PostLsc);
	const Controller::HardwareConfig &hw = controller_.getHardwareConfig();
	unsigned int i;

	/* RGB histograms are not used, so do not populate them. */
	statistics->yHist = RPiController::Histogram(stats->hist[0].g_hist,
						     hw.numHistogramBins);

	/* All region sums are based on a 16-bit normalised pipeline bit-depth. */
	unsigned int scale = Statistics::NormalisationFactorPow2 - hw.pipelineWidth;

	statistics->awbRegions.init(hw.awbRegions);
	for (i = 0; i < statistics->awbRegions.numRegions(); i++)
		statistics->awbRegions.set(i, { { stats->awb_stats[i].r_sum << scale,
						  stats->awb_stats[i].g_sum << scale,
						  stats->awb_stats[i].b_sum << scale },
						stats->awb_stats[i].counted,
						stats->awb_stats[i].notcounted });

	RPiController::AgcAlgorithm *agc = dynamic_cast<RPiController::AgcAlgorithm *>(
		controller_.getAlgorithm("agc"));
	if (!agc) {
		LOG(IPARPI, Debug) << "No AGC algorithm - not copying statistics";
		statistics->agcRegions.init(0);
	} else {
		/* AGC region sums are pre-weighted by the metering weights. */
		statistics->agcRegions.init(hw.agcRegions);
		const std::vector<double> &weights = agc->getWeights();
		for (i = 0; i < statistics->agcRegions.numRegions(); i++) {
			uint64_t rSum = (stats->agc_stats[i].r_sum << scale) * weights[i];
			uint64_t gSum = (stats->agc_stats[i].g_sum << scale) * weights[i];
			uint64_t bSum = (stats->agc_stats[i].b_sum << scale) * weights[i];
			uint32_t counted = stats->agc_stats[i].counted * weights[i];
			uint32_t notcounted = stats->agc_stats[i].notcounted * weights[i];
			statistics->agcRegions.set(i, { { rSum, gSum, bSum },
							counted,
							notcounted });
		}
	}

	statistics->focusRegions.init(hw.focusRegions);
	for (i = 0; i < statistics->focusRegions.numRegions(); i++)
		statistics->focusRegions.set(i, { stats->focus_stats[i].contrast_val[1][1] / 1000,
						  stats->focus_stats[i].contrast_val_num[1][1],
						  stats->focus_stats[i].contrast_val_num[1][0] });

	/* Optionally pass the raw statistics back to the application. */
	if (statsMetadataOutput_) {
		Span<const uint8_t> statsSpan(reinterpret_cast<const uint8_t *>(stats),
					      sizeof(bcm2835_isp_stats));
		libcameraMetadata_.set(controls::rpi::Bcm2835StatsOutput, statsSpan);
	}

	return statistics;
}
/*
 * Handle the VC4 specific subset of incoming controls; currently only the
 * draft noise reduction mode, which is mapped onto the spatial denoise
 * (SDN) algorithm's modes.
 */
void IpaVc4::handleControls(const ControlList &controls)
{
	/* Mapping of NoiseReductionMode control values onto denoise modes. */
	static const std::map<int32_t, RPiController::DenoiseMode> DenoiseModeTable = {
		{ controls::draft::NoiseReductionModeOff, RPiController::DenoiseMode::Off },
		{ controls::draft::NoiseReductionModeFast, RPiController::DenoiseMode::ColourFast },
		{ controls::draft::NoiseReductionModeHighQuality, RPiController::DenoiseMode::ColourHighQuality },
		{ controls::draft::NoiseReductionModeMinimal, RPiController::DenoiseMode::ColourOff },
		{ controls::draft::NoiseReductionModeZSL, RPiController::DenoiseMode::ColourHighQuality },
	};

	for (auto const &ctrl : controls) {
		switch (ctrl.first) {
		case controls::draft::NOISE_REDUCTION_MODE: {
			RPiController::DenoiseAlgorithm *sdn = dynamic_cast<RPiController::DenoiseAlgorithm *>(
				controller_.getAlgorithm("SDN"));
			/* Some platforms may have a combined "denoise" algorithm instead. */
			if (!sdn)
				sdn = dynamic_cast<RPiController::DenoiseAlgorithm *>(
					controller_.getAlgorithm("denoise"));
			if (!sdn) {
				LOG(IPARPI, Warning)
					<< "Could not set NOISE_REDUCTION_MODE - no SDN algorithm";
				return;
			}

			int32_t idx = ctrl.second.get<int32_t>();
			auto mode = DenoiseModeTable.find(idx);
			if (mode != DenoiseModeTable.end())
				sdn->setMode(mode->second);
			break;
		}
		}
	}
}
/*
 * Check that every ISP control this IPA relies on is present in the
 * control map advertised by the ISP device.
 *
 * \return True if all required controls were found, false otherwise
 */
bool IpaVc4::validateIspControls()
{
	static const uint32_t requiredCtrls[] = {
		V4L2_CID_RED_BALANCE,
		V4L2_CID_BLUE_BALANCE,
		V4L2_CID_DIGITAL_GAIN,
		V4L2_CID_USER_BCM2835_ISP_CC_MATRIX,
		V4L2_CID_USER_BCM2835_ISP_GAMMA,
		V4L2_CID_USER_BCM2835_ISP_BLACK_LEVEL,
		V4L2_CID_USER_BCM2835_ISP_GEQ,
		V4L2_CID_USER_BCM2835_ISP_DENOISE,
		V4L2_CID_USER_BCM2835_ISP_SHARPEN,
		V4L2_CID_USER_BCM2835_ISP_DPC,
		V4L2_CID_USER_BCM2835_ISP_LENS_SHADING,
		V4L2_CID_USER_BCM2835_ISP_CDN,
	};

	for (uint32_t id : requiredCtrls) {
		if (ispCtrls_.find(id) == ispCtrls_.end()) {
			/* A missing control makes the whole configuration unusable. */
			LOG(IPARPI, Error) << "Unable to find ISP control "
					   << utils::hex(id);
			return false;
		}
	}

	return true;
}
/*
 * Program the red/blue white balance gains computed by the AWB algorithm.
 * The driver expects the gains as integer multiples of 1/1000.
 */
void IpaVc4::applyAWB(const struct AwbStatus *awbStatus, ControlList &ctrls)
{
	const double gainR = awbStatus->gainR;
	const double gainB = awbStatus->gainB;

	LOG(IPARPI, Debug) << "Applying WB R: " << gainR << " B: "
			   << gainB;

	ctrls.set(V4L2_CID_RED_BALANCE, static_cast<int32_t>(gainR * 1000));
	ctrls.set(V4L2_CID_BLUE_BALANCE, static_cast<int32_t>(gainB * 1000));
}
/*
 * Program the digital gain from the AGC prepare status. The driver takes
 * the gain as an integer multiple of 1/1000.
 */
void IpaVc4::applyDG(const struct AgcPrepareStatus *dgStatus, ControlList &ctrls)
{
	const int32_t gain = static_cast<int32_t>(dgStatus->digitalGain * 1000);

	ctrls.set(V4L2_CID_DIGITAL_GAIN, gain);
}
void IpaVc4::applyCCM(const struct CcmStatus *ccmStatus, ControlList &ctrls)
{
bcm2835_isp_custom_ccm ccm;
for (int i = 0; i < 9; i++) {
ccm.ccm.ccm[i / 3][i % 3].den = 1000;
ccm.ccm.ccm[i / 3][i % 3].num = 1000 * ccmStatus->matrix[i];
}
ccm.enabled = 1;
ccm.ccm.offsets[0] = ccm.ccm.offsets[1] = ccm.ccm.offsets[2] = 0;
ControlValue c(Span<const uint8_t>{ reinterpret_cast<uint8_t *>(&ccm),
sizeof(ccm) });
ctrls.set(V4L2_CID_USER_BCM2835_ISP_CC_MATRIX, c);
}
void IpaVc4::applyBlackLevel(const struct BlackLevelStatus *blackLevelStatus, ControlList &ctrls)
{
bcm2835_isp_black_level blackLevel;
blackLevel.enabled = 1;
blackLevel.black_level_r = blackLevelStatus->blackLevelR;
blackLevel.black_level_g = blackLevelStatus->blackLevelG;
blackLevel.black_level_b = blackLevelStatus->blackLevelB;
ControlValue c(Span<const uint8_t>{ reinterpret_cast<uint8_t *>(&blackLevel),
sizeof(blackLevel) });
ctrls.set(V4L2_CID_USER_BCM2835_ISP_BLACK_LEVEL, c);
}
void IpaVc4::applyGamma(const struct ContrastStatus *contrastStatus, ControlList &ctrls)
{
const unsigned int numGammaPoints = controller_.getHardwareConfig().numGammaPoints;
struct bcm2835_isp_gamma gamma;
for (unsigned int i = 0; i < numGammaPoints - 1; i++) {
int x = i < 16 ? i * 1024
: (i < 24 ? (i - 16) * 2048 + 16384
: (i - 24) * 4096 + 32768);
gamma.x[i] = x;
gamma.y[i] = std::min<uint16_t>(65535, contrastStatus->gammaCurve.eval(x));
}
gamma.x[numGammaPoints - 1] = 65535;
gamma.y[numGammaPoints - 1] = 65535;
gamma.enabled = 1;
ControlValue c(Span<const uint8_t>{ reinterpret_cast<uint8_t *>(&gamma),
sizeof(gamma) });
ctrls.set(V4L2_CID_USER_BCM2835_ISP_GAMMA, c);
}
void IpaVc4::applyGEQ(const struct GeqStatus *geqStatus, ControlList &ctrls)
{
bcm2835_isp_geq geq;
geq.enabled = 1;
geq.offset = geqStatus->offset;
geq.slope.den = 1000;
geq.slope.num = 1000 * geqStatus->slope;
ControlValue c(Span<const uint8_t>{ reinterpret_cast<uint8_t *>(&geq),
sizeof(geq) });
ctrls.set(V4L2_CID_USER_BCM2835_ISP_GEQ, c);
}
/*
 * Program the spatial denoise (SDN) block and set the colour denoise (CDN)
 * mode to match the requested denoise operating mode.
 *
 * All fractional parameters are fixed point with a denominator of 1000.
 *
 * \param[in] denoiseStatus Denoise parameters computed by the controller
 * \param[out] ctrls V4L2 control list the ISP controls are appended to
 */
void IpaVc4::applyDenoise(const struct DenoiseStatus *denoiseStatus, ControlList &ctrls)
{
	using RPiController::DenoiseMode;

	/*
	 * Value-initialise both structures so that no indeterminate bytes
	 * are handed to the driver. In particular, the default (colour
	 * denoise disabled) case below would otherwise leave cdn.mode
	 * uninitialised while the whole struct is serialised into the
	 * control value.
	 */
	bcm2835_isp_denoise denoise = {};
	bcm2835_isp_cdn cdn = {};

	DenoiseMode mode = static_cast<DenoiseMode>(denoiseStatus->mode);

	denoise.enabled = mode != DenoiseMode::Off;
	denoise.constant = denoiseStatus->noiseConstant;
	denoise.slope.num = 1000 * denoiseStatus->noiseSlope;
	denoise.slope.den = 1000;
	denoise.strength.num = 1000 * denoiseStatus->strength;
	denoise.strength.den = 1000;

	/* Set the CDN mode to match the SDN operating mode. */
	switch (mode) {
	case DenoiseMode::ColourFast:
		cdn.enabled = 1;
		cdn.mode = CDN_MODE_FAST;
		break;
	case DenoiseMode::ColourHighQuality:
		cdn.enabled = 1;
		cdn.mode = CDN_MODE_HIGH_QUALITY;
		break;
	default:
		cdn.enabled = 0;
	}

	ControlValue c(Span<const uint8_t>{ reinterpret_cast<uint8_t *>(&denoise),
					    sizeof(denoise) });
	ctrls.set(V4L2_CID_USER_BCM2835_ISP_DENOISE, c);

	c = ControlValue(Span<const uint8_t>{ reinterpret_cast<uint8_t *>(&cdn),
					      sizeof(cdn) });
	ctrls.set(V4L2_CID_USER_BCM2835_ISP_CDN, c);
}
void IpaVc4::applySharpen(const struct SharpenStatus *sharpenStatus, ControlList &ctrls)
{
bcm2835_isp_sharpen sharpen;
sharpen.enabled = 1;
sharpen.threshold.num = 1000 * sharpenStatus->threshold;
sharpen.threshold.den = 1000;
sharpen.strength.num = 1000 * sharpenStatus->strength;
sharpen.strength.den = 1000;
sharpen.limit.num = 1000 * sharpenStatus->limit;
sharpen.limit.den = 1000;
ControlValue c(Span<const uint8_t>{ reinterpret_cast<uint8_t *>(&sharpen),
sizeof(sharpen) });
ctrls.set(V4L2_CID_USER_BCM2835_ISP_SHARPEN, c);
}
void IpaVc4::applyDPC(const struct DpcStatus *dpcStatus, ControlList &ctrls)
{
bcm2835_isp_dpc dpc;
dpc.enabled = 1;
dpc.strength = dpcStatus->strength;
ControlValue c(Span<const uint8_t>{ reinterpret_cast<uint8_t *>(&dpc),
sizeof(dpc) });
ctrls.set(V4L2_CID_USER_BCM2835_ISP_DPC, c);
}
void IpaVc4::applyLS(const struct AlscStatus *lsStatus, ControlList &ctrls)
{
/*
* Program lens shading tables into pipeline.
* Choose smallest cell size that won't exceed 63x48 cells.
*/
const int cellSizes[] = { 16, 32, 64, 128, 256 };
unsigned int numCells = std::size(cellSizes);
unsigned int i, w, h, cellSize;
for (i = 0; i < numCells; i++) {
cellSize = cellSizes[i];
w = (mode_.width + cellSize - 1) / cellSize;
h = (mode_.height + cellSize - 1) / cellSize;
if (w < 64 && h <= 48)
break;
}
if (i == numCells) {
LOG(IPARPI, Error) << "Cannot find cell size";
return;
}
/* We're going to supply corner sampled tables, 16 bit samples. */
w++, h++;
bcm2835_isp_lens_shading ls = {
.enabled = 1,
.grid_cell_size = cellSize,
.grid_width = w,
.grid_stride = w,
.grid_height = h,
/* .dmabuf will be filled in by pipeline handler. */
.dmabuf = 0,
.ref_transform = 0,
.corner_sampled = 1,
.gain_format = GAIN_FORMAT_U4P10
};
if (!lsTable_ || w * h * 4 * sizeof(uint16_t) > MaxLsGridSize) {
LOG(IPARPI, Error) << "Do not have a correctly allocate lens shading table!";
return;
}
if (lsStatus) {
/* Format will be u4.10 */
uint16_t *grid = static_cast<uint16_t *>(lsTable_);
resampleTable(grid, lsStatus->r, w, h);
resampleTable(grid + w * h, lsStatus->g, w, h);
memcpy(grid + 2 * w * h, grid + w * h, w * h * sizeof(uint16_t));
resampleTable(grid + 3 * w * h, lsStatus->b, w, h);
}
ControlValue c(Span<const uint8_t>{ reinterpret_cast<uint8_t *>(&ls),
sizeof(ls) });
ctrls.set(V4L2_CID_USER_BCM2835_ISP_LENS_SHADING, c);
}
/*
 * Drive the lens to the position requested by the AF algorithm, if it
 * supplied one.
 */
void IpaVc4::applyAF(const struct AfStatus *afStatus, ControlList &lensCtrls)
{
	if (!afStatus->lensSetting)
		return;

	ControlValue v(afStatus->lensSetting.value());
	lensCtrls.set(V4L2_CID_FOCUS_ABSOLUTE, v);
}
/*
 * Resamples a 16x12 table with central sampling to destW x destH with corner
 * sampling.
 *
 * The source is treated as a 16x12 grid of cell-centre samples; the output
 * is bilinearly interpolated at corner positions, which is why both axes
 * start half a source cell outside the grid (at -0.5) and clamp indices at
 * the table edges. Results are written as u4.10 fixed point (max 16383).
 */
void IpaVc4::resampleTable(uint16_t dest[], const std::vector<double> &src,
			   int destW, int destH)
{
	/*
	 * Precalculate and cache the x sampling locations and phases to
	 * save recomputing them on every row.
	 */
	assert(destW > 1 && destH > 1 && destW <= 64);
	int xLo[64], xHi[64];
	double xf[64];
	double x = -0.5, xInc = 16.0 / (destW - 1);
	for (int i = 0; i < destW; i++, x += xInc) {
		xLo[i] = floor(x);
		xf[i] = x - xLo[i]; /* interpolation phase within the cell */
		/* Clamp both neighbour indices to the 16-wide source table. */
		xHi[i] = xLo[i] < 15 ? xLo[i] + 1 : 15;
		xLo[i] = xLo[i] > 0 ? xLo[i] : 0;
	}
	/* Now march over the output table generating the new values. */
	double y = -0.5, yInc = 12.0 / (destH - 1);
	for (int j = 0; j < destH; j++, y += yInc) {
		/* Same neighbour/phase computation for the y axis, per row. */
		int yLo = floor(y);
		double yf = y - yLo;
		int yHi = yLo < 11 ? yLo + 1 : 11;
		yLo = yLo > 0 ? yLo : 0;
		double const *rowAbove = src.data() + yLo * 16;
		double const *rowBelow = src.data() + yHi * 16;
		for (int i = 0; i < destW; i++) {
			/* Bilinear blend of the four neighbouring samples. */
			double above = rowAbove[xLo[i]] * (1 - xf[i]) + rowAbove[xHi[i]] * xf[i];
			double below = rowBelow[xLo[i]] * (1 - xf[i]) + rowBelow[xHi[i]] * xf[i];
			/* Scale by 1024 and round to nearest for u4.10. */
			int result = floor(1024 * (above * (1 - yf) + below * yf) + .5);
			*(dest++) = result > 16383 ? 16383 : result; /* want u4.10 */
		}
	}
}
} /* namespace ipa::RPi */
/*
 * External IPA module interface
 *
 * These symbols are looked up by the libcamera IPA module loader; they must
 * have C linkage and match the IPAModuleInfo/ipaCreate() contract.
 */
extern "C" {
const struct IPAModuleInfo ipaModuleInfo = {
	IPA_MODULE_API_VERSION,
	1,
	/* Matches the "rpi/vc4" pipeline handler this IPA serves. */
	"rpi/vc4",
	"rpi/vc4",
};

/* Factory entry point: instantiate the VC4 IPA for the pipeline handler. */
IPAInterface *ipaCreate()
{
	return new ipa::RPi::IpaVc4();
}
} /* extern "C" */
} /* namespace libcamera */
|
0 | repos/libcamera/src/ipa/rpi/vc4 | repos/libcamera/src/ipa/rpi/vc4/data/ov5647.json | {
"version": 2.0,
"target": "bcm2835",
"algorithms": [
{
"rpi.black_level":
{
"black_level": 1024
}
},
{
"rpi.dpc": { }
},
{
"rpi.lux":
{
"reference_shutter_speed": 21663,
"reference_gain": 1.0,
"reference_aperture": 1.0,
"reference_lux": 987,
"reference_Y": 8961
}
},
{
"rpi.noise":
{
"reference_constant": 0,
"reference_slope": 4.25
}
},
{
"rpi.geq":
{
"offset": 401,
"slope": 0.05619
}
},
{
"rpi.sdn": { }
},
{
"rpi.awb":
{
"priors": [
{
"lux": 0,
"prior":
[
2000, 1.0,
3000, 0.0,
13000, 0.0
]
},
{
"lux": 800,
"prior":
[
2000, 0.0,
6000, 2.0,
13000, 2.0
]
},
{
"lux": 1500,
"prior":
[
2000, 0.0,
4000, 1.0,
6000, 6.0,
6500, 7.0,
7000, 1.0,
13000, 1.0
]
}
],
"modes":
{
"auto":
{
"lo": 2500,
"hi": 8000
},
"incandescent":
{
"lo": 2500,
"hi": 3000
},
"tungsten":
{
"lo": 3000,
"hi": 3500
},
"fluorescent":
{
"lo": 4000,
"hi": 4700
},
"indoor":
{
"lo": 3000,
"hi": 5000
},
"daylight":
{
"lo": 5500,
"hi": 6500
},
"cloudy":
{
"lo": 7000,
"hi": 8600
}
},
"bayes": 1,
"ct_curve":
[
2500.0, 1.0289, 0.4503,
2803.0, 0.9428, 0.5108,
2914.0, 0.9406, 0.5127,
3605.0, 0.8261, 0.6249,
4540.0, 0.7331, 0.7533,
5699.0, 0.6715, 0.8627,
8625.0, 0.6081, 1.0012
],
"sensitivity_r": 1.05,
"sensitivity_b": 1.05,
"transverse_pos": 0.0321,
"transverse_neg": 0.04313
}
},
{
"rpi.agc":
{
"channels": [
{
"metering_modes":
{
"centre-weighted":
{
"weights":
[
3, 3, 3, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0
]
},
"spot":
{
"weights":
[
2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
},
"matrix":
{
"weights":
[
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
]
}
},
"exposure_modes":
{
"normal":
{
"shutter": [ 100, 10000, 30000, 60000, 66666 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 8.0 ]
},
"short":
{
"shutter": [ 100, 5000, 10000, 20000, 33333 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 8.0 ]
},
"long":
{
"shutter": [ 100, 10000, 30000, 60000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 12.0 ]
}
},
"constraint_modes":
{
"normal": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.5,
1000, 0.5
]
}
],
"highlight": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.5,
1000, 0.5
]
},
{
"bound": "UPPER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.8,
1000, 0.8
]
}
],
"shadows": [
{
"bound": "LOWER",
"q_lo": 0.0,
"q_hi": 0.5,
"y_target":
[
0, 0.17,
1000, 0.17
]
}
]
},
"y_target":
[
0, 0.16,
1000, 0.165,
10000, 0.17
],
"base_ev": 1.25
},
{
"base_ev": 1.25,
"metering_modes":
{
"centre-weighted":
{
"weights":
[
3, 3, 3, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0
]
},
"spot":
{
"weights":
[
2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
},
"matrix":
{
"weights":
[
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
]
}
},
"exposure_modes":
{
"normal":
{
"shutter": [ 100, 10000, 30000, 60000, 66666 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 8.0 ]
},
"short":
{
"shutter": [ 100, 5000, 10000, 20000, 33333 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 8.0 ]
},
"long":
{
"shutter": [ 100, 10000, 30000, 60000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 12.0 ]
}
},
"constraint_modes":
{
"normal": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.5,
1000, 0.5
]
}
],
"highlight": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.5,
1000, 0.5
]
},
{
"bound": "UPPER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.8,
1000, 0.8
]
}
],
"shadows": [
{
"bound": "LOWER",
"q_lo": 0.0,
"q_hi": 0.5,
"y_target":
[
0, 0.17,
1000, 0.17
]
}
]
},
"y_target":
[
0, 0.16,
1000, 0.165,
10000, 0.17
]
},
{
"base_ev": 1.25,
"metering_modes":
{
"centre-weighted":
{
"weights":
[
3, 3, 3, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0
]
},
"spot":
{
"weights":
[
2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
},
"matrix":
{
"weights":
[
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
]
}
},
"exposure_modes":
{
"normal":
{
"shutter": [ 100, 10000, 30000, 60000, 66666 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 8.0 ]
},
"short":
{
"shutter": [ 100, 5000, 10000, 20000, 33333 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 8.0 ]
},
"long":
{
"shutter": [ 100, 10000, 30000, 60000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 12.0 ]
}
},
"constraint_modes":
{
"normal": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.5,
1000, 0.5
]
}
],
"highlight": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.5,
1000, 0.5
]
},
{
"bound": "UPPER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.8,
1000, 0.8
]
}
],
"shadows": [
{
"bound": "LOWER",
"q_lo": 0.0,
"q_hi": 0.5,
"y_target":
[
0, 0.17,
1000, 0.17
]
}
]
},
"y_target":
[
0, 0.16,
1000, 0.165,
10000, 0.17
]
}
]
}
},
{
"rpi.alsc":
{
"omega": 1.3,
"n_iter": 100,
"luminance_strength": 0.5,
"calibrations_Cr": [
{
"ct": 3000,
"table":
[
1.105, 1.103, 1.093, 1.083, 1.071, 1.065, 1.065, 1.065, 1.066, 1.069, 1.072, 1.077, 1.084, 1.089, 1.093, 1.093,
1.103, 1.096, 1.084, 1.072, 1.059, 1.051, 1.047, 1.047, 1.051, 1.053, 1.059, 1.067, 1.075, 1.082, 1.085, 1.086,
1.096, 1.084, 1.072, 1.059, 1.051, 1.045, 1.039, 1.038, 1.039, 1.045, 1.049, 1.057, 1.063, 1.072, 1.081, 1.082,
1.092, 1.075, 1.061, 1.052, 1.045, 1.039, 1.036, 1.035, 1.035, 1.039, 1.044, 1.049, 1.056, 1.063, 1.072, 1.081,
1.092, 1.073, 1.058, 1.048, 1.043, 1.038, 1.035, 1.033, 1.033, 1.035, 1.039, 1.044, 1.051, 1.057, 1.069, 1.078,
1.091, 1.068, 1.054, 1.045, 1.041, 1.038, 1.035, 1.032, 1.032, 1.032, 1.036, 1.041, 1.045, 1.055, 1.069, 1.078,
1.091, 1.068, 1.052, 1.043, 1.041, 1.038, 1.035, 1.032, 1.031, 1.032, 1.034, 1.036, 1.043, 1.055, 1.069, 1.078,
1.092, 1.068, 1.052, 1.047, 1.042, 1.041, 1.038, 1.035, 1.032, 1.032, 1.035, 1.039, 1.043, 1.055, 1.071, 1.079,
1.092, 1.073, 1.057, 1.051, 1.047, 1.047, 1.044, 1.041, 1.038, 1.038, 1.039, 1.043, 1.051, 1.059, 1.076, 1.083,
1.092, 1.081, 1.068, 1.058, 1.056, 1.056, 1.053, 1.052, 1.049, 1.048, 1.048, 1.051, 1.059, 1.066, 1.083, 1.085,
1.091, 1.087, 1.081, 1.068, 1.065, 1.064, 1.062, 1.062, 1.061, 1.056, 1.056, 1.056, 1.064, 1.069, 1.084, 1.089,
1.091, 1.089, 1.085, 1.079, 1.069, 1.068, 1.067, 1.067, 1.067, 1.063, 1.061, 1.063, 1.068, 1.069, 1.081, 1.092
]
},
{
"ct": 5000,
"table":
[
1.486, 1.484, 1.468, 1.449, 1.427, 1.403, 1.399, 1.399, 1.399, 1.404, 1.413, 1.433, 1.454, 1.473, 1.482, 1.488,
1.484, 1.472, 1.454, 1.431, 1.405, 1.381, 1.365, 1.365, 1.367, 1.373, 1.392, 1.411, 1.438, 1.458, 1.476, 1.481,
1.476, 1.458, 1.433, 1.405, 1.381, 1.361, 1.339, 1.334, 1.334, 1.346, 1.362, 1.391, 1.411, 1.438, 1.462, 1.474,
1.471, 1.443, 1.417, 1.388, 1.361, 1.339, 1.321, 1.313, 1.313, 1.327, 1.346, 1.362, 1.391, 1.422, 1.453, 1.473,
1.469, 1.439, 1.408, 1.377, 1.349, 1.321, 1.312, 1.299, 1.299, 1.311, 1.327, 1.348, 1.378, 1.415, 1.446, 1.468,
1.468, 1.434, 1.402, 1.371, 1.341, 1.316, 1.299, 1.296, 1.295, 1.299, 1.314, 1.338, 1.371, 1.408, 1.441, 1.466,
1.468, 1.434, 1.401, 1.371, 1.341, 1.316, 1.301, 1.296, 1.295, 1.297, 1.314, 1.338, 1.369, 1.408, 1.441, 1.465,
1.469, 1.436, 1.401, 1.374, 1.348, 1.332, 1.315, 1.301, 1.301, 1.313, 1.324, 1.342, 1.372, 1.409, 1.442, 1.465,
1.471, 1.444, 1.413, 1.388, 1.371, 1.348, 1.332, 1.323, 1.323, 1.324, 1.342, 1.362, 1.386, 1.418, 1.449, 1.467,
1.473, 1.454, 1.431, 1.407, 1.388, 1.371, 1.359, 1.352, 1.351, 1.351, 1.362, 1.383, 1.404, 1.433, 1.462, 1.472,
1.474, 1.461, 1.447, 1.424, 1.407, 1.394, 1.385, 1.381, 1.379, 1.381, 1.383, 1.401, 1.419, 1.444, 1.466, 1.481,
1.474, 1.464, 1.455, 1.442, 1.421, 1.408, 1.403, 1.403, 1.403, 1.399, 1.402, 1.415, 1.432, 1.446, 1.467, 1.483
]
},
{
"ct": 6500,
"table":
[
1.567, 1.565, 1.555, 1.541, 1.525, 1.518, 1.518, 1.518, 1.521, 1.527, 1.532, 1.541, 1.551, 1.559, 1.567, 1.569,
1.565, 1.557, 1.542, 1.527, 1.519, 1.515, 1.511, 1.516, 1.519, 1.524, 1.528, 1.533, 1.542, 1.553, 1.559, 1.562,
1.561, 1.546, 1.532, 1.521, 1.518, 1.515, 1.511, 1.516, 1.519, 1.524, 1.528, 1.529, 1.533, 1.542, 1.554, 1.559,
1.561, 1.539, 1.526, 1.524, 1.521, 1.521, 1.522, 1.524, 1.525, 1.531, 1.529, 1.529, 1.531, 1.538, 1.549, 1.558,
1.559, 1.538, 1.526, 1.525, 1.524, 1.528, 1.534, 1.536, 1.536, 1.536, 1.532, 1.529, 1.531, 1.537, 1.548, 1.556,
1.561, 1.537, 1.525, 1.524, 1.526, 1.532, 1.537, 1.539, 1.538, 1.537, 1.532, 1.529, 1.529, 1.537, 1.546, 1.556,
1.561, 1.536, 1.524, 1.522, 1.525, 1.532, 1.538, 1.538, 1.537, 1.533, 1.528, 1.526, 1.527, 1.536, 1.546, 1.555,
1.561, 1.537, 1.522, 1.521, 1.524, 1.531, 1.536, 1.537, 1.534, 1.529, 1.526, 1.522, 1.523, 1.534, 1.547, 1.555,
1.561, 1.538, 1.524, 1.522, 1.526, 1.531, 1.535, 1.535, 1.534, 1.527, 1.524, 1.522, 1.522, 1.535, 1.549, 1.556,
1.558, 1.543, 1.532, 1.526, 1.526, 1.529, 1.534, 1.535, 1.533, 1.526, 1.523, 1.522, 1.524, 1.537, 1.552, 1.557,
1.555, 1.546, 1.541, 1.528, 1.527, 1.528, 1.531, 1.533, 1.531, 1.527, 1.522, 1.522, 1.526, 1.536, 1.552, 1.561,
1.555, 1.547, 1.542, 1.538, 1.526, 1.526, 1.529, 1.531, 1.529, 1.528, 1.519, 1.519, 1.527, 1.531, 1.543, 1.561
]
}
],
"calibrations_Cb": [
{
"ct": 3000,
"table":
[
1.684, 1.688, 1.691, 1.697, 1.709, 1.722, 1.735, 1.745, 1.747, 1.745, 1.731, 1.719, 1.709, 1.705, 1.699, 1.699,
1.684, 1.689, 1.694, 1.708, 1.721, 1.735, 1.747, 1.762, 1.762, 1.758, 1.745, 1.727, 1.716, 1.707, 1.701, 1.699,
1.684, 1.691, 1.704, 1.719, 1.734, 1.755, 1.772, 1.786, 1.789, 1.788, 1.762, 1.745, 1.724, 1.709, 1.702, 1.698,
1.682, 1.694, 1.709, 1.729, 1.755, 1.773, 1.798, 1.815, 1.817, 1.808, 1.788, 1.762, 1.733, 1.714, 1.704, 1.699,
1.682, 1.693, 1.713, 1.742, 1.772, 1.798, 1.815, 1.829, 1.831, 1.821, 1.807, 1.773, 1.742, 1.716, 1.703, 1.699,
1.681, 1.693, 1.713, 1.742, 1.772, 1.799, 1.828, 1.839, 1.839, 1.828, 1.807, 1.774, 1.742, 1.715, 1.699, 1.695,
1.679, 1.691, 1.712, 1.739, 1.771, 1.798, 1.825, 1.829, 1.831, 1.818, 1.801, 1.774, 1.738, 1.712, 1.695, 1.691,
1.676, 1.685, 1.703, 1.727, 1.761, 1.784, 1.801, 1.817, 1.817, 1.801, 1.779, 1.761, 1.729, 1.706, 1.691, 1.684,
1.669, 1.678, 1.692, 1.714, 1.741, 1.764, 1.784, 1.795, 1.795, 1.779, 1.761, 1.738, 1.713, 1.696, 1.683, 1.679,
1.664, 1.671, 1.679, 1.693, 1.716, 1.741, 1.762, 1.769, 1.769, 1.753, 1.738, 1.713, 1.701, 1.687, 1.681, 1.676,
1.661, 1.664, 1.671, 1.679, 1.693, 1.714, 1.732, 1.739, 1.739, 1.729, 1.708, 1.701, 1.685, 1.679, 1.676, 1.677,
1.659, 1.661, 1.664, 1.671, 1.679, 1.693, 1.712, 1.714, 1.714, 1.708, 1.701, 1.687, 1.679, 1.672, 1.673, 1.677
]
},
{
"ct": 5000,
"table":
[
1.177, 1.183, 1.187, 1.191, 1.197, 1.206, 1.213, 1.215, 1.215, 1.215, 1.211, 1.204, 1.196, 1.191, 1.183, 1.182,
1.179, 1.185, 1.191, 1.196, 1.206, 1.217, 1.224, 1.229, 1.229, 1.226, 1.221, 1.212, 1.202, 1.195, 1.188, 1.182,
1.183, 1.191, 1.196, 1.206, 1.217, 1.229, 1.239, 1.245, 1.245, 1.245, 1.233, 1.221, 1.212, 1.199, 1.193, 1.187,
1.183, 1.192, 1.201, 1.212, 1.229, 1.241, 1.252, 1.259, 1.259, 1.257, 1.245, 1.233, 1.217, 1.201, 1.194, 1.192,
1.183, 1.192, 1.202, 1.219, 1.238, 1.252, 1.261, 1.269, 1.268, 1.261, 1.257, 1.241, 1.223, 1.204, 1.194, 1.191,
1.182, 1.192, 1.202, 1.219, 1.239, 1.255, 1.266, 1.271, 1.271, 1.265, 1.258, 1.242, 1.223, 1.205, 1.192, 1.191,
1.181, 1.189, 1.199, 1.218, 1.239, 1.254, 1.262, 1.268, 1.268, 1.258, 1.253, 1.241, 1.221, 1.204, 1.191, 1.187,
1.179, 1.184, 1.193, 1.211, 1.232, 1.243, 1.254, 1.257, 1.256, 1.253, 1.242, 1.232, 1.216, 1.199, 1.187, 1.183,
1.174, 1.179, 1.187, 1.202, 1.218, 1.232, 1.243, 1.246, 1.246, 1.239, 1.232, 1.218, 1.207, 1.191, 1.183, 1.179,
1.169, 1.175, 1.181, 1.189, 1.202, 1.218, 1.229, 1.232, 1.232, 1.224, 1.218, 1.207, 1.199, 1.185, 1.181, 1.174,
1.164, 1.168, 1.175, 1.179, 1.189, 1.201, 1.209, 1.213, 1.213, 1.209, 1.201, 1.198, 1.186, 1.181, 1.174, 1.173,
1.161, 1.166, 1.171, 1.175, 1.179, 1.189, 1.197, 1.198, 1.198, 1.197, 1.196, 1.186, 1.182, 1.175, 1.173, 1.173
]
},
{
"ct": 6500,
"table":
[
1.166, 1.171, 1.173, 1.178, 1.187, 1.193, 1.201, 1.205, 1.205, 1.205, 1.199, 1.191, 1.184, 1.179, 1.174, 1.171,
1.166, 1.172, 1.176, 1.184, 1.195, 1.202, 1.209, 1.216, 1.216, 1.213, 1.208, 1.201, 1.189, 1.182, 1.176, 1.171,
1.166, 1.173, 1.183, 1.195, 1.202, 1.214, 1.221, 1.228, 1.229, 1.228, 1.221, 1.209, 1.201, 1.186, 1.179, 1.174,
1.165, 1.174, 1.187, 1.201, 1.214, 1.223, 1.235, 1.241, 1.242, 1.241, 1.229, 1.221, 1.205, 1.188, 1.181, 1.177,
1.165, 1.174, 1.189, 1.207, 1.223, 1.235, 1.242, 1.253, 1.252, 1.245, 1.241, 1.228, 1.211, 1.189, 1.181, 1.178,
1.164, 1.173, 1.189, 1.207, 1.224, 1.238, 1.249, 1.255, 1.255, 1.249, 1.242, 1.228, 1.211, 1.191, 1.179, 1.176,
1.163, 1.172, 1.187, 1.207, 1.223, 1.237, 1.245, 1.253, 1.252, 1.243, 1.237, 1.228, 1.207, 1.188, 1.176, 1.173,
1.159, 1.167, 1.179, 1.199, 1.217, 1.227, 1.237, 1.241, 1.241, 1.237, 1.228, 1.217, 1.201, 1.184, 1.174, 1.169,
1.156, 1.164, 1.172, 1.189, 1.205, 1.217, 1.226, 1.229, 1.229, 1.222, 1.217, 1.204, 1.192, 1.177, 1.171, 1.166,
1.154, 1.159, 1.166, 1.177, 1.189, 1.205, 1.213, 1.216, 1.216, 1.209, 1.204, 1.192, 1.183, 1.172, 1.168, 1.162,
1.152, 1.155, 1.161, 1.166, 1.177, 1.188, 1.195, 1.198, 1.199, 1.196, 1.187, 1.183, 1.173, 1.168, 1.163, 1.162,
1.151, 1.154, 1.158, 1.162, 1.168, 1.177, 1.183, 1.184, 1.184, 1.184, 1.182, 1.172, 1.168, 1.165, 1.162, 1.161
]
}
],
"luminance_lut":
[
2.236, 2.111, 1.912, 1.741, 1.579, 1.451, 1.379, 1.349, 1.349, 1.361, 1.411, 1.505, 1.644, 1.816, 2.034, 2.159,
2.139, 1.994, 1.796, 1.625, 1.467, 1.361, 1.285, 1.248, 1.239, 1.265, 1.321, 1.408, 1.536, 1.703, 1.903, 2.087,
2.047, 1.898, 1.694, 1.511, 1.373, 1.254, 1.186, 1.152, 1.142, 1.166, 1.226, 1.309, 1.441, 1.598, 1.799, 1.978,
1.999, 1.824, 1.615, 1.429, 1.281, 1.179, 1.113, 1.077, 1.071, 1.096, 1.153, 1.239, 1.357, 1.525, 1.726, 1.915,
1.976, 1.773, 1.563, 1.374, 1.222, 1.119, 1.064, 1.032, 1.031, 1.049, 1.099, 1.188, 1.309, 1.478, 1.681, 1.893,
1.973, 1.756, 1.542, 1.351, 1.196, 1.088, 1.028, 1.011, 1.004, 1.029, 1.077, 1.169, 1.295, 1.459, 1.663, 1.891,
1.973, 1.761, 1.541, 1.349, 1.193, 1.087, 1.031, 1.006, 1.006, 1.023, 1.075, 1.169, 1.298, 1.463, 1.667, 1.891,
1.982, 1.789, 1.568, 1.373, 1.213, 1.111, 1.051, 1.029, 1.024, 1.053, 1.106, 1.199, 1.329, 1.495, 1.692, 1.903,
2.015, 1.838, 1.621, 1.426, 1.268, 1.159, 1.101, 1.066, 1.068, 1.099, 1.166, 1.259, 1.387, 1.553, 1.751, 1.937,
2.076, 1.911, 1.692, 1.507, 1.346, 1.236, 1.169, 1.136, 1.139, 1.174, 1.242, 1.349, 1.475, 1.641, 1.833, 2.004,
2.193, 2.011, 1.798, 1.604, 1.444, 1.339, 1.265, 1.235, 1.237, 1.273, 1.351, 1.461, 1.598, 1.758, 1.956, 2.125,
2.263, 2.154, 1.916, 1.711, 1.549, 1.432, 1.372, 1.356, 1.356, 1.383, 1.455, 1.578, 1.726, 1.914, 2.119, 2.211
],
"sigma": 0.006,
"sigma_Cb": 0.00208
}
},
{
"rpi.contrast":
{
"ce_enable": 1,
"gamma_curve":
[
0, 0,
1024, 5040,
2048, 9338,
3072, 12356,
4096, 15312,
5120, 18051,
6144, 20790,
7168, 23193,
8192, 25744,
9216, 27942,
10240, 30035,
11264, 32005,
12288, 33975,
13312, 35815,
14336, 37600,
15360, 39168,
16384, 40642,
18432, 43379,
20480, 45749,
22528, 47753,
24576, 49621,
26624, 51253,
28672, 52698,
30720, 53796,
32768, 54876,
36864, 57012,
40960, 58656,
45056, 59954,
49152, 61183,
53248, 62355,
57344, 63419,
61440, 64476,
65535, 65535
]
}
},
{
"rpi.ccm":
{
"ccms": [
{
"ct": 2873,
"ccm":
[
1.88195, -0.26249, -0.61946,
-0.63842, 2.11535, -0.47693,
-0.13531, -0.99739, 2.13271
]
},
{
"ct": 2965,
"ccm":
[
2.15048, -0.51859, -0.63189,
-0.53572, 1.92585, -0.39013,
0.01831, -1.48576, 2.46744
]
},
{
"ct": 3606,
"ccm":
[
1.97522, -0.43847, -0.53675,
-0.56151, 1.99765, -0.43614,
-0.12438, -0.77056, 1.89493
]
},
{
"ct": 4700,
"ccm":
[
2.00971, -0.51461, -0.49511,
-0.52109, 2.01003, -0.48894,
-0.09527, -0.67318, 1.76845
]
},
{
"ct": 5890,
"ccm":
[
2.13616, -0.65283, -0.48333,
-0.48364, 1.93115, -0.44751,
-0.13465, -0.54831, 1.68295
]
},
{
"ct": 7600,
"ccm":
[
2.06599, -0.39161, -0.67439,
-0.50883, 2.27467, -0.76583,
-0.13961, -0.66121, 1.80081
]
}
]
}
},
{
"rpi.sharpen": { }
},
{
"rpi.hdr":
{
"MultiExposureUnmerged":
{
"cadence": [ 1, 2 ],
"channel_map":
{
"short": 1,
"long": 2
}
}
}
}
]
}
|
0 | repos/libcamera/src/ipa/rpi/vc4 | repos/libcamera/src/ipa/rpi/vc4/data/imx477_v1.json | {
"version": 2.0,
"target": "bcm2835",
"algorithms": [
{
"rpi.black_level":
{
"black_level": 4096
}
},
{
"rpi.dpc": { }
},
{
"rpi.lux":
{
"reference_shutter_speed": 27242,
"reference_gain": 1.0,
"reference_aperture": 1.0,
"reference_lux": 830,
"reference_Y": 17755
}
},
{
"rpi.noise":
{
"reference_constant": 0,
"reference_slope": 2.767
}
},
{
"rpi.geq":
{
"offset": 204,
"slope": 0.01078
}
},
{
"rpi.sdn": { }
},
{
"rpi.awb":
{
"priors": [
{
"lux": 0,
"prior":
[
2000, 1.0,
3000, 0.0,
13000, 0.0
]
},
{
"lux": 800,
"prior":
[
2000, 0.0,
6000, 2.0,
13000, 2.0
]
},
{
"lux": 1500,
"prior":
[
2000, 0.0,
4000, 1.0,
6000, 6.0,
6500, 7.0,
7000, 1.0,
13000, 1.0
]
}
],
"modes":
{
"auto":
{
"lo": 2500,
"hi": 8000
},
"incandescent":
{
"lo": 2500,
"hi": 3000
},
"tungsten":
{
"lo": 3000,
"hi": 3500
},
"fluorescent":
{
"lo": 4000,
"hi": 4700
},
"indoor":
{
"lo": 3000,
"hi": 5000
},
"daylight":
{
"lo": 5500,
"hi": 6500
},
"cloudy":
{
"lo": 7000,
"hi": 8600
}
},
"bayes": 1,
"ct_curve":
[
2360.0, 0.6009, 0.3093,
2870.0, 0.5047, 0.3936,
2970.0, 0.4782, 0.4221,
3700.0, 0.4212, 0.4923,
3870.0, 0.4037, 0.5166,
4000.0, 0.3965, 0.5271,
4400.0, 0.3703, 0.5666,
4715.0, 0.3411, 0.6147,
5920.0, 0.3108, 0.6687,
9050.0, 0.2524, 0.7856
],
"sensitivity_r": 1.05,
"sensitivity_b": 1.05,
"transverse_pos": 0.0238,
"transverse_neg": 0.04429
}
},
{
"rpi.agc":
{
"metering_modes":
{
"centre-weighted":
{
"weights":
[
3, 3, 3, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0
]
},
"spot":
{
"weights":
[
2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
},
"matrix":
{
"weights":
[
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
]
}
},
"exposure_modes":
{
"normal":
{
"shutter": [ 100, 10000, 30000, 60000, 66666 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 8.0 ]
},
"short":
{
"shutter": [ 100, 5000, 10000, 20000, 33333 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 8.0 ]
},
"long":
{
"shutter": [ 100, 10000, 30000, 60000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 12.0 ]
}
},
"constraint_modes":
{
"normal": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.3,
1000, 0.3
]
}
],
"highlight": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.3,
1000, 0.3
]
},
{
"bound": "UPPER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.8,
1000, 0.8
]
}
],
"shadows": [
{
"bound": "LOWER",
"q_lo": 0.0,
"q_hi": 0.5,
"y_target":
[
0, 0.17,
1000, 0.17
]
}
]
},
"y_target":
[
0, 0.16,
1000, 0.165,
10000, 0.17
]
}
},
{
"rpi.alsc":
{
"omega": 1.3,
"n_iter": 100,
"luminance_strength": 0.5,
"calibrations_Cr": [
{
"ct": 2960,
"table":
[
2.088, 2.086, 2.082, 2.081, 2.077, 2.071, 2.068, 2.068, 2.072, 2.073, 2.075, 2.078, 2.084, 2.092, 2.095, 2.098,
2.086, 2.084, 2.079, 2.078, 2.075, 2.068, 2.064, 2.063, 2.068, 2.071, 2.072, 2.075, 2.081, 2.089, 2.092, 2.094,
2.083, 2.081, 2.077, 2.072, 2.069, 2.062, 2.059, 2.059, 2.063, 2.067, 2.069, 2.072, 2.079, 2.088, 2.089, 2.089,
2.081, 2.077, 2.072, 2.068, 2.065, 2.058, 2.055, 2.054, 2.057, 2.062, 2.066, 2.069, 2.077, 2.084, 2.086, 2.086,
2.078, 2.075, 2.069, 2.065, 2.061, 2.055, 2.052, 2.049, 2.051, 2.056, 2.062, 2.065, 2.072, 2.079, 2.081, 2.079,
2.079, 2.075, 2.069, 2.064, 2.061, 2.053, 2.049, 2.046, 2.049, 2.051, 2.057, 2.062, 2.069, 2.075, 2.077, 2.075,
2.082, 2.079, 2.072, 2.065, 2.061, 2.054, 2.049, 2.047, 2.049, 2.051, 2.056, 2.061, 2.066, 2.073, 2.073, 2.069,
2.086, 2.082, 2.075, 2.068, 2.062, 2.054, 2.051, 2.049, 2.051, 2.052, 2.056, 2.061, 2.066, 2.073, 2.073, 2.072,
2.088, 2.086, 2.079, 2.074, 2.066, 2.057, 2.051, 2.051, 2.054, 2.055, 2.056, 2.061, 2.067, 2.072, 2.073, 2.072,
2.091, 2.087, 2.079, 2.075, 2.068, 2.057, 2.052, 2.052, 2.056, 2.055, 2.055, 2.059, 2.066, 2.072, 2.072, 2.072,
2.093, 2.088, 2.081, 2.077, 2.069, 2.059, 2.054, 2.054, 2.057, 2.056, 2.056, 2.058, 2.066, 2.072, 2.073, 2.073,
2.095, 2.091, 2.084, 2.078, 2.075, 2.067, 2.057, 2.057, 2.059, 2.059, 2.058, 2.059, 2.068, 2.073, 2.075, 2.078
]
},
{
"ct": 4850,
"table":
[
2.973, 2.968, 2.956, 2.943, 2.941, 2.932, 2.923, 2.921, 2.924, 2.929, 2.931, 2.939, 2.953, 2.965, 2.966, 2.976,
2.969, 2.962, 2.951, 2.941, 2.934, 2.928, 2.919, 2.918, 2.919, 2.923, 2.927, 2.933, 2.945, 2.957, 2.962, 2.962,
2.964, 2.956, 2.944, 2.932, 2.929, 2.924, 2.915, 2.914, 2.915, 2.919, 2.924, 2.928, 2.941, 2.952, 2.958, 2.959,
2.957, 2.951, 2.939, 2.928, 2.924, 2.919, 2.913, 2.911, 2.911, 2.915, 2.919, 2.925, 2.936, 2.947, 2.952, 2.953,
2.954, 2.947, 2.935, 2.924, 2.919, 2.915, 2.908, 2.906, 2.906, 2.907, 2.914, 2.921, 2.932, 2.941, 2.943, 2.942,
2.953, 2.946, 2.932, 2.921, 2.916, 2.911, 2.904, 2.902, 2.901, 2.904, 2.909, 2.919, 2.926, 2.937, 2.939, 2.939,
2.953, 2.947, 2.932, 2.918, 2.915, 2.909, 2.903, 2.901, 2.901, 2.906, 2.911, 2.918, 2.924, 2.936, 2.936, 2.932,
2.956, 2.948, 2.934, 2.919, 2.916, 2.908, 2.903, 2.901, 2.902, 2.907, 2.909, 2.917, 2.926, 2.936, 2.939, 2.939,
2.957, 2.951, 2.936, 2.923, 2.917, 2.907, 2.904, 2.901, 2.902, 2.908, 2.911, 2.919, 2.929, 2.939, 2.942, 2.942,
2.961, 2.951, 2.936, 2.922, 2.918, 2.906, 2.904, 2.901, 2.901, 2.907, 2.911, 2.921, 2.931, 2.941, 2.942, 2.944,
2.964, 2.954, 2.936, 2.924, 2.918, 2.909, 2.905, 2.905, 2.905, 2.907, 2.912, 2.923, 2.933, 2.942, 2.944, 2.944,
2.964, 2.958, 2.943, 2.927, 2.921, 2.914, 2.909, 2.907, 2.907, 2.912, 2.916, 2.928, 2.936, 2.944, 2.947, 2.952
]
},
{
"ct": 5930,
"table":
[
3.312, 3.308, 3.301, 3.294, 3.288, 3.277, 3.268, 3.261, 3.259, 3.261, 3.267, 3.273, 3.285, 3.301, 3.303, 3.312,
3.308, 3.304, 3.294, 3.291, 3.283, 3.271, 3.263, 3.259, 3.257, 3.258, 3.261, 3.268, 3.278, 3.293, 3.299, 3.299,
3.302, 3.296, 3.288, 3.282, 3.276, 3.267, 3.259, 3.254, 3.252, 3.253, 3.256, 3.261, 3.273, 3.289, 3.292, 3.292,
3.296, 3.289, 3.282, 3.276, 3.269, 3.263, 3.256, 3.251, 3.248, 3.249, 3.251, 3.257, 3.268, 3.279, 3.284, 3.284,
3.292, 3.285, 3.279, 3.271, 3.264, 3.257, 3.249, 3.243, 3.241, 3.241, 3.246, 3.252, 3.261, 3.274, 3.275, 3.273,
3.291, 3.285, 3.276, 3.268, 3.259, 3.251, 3.242, 3.239, 3.236, 3.238, 3.244, 3.248, 3.258, 3.268, 3.269, 3.265,
3.294, 3.288, 3.275, 3.266, 3.257, 3.248, 3.239, 3.238, 3.237, 3.238, 3.243, 3.246, 3.255, 3.264, 3.264, 3.257,
3.297, 3.293, 3.279, 3.268, 3.258, 3.249, 3.238, 3.237, 3.239, 3.239, 3.243, 3.245, 3.255, 3.264, 3.264, 3.263,
3.301, 3.295, 3.281, 3.271, 3.259, 3.248, 3.237, 3.237, 3.239, 3.241, 3.243, 3.246, 3.257, 3.265, 3.266, 3.264,
3.306, 3.295, 3.279, 3.271, 3.261, 3.247, 3.235, 3.234, 3.239, 3.239, 3.243, 3.247, 3.258, 3.265, 3.265, 3.264,
3.308, 3.297, 3.279, 3.272, 3.261, 3.249, 3.239, 3.239, 3.241, 3.243, 3.245, 3.248, 3.261, 3.265, 3.266, 3.265,
3.309, 3.301, 3.286, 3.276, 3.267, 3.256, 3.246, 3.242, 3.244, 3.244, 3.249, 3.253, 3.263, 3.267, 3.271, 3.274
]
}
],
"calibrations_Cb": [
{
"ct": 2960,
"table":
[
2.133, 2.134, 2.139, 2.143, 2.148, 2.155, 2.158, 2.158, 2.158, 2.161, 2.161, 2.162, 2.159, 2.156, 2.152, 2.151,
2.132, 2.133, 2.135, 2.142, 2.147, 2.153, 2.158, 2.158, 2.158, 2.158, 2.159, 2.159, 2.157, 2.154, 2.151, 2.148,
2.133, 2.133, 2.135, 2.142, 2.149, 2.154, 2.158, 2.158, 2.157, 2.156, 2.158, 2.157, 2.155, 2.153, 2.148, 2.146,
2.133, 2.133, 2.138, 2.145, 2.149, 2.154, 2.158, 2.159, 2.158, 2.155, 2.157, 2.156, 2.153, 2.149, 2.146, 2.144,
2.133, 2.134, 2.139, 2.146, 2.149, 2.154, 2.158, 2.159, 2.159, 2.156, 2.154, 2.154, 2.149, 2.145, 2.143, 2.139,
2.135, 2.135, 2.139, 2.146, 2.151, 2.155, 2.158, 2.159, 2.158, 2.156, 2.153, 2.151, 2.146, 2.143, 2.139, 2.136,
2.135, 2.135, 2.138, 2.145, 2.151, 2.154, 2.157, 2.158, 2.157, 2.156, 2.153, 2.151, 2.147, 2.143, 2.141, 2.137,
2.135, 2.134, 2.135, 2.141, 2.149, 2.154, 2.157, 2.157, 2.157, 2.157, 2.157, 2.153, 2.149, 2.146, 2.142, 2.139,
2.132, 2.133, 2.135, 2.139, 2.148, 2.153, 2.158, 2.159, 2.159, 2.161, 2.161, 2.157, 2.154, 2.149, 2.144, 2.141,
2.132, 2.133, 2.135, 2.141, 2.149, 2.155, 2.161, 2.161, 2.162, 2.162, 2.163, 2.159, 2.154, 2.149, 2.144, 2.138,
2.136, 2.136, 2.137, 2.143, 2.149, 2.156, 2.162, 2.163, 2.162, 2.163, 2.164, 2.161, 2.157, 2.152, 2.146, 2.138,
2.137, 2.137, 2.141, 2.147, 2.152, 2.157, 2.162, 2.162, 2.159, 2.161, 2.162, 2.162, 2.157, 2.152, 2.148, 2.148
]
},
{
"ct": 4850,
"table":
[
1.463, 1.464, 1.471, 1.478, 1.479, 1.483, 1.484, 1.486, 1.486, 1.484, 1.483, 1.481, 1.478, 1.475, 1.471, 1.468,
1.463, 1.463, 1.468, 1.476, 1.479, 1.482, 1.484, 1.487, 1.486, 1.484, 1.483, 1.482, 1.478, 1.473, 1.469, 1.468,
1.463, 1.464, 1.468, 1.476, 1.479, 1.483, 1.484, 1.486, 1.486, 1.485, 1.484, 1.482, 1.477, 1.473, 1.469, 1.468,
1.463, 1.464, 1.469, 1.477, 1.481, 1.483, 1.485, 1.487, 1.487, 1.485, 1.485, 1.482, 1.478, 1.474, 1.469, 1.468,
1.465, 1.465, 1.471, 1.478, 1.481, 1.484, 1.486, 1.488, 1.488, 1.487, 1.485, 1.482, 1.477, 1.472, 1.468, 1.467,
1.465, 1.466, 1.472, 1.479, 1.482, 1.485, 1.486, 1.488, 1.488, 1.486, 1.484, 1.479, 1.475, 1.472, 1.468, 1.466,
1.466, 1.466, 1.472, 1.478, 1.482, 1.484, 1.485, 1.488, 1.487, 1.485, 1.483, 1.479, 1.475, 1.472, 1.469, 1.468,
1.465, 1.466, 1.469, 1.476, 1.481, 1.485, 1.485, 1.486, 1.486, 1.485, 1.483, 1.479, 1.477, 1.474, 1.471, 1.469,
1.464, 1.465, 1.469, 1.476, 1.481, 1.484, 1.485, 1.487, 1.487, 1.486, 1.485, 1.481, 1.478, 1.475, 1.471, 1.469,
1.463, 1.464, 1.469, 1.477, 1.481, 1.485, 1.485, 1.488, 1.488, 1.487, 1.486, 1.481, 1.478, 1.475, 1.471, 1.468,
1.464, 1.465, 1.471, 1.478, 1.482, 1.486, 1.486, 1.488, 1.488, 1.487, 1.486, 1.481, 1.478, 1.475, 1.472, 1.468,
1.465, 1.466, 1.472, 1.481, 1.483, 1.487, 1.487, 1.488, 1.488, 1.486, 1.485, 1.481, 1.479, 1.476, 1.473, 1.472
]
},
{
"ct": 5930,
"table":
[
1.443, 1.444, 1.448, 1.453, 1.459, 1.463, 1.465, 1.467, 1.469, 1.469, 1.467, 1.466, 1.462, 1.457, 1.454, 1.451,
1.443, 1.444, 1.445, 1.451, 1.459, 1.463, 1.465, 1.467, 1.469, 1.469, 1.467, 1.465, 1.461, 1.456, 1.452, 1.451,
1.444, 1.444, 1.445, 1.451, 1.459, 1.463, 1.466, 1.468, 1.469, 1.469, 1.467, 1.465, 1.461, 1.456, 1.452, 1.449,
1.444, 1.444, 1.447, 1.452, 1.459, 1.464, 1.467, 1.469, 1.471, 1.469, 1.467, 1.466, 1.461, 1.456, 1.452, 1.449,
1.444, 1.445, 1.448, 1.452, 1.459, 1.465, 1.469, 1.471, 1.471, 1.471, 1.468, 1.465, 1.461, 1.455, 1.451, 1.449,
1.445, 1.446, 1.449, 1.453, 1.461, 1.466, 1.469, 1.471, 1.472, 1.469, 1.467, 1.465, 1.459, 1.455, 1.451, 1.447,
1.446, 1.446, 1.449, 1.453, 1.461, 1.466, 1.469, 1.469, 1.469, 1.469, 1.467, 1.465, 1.459, 1.455, 1.452, 1.449,
1.446, 1.446, 1.447, 1.451, 1.459, 1.466, 1.469, 1.469, 1.469, 1.469, 1.467, 1.465, 1.461, 1.457, 1.454, 1.451,
1.444, 1.444, 1.447, 1.451, 1.459, 1.466, 1.469, 1.469, 1.471, 1.471, 1.468, 1.466, 1.462, 1.458, 1.454, 1.452,
1.444, 1.444, 1.448, 1.453, 1.459, 1.466, 1.469, 1.471, 1.472, 1.472, 1.468, 1.466, 1.462, 1.458, 1.454, 1.449,
1.446, 1.447, 1.449, 1.454, 1.461, 1.466, 1.471, 1.471, 1.471, 1.471, 1.468, 1.466, 1.462, 1.459, 1.455, 1.449,
1.447, 1.447, 1.452, 1.457, 1.462, 1.468, 1.472, 1.472, 1.471, 1.471, 1.468, 1.466, 1.462, 1.459, 1.456, 1.455
]
}
],
"luminance_lut":
[
1.548, 1.499, 1.387, 1.289, 1.223, 1.183, 1.164, 1.154, 1.153, 1.169, 1.211, 1.265, 1.345, 1.448, 1.581, 1.619,
1.513, 1.412, 1.307, 1.228, 1.169, 1.129, 1.105, 1.098, 1.103, 1.127, 1.157, 1.209, 1.272, 1.361, 1.481, 1.583,
1.449, 1.365, 1.257, 1.175, 1.124, 1.085, 1.062, 1.054, 1.059, 1.079, 1.113, 1.151, 1.211, 1.293, 1.407, 1.488,
1.424, 1.324, 1.222, 1.139, 1.089, 1.056, 1.034, 1.031, 1.034, 1.049, 1.075, 1.115, 1.164, 1.241, 1.351, 1.446,
1.412, 1.297, 1.203, 1.119, 1.069, 1.039, 1.021, 1.016, 1.022, 1.032, 1.052, 1.086, 1.135, 1.212, 1.321, 1.439,
1.406, 1.287, 1.195, 1.115, 1.059, 1.028, 1.014, 1.012, 1.015, 1.026, 1.041, 1.074, 1.125, 1.201, 1.302, 1.425,
1.406, 1.294, 1.205, 1.126, 1.062, 1.031, 1.013, 1.009, 1.011, 1.019, 1.042, 1.079, 1.129, 1.203, 1.302, 1.435,
1.415, 1.318, 1.229, 1.146, 1.076, 1.039, 1.019, 1.014, 1.017, 1.031, 1.053, 1.093, 1.144, 1.219, 1.314, 1.436,
1.435, 1.348, 1.246, 1.164, 1.094, 1.059, 1.036, 1.032, 1.037, 1.049, 1.072, 1.114, 1.167, 1.257, 1.343, 1.462,
1.471, 1.385, 1.278, 1.189, 1.124, 1.084, 1.064, 1.061, 1.069, 1.078, 1.101, 1.146, 1.207, 1.298, 1.415, 1.496,
1.522, 1.436, 1.323, 1.228, 1.169, 1.118, 1.101, 1.094, 1.099, 1.113, 1.146, 1.194, 1.265, 1.353, 1.474, 1.571,
1.578, 1.506, 1.378, 1.281, 1.211, 1.156, 1.135, 1.134, 1.139, 1.158, 1.194, 1.251, 1.327, 1.427, 1.559, 1.611
],
"sigma": 0.00121,
"sigma_Cb": 0.00115
}
},
{
"rpi.contrast":
{
"ce_enable": 1,
"gamma_curve":
[
0, 0,
1024, 5040,
2048, 9338,
3072, 12356,
4096, 15312,
5120, 18051,
6144, 20790,
7168, 23193,
8192, 25744,
9216, 27942,
10240, 30035,
11264, 32005,
12288, 33975,
13312, 35815,
14336, 37600,
15360, 39168,
16384, 40642,
18432, 43379,
20480, 45749,
22528, 47753,
24576, 49621,
26624, 51253,
28672, 52698,
30720, 53796,
32768, 54876,
36864, 57012,
40960, 58656,
45056, 59954,
49152, 61183,
53248, 62355,
57344, 63419,
61440, 64476,
65535, 65535
]
}
},
{
"rpi.ccm":
{
"ccms": [
{
"ct": 2360,
"ccm":
[
1.66078, -0.23588, -0.42491,
-0.47456, 1.82763, -0.35307,
-0.00545, -1.44729, 2.45273
]
},
{
"ct": 2870,
"ccm":
[
1.78373, -0.55344, -0.23029,
-0.39951, 1.69701, -0.29751,
0.01986, -1.06525, 2.04539
]
},
{
"ct": 2970,
"ccm":
[
1.73511, -0.56973, -0.16537,
-0.36338, 1.69878, -0.33539,
-0.02354, -0.76813, 1.79168
]
},
{
"ct": 3000,
"ccm":
[
2.06374, -0.92218, -0.14156,
-0.41721, 1.69289, -0.27568,
-0.00554, -0.92741, 1.93295
]
},
{
"ct": 3700,
"ccm":
[
2.13792, -1.08136, -0.05655,
-0.34739, 1.58989, -0.24249,
-0.00349, -0.76789, 1.77138
]
},
{
"ct": 3870,
"ccm":
[
1.83834, -0.70528, -0.13307,
-0.30499, 1.60523, -0.30024,
-0.05701, -0.58313, 1.64014
]
},
{
"ct": 4000,
"ccm":
[
2.15741, -1.10295, -0.05447,
-0.34631, 1.61158, -0.26528,
-0.02723, -0.70288, 1.73011
]
},
{
"ct": 4400,
"ccm":
[
2.05729, -0.95007, -0.10723,
-0.41712, 1.78606, -0.36894,
-0.11899, -0.55727, 1.67626
]
},
{
"ct": 4715,
"ccm":
[
1.90255, -0.77478, -0.12777,
-0.31338, 1.88197, -0.56858,
-0.06001, -0.61785, 1.67786
]
},
{
"ct": 5920,
"ccm":
[
1.98691, -0.84671, -0.14019,
-0.26581, 1.70615, -0.44035,
-0.09532, -0.47332, 1.56864
]
},
{
"ct": 9050,
"ccm":
[
2.09255, -0.76541, -0.32714,
-0.28973, 2.27462, -0.98489,
-0.17299, -0.61275, 1.78574
]
}
]
}
},
{
"rpi.sharpen": { }
}
]
} |
0 | repos/libcamera/src/ipa/rpi/vc4 | repos/libcamera/src/ipa/rpi/vc4/data/imx477.json | {
"version": 2.0,
"target": "bcm2835",
"algorithms": [
{
"rpi.black_level":
{
"black_level": 4096
}
},
{
"rpi.dpc": { }
},
{
"rpi.lux":
{
"reference_shutter_speed": 27242,
"reference_gain": 1.0,
"reference_aperture": 1.0,
"reference_lux": 830,
"reference_Y": 17755
}
},
{
"rpi.noise":
{
"reference_constant": 0,
"reference_slope": 2.767
}
},
{
"rpi.geq":
{
"offset": 204,
"slope": 0.01078
}
},
{
"rpi.sdn": { }
},
{
"rpi.awb":
{
"priors": [
{
"lux": 0,
"prior":
[
2000, 1.0,
3000, 0.0,
13000, 0.0
]
},
{
"lux": 800,
"prior":
[
2000, 0.0,
6000, 2.0,
13000, 2.0
]
},
{
"lux": 1500,
"prior":
[
2000, 0.0,
4000, 1.0,
6000, 6.0,
6500, 7.0,
7000, 1.0,
13000, 1.0
]
}
],
"modes":
{
"auto":
{
"lo": 2500,
"hi": 8000
},
"incandescent":
{
"lo": 2500,
"hi": 3000
},
"tungsten":
{
"lo": 3000,
"hi": 3500
},
"fluorescent":
{
"lo": 4000,
"hi": 4700
},
"indoor":
{
"lo": 3000,
"hi": 5000
},
"daylight":
{
"lo": 5500,
"hi": 6500
},
"cloudy":
{
"lo": 7000,
"hi": 8600
}
},
"bayes": 1,
"ct_curve":
[
2360.0, 0.6009, 0.3093,
2848.0, 0.5071, 0.4,
2903.0, 0.4905, 0.4392,
3628.0, 0.4261, 0.5564,
3643.0, 0.4228, 0.5623,
4660.0, 0.3529, 0.68,
5579.0, 0.3227, 0.7,
6125.0, 0.3129, 0.71,
6671.0, 0.3065, 0.72,
7217.0, 0.3014, 0.73,
7763.0, 0.295, 0.74,
9505.0, 0.2524, 0.7856
],
"sensitivity_r": 1.05,
"sensitivity_b": 1.05,
"transverse_pos": 0.0238,
"transverse_neg": 0.04429
}
},
{
"rpi.agc":
{
"channels": [
{
"metering_modes":
{
"centre-weighted":
{
"weights":
[
3, 3, 3, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0
]
},
"spot":
{
"weights":
[
2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
},
"matrix":
{
"weights":
[
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
]
}
},
"exposure_modes":
{
"normal":
{
"shutter": [ 100, 10000, 30000, 60000, 66666 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 8.0 ]
},
"short":
{
"shutter": [ 100, 5000, 10000, 20000, 33333 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 8.0 ]
},
"long":
{
"shutter": [ 100, 10000, 30000, 60000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 12.0 ]
}
},
"constraint_modes":
{
"normal": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.3,
1000, 0.3
]
}
],
"highlight": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.3,
1000, 0.3
]
},
{
"bound": "UPPER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.8,
1000, 0.8
]
}
],
"shadows": [
{
"bound": "LOWER",
"q_lo": 0.0,
"q_hi": 0.5,
"y_target":
[
0, 0.17,
1000, 0.17
]
}
]
},
"y_target":
[
0, 0.16,
1000, 0.165,
10000, 0.17
]
},
{
"base_ev": 0.125,
"metering_modes":
{
"centre-weighted":
{
"weights":
[
3, 3, 3, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0
]
},
"spot":
{
"weights":
[
2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
},
"matrix":
{
"weights":
[
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
]
}
},
"exposure_modes":
{
"normal":
{
"shutter": [ 100, 10000, 30000, 60000, 66666 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 8.0 ]
},
"short":
{
"shutter": [ 100, 5000, 10000, 20000, 33333 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 8.0 ]
},
"long":
{
"shutter": [ 100, 10000, 30000, 60000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 12.0 ]
}
},
"constraint_modes":
{
"normal": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.3,
1000, 0.3
]
}
],
"highlight": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.3,
1000, 0.3
]
},
{
"bound": "UPPER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.8,
1000, 0.8
]
}
],
"shadows": [
{
"bound": "LOWER",
"q_lo": 0.0,
"q_hi": 0.5,
"y_target":
[
0, 0.17,
1000, 0.17
]
}
]
},
"y_target":
[
0, 0.16,
1000, 0.165,
10000, 0.17
]
},
{
"base_ev": 1.5,
"metering_modes":
{
"centre-weighted":
{
"weights":
[
3, 3, 3, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0
]
},
"spot":
{
"weights":
[
2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
},
"matrix":
{
"weights":
[
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
]
}
},
"exposure_modes":
{
"normal":
{
"shutter": [ 100, 10000, 30000, 60000, 66666 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 8.0 ]
},
"short":
{
"shutter": [ 100, 5000, 10000, 20000, 33333 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 8.0 ]
},
"long":
{
"shutter": [ 100, 10000, 30000, 60000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 12.0 ]
}
},
"constraint_modes":
{
"normal": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.3,
1000, 0.3
]
}
],
"highlight": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.3,
1000, 0.3
]
},
{
"bound": "UPPER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.8,
1000, 0.8
]
}
],
"shadows": [
{
"bound": "LOWER",
"q_lo": 0.0,
"q_hi": 0.5,
"y_target":
[
0, 0.17,
1000, 0.17
]
}
]
},
"y_target":
[
0, 0.16,
1000, 0.165,
10000, 0.17
]
}
]
}
},
{
"rpi.alsc":
{
"omega": 1.3,
"n_iter": 100,
"luminance_strength": 0.5,
"calibrations_Cr": [
{
"ct": 2960,
"table":
[
2.088, 2.086, 2.082, 2.081, 2.077, 2.071, 2.068, 2.068, 2.072, 2.073, 2.075, 2.078, 2.084, 2.092, 2.095, 2.098,
2.086, 2.084, 2.079, 2.078, 2.075, 2.068, 2.064, 2.063, 2.068, 2.071, 2.072, 2.075, 2.081, 2.089, 2.092, 2.094,
2.083, 2.081, 2.077, 2.072, 2.069, 2.062, 2.059, 2.059, 2.063, 2.067, 2.069, 2.072, 2.079, 2.088, 2.089, 2.089,
2.081, 2.077, 2.072, 2.068, 2.065, 2.058, 2.055, 2.054, 2.057, 2.062, 2.066, 2.069, 2.077, 2.084, 2.086, 2.086,
2.078, 2.075, 2.069, 2.065, 2.061, 2.055, 2.052, 2.049, 2.051, 2.056, 2.062, 2.065, 2.072, 2.079, 2.081, 2.079,
2.079, 2.075, 2.069, 2.064, 2.061, 2.053, 2.049, 2.046, 2.049, 2.051, 2.057, 2.062, 2.069, 2.075, 2.077, 2.075,
2.082, 2.079, 2.072, 2.065, 2.061, 2.054, 2.049, 2.047, 2.049, 2.051, 2.056, 2.061, 2.066, 2.073, 2.073, 2.069,
2.086, 2.082, 2.075, 2.068, 2.062, 2.054, 2.051, 2.049, 2.051, 2.052, 2.056, 2.061, 2.066, 2.073, 2.073, 2.072,
2.088, 2.086, 2.079, 2.074, 2.066, 2.057, 2.051, 2.051, 2.054, 2.055, 2.056, 2.061, 2.067, 2.072, 2.073, 2.072,
2.091, 2.087, 2.079, 2.075, 2.068, 2.057, 2.052, 2.052, 2.056, 2.055, 2.055, 2.059, 2.066, 2.072, 2.072, 2.072,
2.093, 2.088, 2.081, 2.077, 2.069, 2.059, 2.054, 2.054, 2.057, 2.056, 2.056, 2.058, 2.066, 2.072, 2.073, 2.073,
2.095, 2.091, 2.084, 2.078, 2.075, 2.067, 2.057, 2.057, 2.059, 2.059, 2.058, 2.059, 2.068, 2.073, 2.075, 2.078
]
},
{
"ct": 4850,
"table":
[
2.973, 2.968, 2.956, 2.943, 2.941, 2.932, 2.923, 2.921, 2.924, 2.929, 2.931, 2.939, 2.953, 2.965, 2.966, 2.976,
2.969, 2.962, 2.951, 2.941, 2.934, 2.928, 2.919, 2.918, 2.919, 2.923, 2.927, 2.933, 2.945, 2.957, 2.962, 2.962,
2.964, 2.956, 2.944, 2.932, 2.929, 2.924, 2.915, 2.914, 2.915, 2.919, 2.924, 2.928, 2.941, 2.952, 2.958, 2.959,
2.957, 2.951, 2.939, 2.928, 2.924, 2.919, 2.913, 2.911, 2.911, 2.915, 2.919, 2.925, 2.936, 2.947, 2.952, 2.953,
2.954, 2.947, 2.935, 2.924, 2.919, 2.915, 2.908, 2.906, 2.906, 2.907, 2.914, 2.921, 2.932, 2.941, 2.943, 2.942,
2.953, 2.946, 2.932, 2.921, 2.916, 2.911, 2.904, 2.902, 2.901, 2.904, 2.909, 2.919, 2.926, 2.937, 2.939, 2.939,
2.953, 2.947, 2.932, 2.918, 2.915, 2.909, 2.903, 2.901, 2.901, 2.906, 2.911, 2.918, 2.924, 2.936, 2.936, 2.932,
2.956, 2.948, 2.934, 2.919, 2.916, 2.908, 2.903, 2.901, 2.902, 2.907, 2.909, 2.917, 2.926, 2.936, 2.939, 2.939,
2.957, 2.951, 2.936, 2.923, 2.917, 2.907, 2.904, 2.901, 2.902, 2.908, 2.911, 2.919, 2.929, 2.939, 2.942, 2.942,
2.961, 2.951, 2.936, 2.922, 2.918, 2.906, 2.904, 2.901, 2.901, 2.907, 2.911, 2.921, 2.931, 2.941, 2.942, 2.944,
2.964, 2.954, 2.936, 2.924, 2.918, 2.909, 2.905, 2.905, 2.905, 2.907, 2.912, 2.923, 2.933, 2.942, 2.944, 2.944,
2.964, 2.958, 2.943, 2.927, 2.921, 2.914, 2.909, 2.907, 2.907, 2.912, 2.916, 2.928, 2.936, 2.944, 2.947, 2.952
]
},
{
"ct": 5930,
"table":
[
3.312, 3.308, 3.301, 3.294, 3.288, 3.277, 3.268, 3.261, 3.259, 3.261, 3.267, 3.273, 3.285, 3.301, 3.303, 3.312,
3.308, 3.304, 3.294, 3.291, 3.283, 3.271, 3.263, 3.259, 3.257, 3.258, 3.261, 3.268, 3.278, 3.293, 3.299, 3.299,
3.302, 3.296, 3.288, 3.282, 3.276, 3.267, 3.259, 3.254, 3.252, 3.253, 3.256, 3.261, 3.273, 3.289, 3.292, 3.292,
3.296, 3.289, 3.282, 3.276, 3.269, 3.263, 3.256, 3.251, 3.248, 3.249, 3.251, 3.257, 3.268, 3.279, 3.284, 3.284,
3.292, 3.285, 3.279, 3.271, 3.264, 3.257, 3.249, 3.243, 3.241, 3.241, 3.246, 3.252, 3.261, 3.274, 3.275, 3.273,
3.291, 3.285, 3.276, 3.268, 3.259, 3.251, 3.242, 3.239, 3.236, 3.238, 3.244, 3.248, 3.258, 3.268, 3.269, 3.265,
3.294, 3.288, 3.275, 3.266, 3.257, 3.248, 3.239, 3.238, 3.237, 3.238, 3.243, 3.246, 3.255, 3.264, 3.264, 3.257,
3.297, 3.293, 3.279, 3.268, 3.258, 3.249, 3.238, 3.237, 3.239, 3.239, 3.243, 3.245, 3.255, 3.264, 3.264, 3.263,
3.301, 3.295, 3.281, 3.271, 3.259, 3.248, 3.237, 3.237, 3.239, 3.241, 3.243, 3.246, 3.257, 3.265, 3.266, 3.264,
3.306, 3.295, 3.279, 3.271, 3.261, 3.247, 3.235, 3.234, 3.239, 3.239, 3.243, 3.247, 3.258, 3.265, 3.265, 3.264,
3.308, 3.297, 3.279, 3.272, 3.261, 3.249, 3.239, 3.239, 3.241, 3.243, 3.245, 3.248, 3.261, 3.265, 3.266, 3.265,
3.309, 3.301, 3.286, 3.276, 3.267, 3.256, 3.246, 3.242, 3.244, 3.244, 3.249, 3.253, 3.263, 3.267, 3.271, 3.274
]
}
],
"calibrations_Cb": [
{
"ct": 2960,
"table":
[
2.133, 2.134, 2.139, 2.143, 2.148, 2.155, 2.158, 2.158, 2.158, 2.161, 2.161, 2.162, 2.159, 2.156, 2.152, 2.151,
2.132, 2.133, 2.135, 2.142, 2.147, 2.153, 2.158, 2.158, 2.158, 2.158, 2.159, 2.159, 2.157, 2.154, 2.151, 2.148,
2.133, 2.133, 2.135, 2.142, 2.149, 2.154, 2.158, 2.158, 2.157, 2.156, 2.158, 2.157, 2.155, 2.153, 2.148, 2.146,
2.133, 2.133, 2.138, 2.145, 2.149, 2.154, 2.158, 2.159, 2.158, 2.155, 2.157, 2.156, 2.153, 2.149, 2.146, 2.144,
2.133, 2.134, 2.139, 2.146, 2.149, 2.154, 2.158, 2.159, 2.159, 2.156, 2.154, 2.154, 2.149, 2.145, 2.143, 2.139,
2.135, 2.135, 2.139, 2.146, 2.151, 2.155, 2.158, 2.159, 2.158, 2.156, 2.153, 2.151, 2.146, 2.143, 2.139, 2.136,
2.135, 2.135, 2.138, 2.145, 2.151, 2.154, 2.157, 2.158, 2.157, 2.156, 2.153, 2.151, 2.147, 2.143, 2.141, 2.137,
2.135, 2.134, 2.135, 2.141, 2.149, 2.154, 2.157, 2.157, 2.157, 2.157, 2.157, 2.153, 2.149, 2.146, 2.142, 2.139,
2.132, 2.133, 2.135, 2.139, 2.148, 2.153, 2.158, 2.159, 2.159, 2.161, 2.161, 2.157, 2.154, 2.149, 2.144, 2.141,
2.132, 2.133, 2.135, 2.141, 2.149, 2.155, 2.161, 2.161, 2.162, 2.162, 2.163, 2.159, 2.154, 2.149, 2.144, 2.138,
2.136, 2.136, 2.137, 2.143, 2.149, 2.156, 2.162, 2.163, 2.162, 2.163, 2.164, 2.161, 2.157, 2.152, 2.146, 2.138,
2.137, 2.137, 2.141, 2.147, 2.152, 2.157, 2.162, 2.162, 2.159, 2.161, 2.162, 2.162, 2.157, 2.152, 2.148, 2.148
]
},
{
"ct": 4850,
"table":
[
1.463, 1.464, 1.471, 1.478, 1.479, 1.483, 1.484, 1.486, 1.486, 1.484, 1.483, 1.481, 1.478, 1.475, 1.471, 1.468,
1.463, 1.463, 1.468, 1.476, 1.479, 1.482, 1.484, 1.487, 1.486, 1.484, 1.483, 1.482, 1.478, 1.473, 1.469, 1.468,
1.463, 1.464, 1.468, 1.476, 1.479, 1.483, 1.484, 1.486, 1.486, 1.485, 1.484, 1.482, 1.477, 1.473, 1.469, 1.468,
1.463, 1.464, 1.469, 1.477, 1.481, 1.483, 1.485, 1.487, 1.487, 1.485, 1.485, 1.482, 1.478, 1.474, 1.469, 1.468,
1.465, 1.465, 1.471, 1.478, 1.481, 1.484, 1.486, 1.488, 1.488, 1.487, 1.485, 1.482, 1.477, 1.472, 1.468, 1.467,
1.465, 1.466, 1.472, 1.479, 1.482, 1.485, 1.486, 1.488, 1.488, 1.486, 1.484, 1.479, 1.475, 1.472, 1.468, 1.466,
1.466, 1.466, 1.472, 1.478, 1.482, 1.484, 1.485, 1.488, 1.487, 1.485, 1.483, 1.479, 1.475, 1.472, 1.469, 1.468,
1.465, 1.466, 1.469, 1.476, 1.481, 1.485, 1.485, 1.486, 1.486, 1.485, 1.483, 1.479, 1.477, 1.474, 1.471, 1.469,
1.464, 1.465, 1.469, 1.476, 1.481, 1.484, 1.485, 1.487, 1.487, 1.486, 1.485, 1.481, 1.478, 1.475, 1.471, 1.469,
1.463, 1.464, 1.469, 1.477, 1.481, 1.485, 1.485, 1.488, 1.488, 1.487, 1.486, 1.481, 1.478, 1.475, 1.471, 1.468,
1.464, 1.465, 1.471, 1.478, 1.482, 1.486, 1.486, 1.488, 1.488, 1.487, 1.486, 1.481, 1.478, 1.475, 1.472, 1.468,
1.465, 1.466, 1.472, 1.481, 1.483, 1.487, 1.487, 1.488, 1.488, 1.486, 1.485, 1.481, 1.479, 1.476, 1.473, 1.472
]
},
{
"ct": 5930,
"table":
[
1.443, 1.444, 1.448, 1.453, 1.459, 1.463, 1.465, 1.467, 1.469, 1.469, 1.467, 1.466, 1.462, 1.457, 1.454, 1.451,
1.443, 1.444, 1.445, 1.451, 1.459, 1.463, 1.465, 1.467, 1.469, 1.469, 1.467, 1.465, 1.461, 1.456, 1.452, 1.451,
1.444, 1.444, 1.445, 1.451, 1.459, 1.463, 1.466, 1.468, 1.469, 1.469, 1.467, 1.465, 1.461, 1.456, 1.452, 1.449,
1.444, 1.444, 1.447, 1.452, 1.459, 1.464, 1.467, 1.469, 1.471, 1.469, 1.467, 1.466, 1.461, 1.456, 1.452, 1.449,
1.444, 1.445, 1.448, 1.452, 1.459, 1.465, 1.469, 1.471, 1.471, 1.471, 1.468, 1.465, 1.461, 1.455, 1.451, 1.449,
1.445, 1.446, 1.449, 1.453, 1.461, 1.466, 1.469, 1.471, 1.472, 1.469, 1.467, 1.465, 1.459, 1.455, 1.451, 1.447,
1.446, 1.446, 1.449, 1.453, 1.461, 1.466, 1.469, 1.469, 1.469, 1.469, 1.467, 1.465, 1.459, 1.455, 1.452, 1.449,
1.446, 1.446, 1.447, 1.451, 1.459, 1.466, 1.469, 1.469, 1.469, 1.469, 1.467, 1.465, 1.461, 1.457, 1.454, 1.451,
1.444, 1.444, 1.447, 1.451, 1.459, 1.466, 1.469, 1.469, 1.471, 1.471, 1.468, 1.466, 1.462, 1.458, 1.454, 1.452,
1.444, 1.444, 1.448, 1.453, 1.459, 1.466, 1.469, 1.471, 1.472, 1.472, 1.468, 1.466, 1.462, 1.458, 1.454, 1.449,
1.446, 1.447, 1.449, 1.454, 1.461, 1.466, 1.471, 1.471, 1.471, 1.471, 1.468, 1.466, 1.462, 1.459, 1.455, 1.449,
1.447, 1.447, 1.452, 1.457, 1.462, 1.468, 1.472, 1.472, 1.471, 1.471, 1.468, 1.466, 1.462, 1.459, 1.456, 1.455
]
}
],
"luminance_lut":
[
1.548, 1.499, 1.387, 1.289, 1.223, 1.183, 1.164, 1.154, 1.153, 1.169, 1.211, 1.265, 1.345, 1.448, 1.581, 1.619,
1.513, 1.412, 1.307, 1.228, 1.169, 1.129, 1.105, 1.098, 1.103, 1.127, 1.157, 1.209, 1.272, 1.361, 1.481, 1.583,
1.449, 1.365, 1.257, 1.175, 1.124, 1.085, 1.062, 1.054, 1.059, 1.079, 1.113, 1.151, 1.211, 1.293, 1.407, 1.488,
1.424, 1.324, 1.222, 1.139, 1.089, 1.056, 1.034, 1.031, 1.034, 1.049, 1.075, 1.115, 1.164, 1.241, 1.351, 1.446,
1.412, 1.297, 1.203, 1.119, 1.069, 1.039, 1.021, 1.016, 1.022, 1.032, 1.052, 1.086, 1.135, 1.212, 1.321, 1.439,
1.406, 1.287, 1.195, 1.115, 1.059, 1.028, 1.014, 1.012, 1.015, 1.026, 1.041, 1.074, 1.125, 1.201, 1.302, 1.425,
1.406, 1.294, 1.205, 1.126, 1.062, 1.031, 1.013, 1.009, 1.011, 1.019, 1.042, 1.079, 1.129, 1.203, 1.302, 1.435,
1.415, 1.318, 1.229, 1.146, 1.076, 1.039, 1.019, 1.014, 1.017, 1.031, 1.053, 1.093, 1.144, 1.219, 1.314, 1.436,
1.435, 1.348, 1.246, 1.164, 1.094, 1.059, 1.036, 1.032, 1.037, 1.049, 1.072, 1.114, 1.167, 1.257, 1.343, 1.462,
1.471, 1.385, 1.278, 1.189, 1.124, 1.084, 1.064, 1.061, 1.069, 1.078, 1.101, 1.146, 1.207, 1.298, 1.415, 1.496,
1.522, 1.436, 1.323, 1.228, 1.169, 1.118, 1.101, 1.094, 1.099, 1.113, 1.146, 1.194, 1.265, 1.353, 1.474, 1.571,
1.578, 1.506, 1.378, 1.281, 1.211, 1.156, 1.135, 1.134, 1.139, 1.158, 1.194, 1.251, 1.327, 1.427, 1.559, 1.611
],
"sigma": 0.00121,
"sigma_Cb": 0.00115
}
},
{
"rpi.contrast":
{
"ce_enable": 1,
"gamma_curve":
[
0, 0,
1024, 5040,
2048, 9338,
3072, 12356,
4096, 15312,
5120, 18051,
6144, 20790,
7168, 23193,
8192, 25744,
9216, 27942,
10240, 30035,
11264, 32005,
12288, 33975,
13312, 35815,
14336, 37600,
15360, 39168,
16384, 40642,
18432, 43379,
20480, 45749,
22528, 47753,
24576, 49621,
26624, 51253,
28672, 52698,
30720, 53796,
32768, 54876,
36864, 57012,
40960, 58656,
45056, 59954,
49152, 61183,
53248, 62355,
57344, 63419,
61440, 64476,
65535, 65535
]
}
},
{
"rpi.ccm":
{
"ccms": [
{
"ct": 2850,
"ccm":
[
1.97469, -0.71439, -0.26031,
-0.43521, 2.09769, -0.66248,
-0.04826, -0.84642, 1.89468
]
},
{
"ct": 2960,
"ccm":
[
2.12952, -0.91185, -0.21768,
-0.38018, 1.90789, -0.52771,
0.03988, -1.10079, 2.06092
]
},
{
"ct": 3580,
"ccm":
[
2.03422, -0.80048, -0.23374,
-0.39089, 1.97221, -0.58132,
-0.08969, -0.61439, 1.70408
]
},
{
"ct": 4559,
"ccm":
[
2.15423, -0.98143, -0.17279,
-0.38131, 2.14763, -0.76632,
-0.10069, -0.54383, 1.64452
]
},
{
"ct": 5881,
"ccm":
[
2.18464, -0.95493, -0.22971,
-0.36826, 2.00298, -0.63471,
-0.15219, -0.38055, 1.53274
]
},
{
"ct": 7600,
"ccm":
[
2.30687, -0.97295, -0.33392,
-0.30872, 2.32779, -1.01908,
-0.17761, -0.55891, 1.73651
]
}
]
}
},
{
"rpi.sharpen": { }
},
{
"rpi.hdr":
{
"MultiExposureUnmerged":
{
"cadence": [ 1, 2 ],
"channel_map":
{
"short": 1,
"long": 2
}
}
}
}
]
} |
0 | repos/libcamera/src/ipa/rpi/vc4 | repos/libcamera/src/ipa/rpi/vc4/data/imx290.json | {
"version": 2.0,
"target": "bcm2835",
"algorithms": [
{
"rpi.black_level":
{
"black_level": 3840
}
},
{
"rpi.dpc": { }
},
{
"rpi.lux":
{
"reference_shutter_speed": 6813,
"reference_gain": 1.0,
"reference_aperture": 1.0,
"reference_lux": 890,
"reference_Y": 12900
}
},
{
"rpi.noise":
{
"reference_constant": 0,
"reference_slope": 2.67
}
},
{
"rpi.geq":
{
"offset": 187,
"slope": 0.00842
}
},
{
"rpi.sdn": { }
},
{
"rpi.awb":
{
"bayes": 0
}
},
{
"rpi.agc":
{
"speed": 0.2,
"metering_modes":
{
"matrix":
{
"weights":
[
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
]
},
"centre-weighted":
{
"weights":
[
3, 3, 3, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0
]
},
"spot":
{
"weights":
[
2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
}
},
"exposure_modes":
{
"normal":
{
"shutter": [ 10, 30000, 60000 ],
"gain": [ 1.0, 2.0, 8.0 ]
},
"short":
{
"shutter": [ 10, 5000, 10000, 20000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 8.0 ]
},
"long":
{
"shutter": [ 1000, 30000, 60000, 90000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 12.0 ]
}
},
"constraint_modes":
{
"normal": [ ],
"highlight": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.5,
1000, 0.5
]
},
{
"bound": "UPPER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.8,
1000, 0.8
]
}
]
},
"y_target":
[
0, 0.16,
1000, 0.16,
10000, 0.16
]
}
},
{
"rpi.alsc":
{
"omega": 1.3,
"n_iter": 100,
"luminance_strength": 0.7,
"luminance_lut":
[
2.844, 2.349, 2.018, 1.775, 1.599, 1.466, 1.371, 1.321, 1.306, 1.316, 1.357, 1.439, 1.552, 1.705, 1.915, 2.221,
2.576, 2.151, 1.851, 1.639, 1.478, 1.358, 1.272, 1.231, 1.218, 1.226, 1.262, 1.335, 1.438, 1.571, 1.766, 2.067,
2.381, 2.005, 1.739, 1.545, 1.389, 1.278, 1.204, 1.166, 1.153, 1.161, 1.194, 1.263, 1.356, 1.489, 1.671, 1.943,
2.242, 1.899, 1.658, 1.481, 1.329, 1.225, 1.156, 1.113, 1.096, 1.107, 1.143, 1.201, 1.289, 1.423, 1.607, 1.861,
2.152, 1.831, 1.602, 1.436, 1.291, 1.193, 1.121, 1.069, 1.047, 1.062, 1.107, 1.166, 1.249, 1.384, 1.562, 1.801,
2.104, 1.795, 1.572, 1.407, 1.269, 1.174, 1.099, 1.041, 1.008, 1.029, 1.083, 1.146, 1.232, 1.364, 1.547, 1.766,
2.104, 1.796, 1.572, 1.403, 1.264, 1.171, 1.097, 1.036, 1.001, 1.025, 1.077, 1.142, 1.231, 1.363, 1.549, 1.766,
2.148, 1.827, 1.594, 1.413, 1.276, 1.184, 1.114, 1.062, 1.033, 1.049, 1.092, 1.153, 1.242, 1.383, 1.577, 1.795,
2.211, 1.881, 1.636, 1.455, 1.309, 1.214, 1.149, 1.104, 1.081, 1.089, 1.125, 1.184, 1.273, 1.423, 1.622, 1.846,
2.319, 1.958, 1.698, 1.516, 1.362, 1.262, 1.203, 1.156, 1.137, 1.142, 1.171, 1.229, 1.331, 1.484, 1.682, 1.933,
2.459, 2.072, 1.789, 1.594, 1.441, 1.331, 1.261, 1.219, 1.199, 1.205, 1.232, 1.301, 1.414, 1.571, 1.773, 2.052,
2.645, 2.206, 1.928, 1.728, 1.559, 1.451, 1.352, 1.301, 1.282, 1.289, 1.319, 1.395, 1.519, 1.685, 1.904, 2.227
],
"sigma": 0.005,
"sigma_Cb": 0.005
}
},
{
"rpi.contrast":
{
"ce_enable": 1,
"gamma_curve":
[
0, 0,
1024, 5040,
2048, 9338,
3072, 12356,
4096, 15312,
5120, 18051,
6144, 20790,
7168, 23193,
8192, 25744,
9216, 27942,
10240, 30035,
11264, 32005,
12288, 33975,
13312, 35815,
14336, 37600,
15360, 39168,
16384, 40642,
18432, 43379,
20480, 45749,
22528, 47753,
24576, 49621,
26624, 51253,
28672, 52698,
30720, 53796,
32768, 54876,
36864, 57012,
40960, 58656,
45056, 59954,
49152, 61183,
53248, 62355,
57344, 63419,
61440, 64476,
65535, 65535
]
}
},
{
"rpi.sharpen": { }
},
{
"rpi.ccm":
{
"ccms": [
{
"ct": 3900,
"ccm":
[
1.54659, -0.17707, -0.36953,
-0.51471, 1.72733, -0.21262,
0.06667, -0.92279, 1.85612
]
}
]
}
}
]
} |
0 | repos/libcamera/src/ipa/rpi/vc4 | repos/libcamera/src/ipa/rpi/vc4/data/ov64a40.json | {
"version": 2.0,
"target": "bcm2835",
"algorithms": [
{
"rpi.black_level":
{
"black_level": 4096
}
},
{
"rpi.dpc": { }
},
{
"rpi.lux":
{
"reference_shutter_speed": 17861,
"reference_gain": 2.0,
"reference_aperture": 1.0,
"reference_lux": 1073,
"reference_Y": 9022
}
},
{
"rpi.noise":
{
"reference_constant": 0,
"reference_slope": 2.984
}
},
{
"rpi.geq":
{
"offset": 215,
"slope": 0.01121
}
},
{
"rpi.sdn": { }
},
{
"rpi.awb":
{
"priors": [
{
"lux": 0,
"prior":
[
2000, 1.0,
3000, 0.0,
13000, 0.0
]
},
{
"lux": 800,
"prior":
[
2000, 0.0,
6000, 2.0,
13000, 2.0
]
},
{
"lux": 1500,
"prior":
[
2000, 0.0,
4000, 1.0,
6000, 6.0,
6500, 7.0,
7000, 1.0,
13000, 1.0
]
}
],
"modes":
{
"auto":
{
"lo": 2500,
"hi": 8000
},
"incandescent":
{
"lo": 2500,
"hi": 3000
},
"tungsten":
{
"lo": 3000,
"hi": 3500
},
"fluorescent":
{
"lo": 4000,
"hi": 4700
},
"indoor":
{
"lo": 3000,
"hi": 5000
},
"daylight":
{
"lo": 5500,
"hi": 6500
                }
            },
"bayes": 1,
"ct_curve":
[
2300.0, 1.0522, 0.4091,
2700.0, 0.7884, 0.4327,
3000.0, 0.7597, 0.4421,
4000.0, 0.5972, 0.5404,
4150.0, 0.5598, 0.5779,
6500.0, 0.4388, 0.7582
],
"sensitivity_r": 1.0,
"sensitivity_b": 1.0,
"transverse_pos": 0.0558,
"transverse_neg": 0.04278
}
},
{
"rpi.agc":
{
"metering_modes":
{
"centre-weighted":
{
"weights":
[
3, 3, 3, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0
]
},
"spot":
{
"weights":
[
2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
},
"matrix":
{
"weights":
[
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
]
}
},
"exposure_modes":
{
"normal":
{
"shutter": [ 100, 10000, 30000, 60000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 6.0 ]
},
"short":
{
"shutter": [ 100, 5000, 10000, 20000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 6.0 ]
}
},
"constraint_modes":
{
"normal": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.5,
1000, 0.5
]
}
],
"highlight": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.5,
1000, 0.5
]
},
{
"bound": "UPPER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.8,
1000, 0.8
]
}
]
},
"y_target":
[
0, 0.16,
1000, 0.165,
10000, 0.17
]
}
},
{
"rpi.alsc":
{
"omega": 1.3,
"n_iter": 100,
"luminance_strength": 0.8,
"calibrations_Cr": [
{
"ct": 6500,
"table":
[
2.437, 2.415, 2.392, 2.378, 2.369, 2.353, 2.344, 2.336, 2.329, 2.325, 2.325, 2.325, 2.333, 2.344, 2.366, 2.381,
2.434, 2.405, 2.386, 2.369, 2.361, 2.334, 2.314, 2.302, 2.295, 2.289, 2.289, 2.303, 2.327, 2.334, 2.356, 2.378,
2.434, 2.405, 2.385, 2.363, 2.334, 2.314, 2.289, 2.277, 2.269, 2.262, 2.262, 2.283, 2.303, 2.328, 2.352, 2.375,
2.434, 2.405, 2.385, 2.348, 2.315, 2.289, 2.277, 2.258, 2.251, 2.242, 2.249, 2.258, 2.283, 2.321, 2.352, 2.375,
2.434, 2.413, 2.385, 2.343, 2.311, 2.282, 2.258, 2.251, 2.229, 2.233, 2.242, 2.251, 2.281, 2.321, 2.356, 2.375,
2.437, 2.418, 2.388, 2.343, 2.311, 2.282, 2.251, 2.229, 2.221, 2.226, 2.233, 2.251, 2.281, 2.322, 2.361, 2.381,
2.444, 2.422, 2.393, 2.351, 2.314, 2.284, 2.251, 2.227, 2.221, 2.227, 2.234, 2.256, 2.287, 2.326, 2.366, 2.389,
2.445, 2.424, 2.395, 2.353, 2.316, 2.287, 2.266, 2.251, 2.228, 2.234, 2.251, 2.259, 2.289, 2.331, 2.371, 2.395,
2.445, 2.424, 2.399, 2.364, 2.329, 2.308, 2.287, 2.266, 2.259, 2.254, 2.259, 2.283, 2.304, 2.343, 2.375, 2.395,
2.445, 2.425, 2.407, 2.385, 2.364, 2.329, 2.308, 2.299, 2.291, 2.284, 2.284, 2.304, 2.335, 2.354, 2.381, 2.399,
2.449, 2.427, 2.418, 2.407, 2.385, 2.364, 2.349, 2.338, 2.333, 2.326, 2.326, 2.335, 2.354, 2.374, 2.389, 2.408,
2.458, 2.441, 2.427, 2.411, 2.403, 2.395, 2.391, 2.383, 2.375, 2.369, 2.369, 2.369, 2.369, 2.385, 2.408, 2.418
]
}
],
"calibrations_Cb": [
{
"ct": 6500,
"table":
[
1.297, 1.297, 1.289, 1.289, 1.289, 1.291, 1.293, 1.294, 1.294, 1.294, 1.294, 1.296, 1.298, 1.304, 1.312, 1.313,
1.297, 1.289, 1.286, 1.286, 1.287, 1.289, 1.292, 1.294, 1.294, 1.294, 1.294, 1.294, 1.296, 1.298, 1.306, 1.312,
1.289, 1.286, 1.283, 1.283, 1.285, 1.287, 1.291, 1.294, 1.294, 1.292, 1.291, 1.289, 1.293, 1.294, 1.298, 1.304,
1.283, 1.282, 1.279, 1.281, 1.282, 1.285, 1.287, 1.294, 1.294, 1.291, 1.289, 1.289, 1.289, 1.293, 1.294, 1.298,
1.281, 1.279, 1.279, 1.279, 1.281, 1.283, 1.287, 1.292, 1.292, 1.291, 1.291, 1.289, 1.289, 1.291, 1.294, 1.297,
1.279, 1.277, 1.277, 1.279, 1.281, 1.282, 1.286, 1.289, 1.291, 1.291, 1.291, 1.291, 1.289, 1.291, 1.293, 1.297,
1.277, 1.275, 1.275, 1.278, 1.279, 1.281, 1.284, 1.287, 1.289, 1.291, 1.291, 1.291, 1.289, 1.289, 1.292, 1.297,
1.277, 1.275, 1.274, 1.275, 1.277, 1.278, 1.279, 1.284, 1.285, 1.285, 1.286, 1.288, 1.289, 1.289, 1.292, 1.297,
1.277, 1.272, 1.272, 1.274, 1.274, 1.277, 1.279, 1.282, 1.284, 1.284, 1.285, 1.286, 1.288, 1.289, 1.292, 1.297,
1.277, 1.272, 1.272, 1.273, 1.274, 1.276, 1.279, 1.282, 1.284, 1.284, 1.286, 1.286, 1.288, 1.289, 1.293, 1.297,
1.279, 1.272, 1.271, 1.272, 1.274, 1.276, 1.279, 1.283, 1.284, 1.284, 1.285, 1.286, 1.288, 1.291, 1.294, 1.299,
1.281, 1.274, 1.271, 1.271, 1.273, 1.276, 1.278, 1.282, 1.284, 1.284, 1.285, 1.286, 1.286, 1.291, 1.295, 1.302
]
}
],
"luminance_lut":
[
3.811, 3.611, 3.038, 2.632, 2.291, 2.044, 1.967, 1.957, 1.957, 1.957, 2.009, 2.222, 2.541, 2.926, 3.455, 3.652,
3.611, 3.135, 2.636, 2.343, 2.044, 1.846, 1.703, 1.626, 1.626, 1.671, 1.796, 1.983, 2.266, 2.549, 3.007, 3.455,
3.135, 2.781, 2.343, 2.044, 1.831, 1.554, 1.411, 1.337, 1.337, 1.379, 1.502, 1.749, 1.983, 2.266, 2.671, 3.007,
2.903, 2.538, 2.149, 1.831, 1.554, 1.401, 1.208, 1.145, 1.145, 1.183, 1.339, 1.502, 1.749, 2.072, 2.446, 2.801,
2.812, 2.389, 2.018, 1.684, 1.401, 1.208, 1.139, 1.028, 1.028, 1.109, 1.183, 1.339, 1.604, 1.939, 2.309, 2.723,
2.799, 2.317, 1.948, 1.606, 1.327, 1.139, 1.028, 1.019, 1.001, 1.021, 1.109, 1.272, 1.531, 1.869, 2.246, 2.717,
2.799, 2.317, 1.948, 1.606, 1.327, 1.139, 1.027, 1.006, 1.001, 1.007, 1.109, 1.272, 1.531, 1.869, 2.246, 2.717,
2.799, 2.372, 1.997, 1.661, 1.378, 1.184, 1.118, 1.019, 1.012, 1.103, 1.158, 1.326, 1.589, 1.926, 2.302, 2.717,
2.884, 2.507, 2.116, 1.795, 1.511, 1.361, 1.184, 1.118, 1.118, 1.158, 1.326, 1.461, 1.726, 2.056, 2.434, 2.799,
3.083, 2.738, 2.303, 1.989, 1.783, 1.511, 1.361, 1.291, 1.291, 1.337, 1.461, 1.726, 1.942, 2.251, 2.657, 2.999,
3.578, 3.083, 2.589, 2.303, 1.989, 1.783, 1.637, 1.563, 1.563, 1.613, 1.743, 1.942, 2.251, 2.537, 2.999, 3.492,
3.764, 3.578, 2.999, 2.583, 2.237, 1.986, 1.913, 1.905, 1.905, 1.905, 1.962, 2.196, 2.525, 2.932, 3.492, 3.659
],
"sigma": 0.005,
"sigma_Cb": 0.005
}
},
{
"rpi.contrast":
{
"ce_enable": 1,
"gamma_curve":
[
0, 0,
1024, 5040,
2048, 9338,
3072, 12356,
4096, 15312,
5120, 18051,
6144, 20790,
7168, 23193,
8192, 25744,
9216, 27942,
10240, 30035,
11264, 32005,
12288, 33975,
13312, 35815,
14336, 37600,
15360, 39168,
16384, 40642,
18432, 43379,
20480, 45749,
22528, 47753,
24576, 49621,
26624, 51253,
28672, 52698,
30720, 53796,
32768, 54876,
36864, 57012,
40960, 58656,
45056, 59954,
49152, 61183,
53248, 62355,
57344, 63419,
61440, 64476,
65535, 65535
]
}
},
{
"rpi.ccm":
{
"ccms": [
{
"ct": 2300,
"ccm":
[
1.77644, -0.14825, -0.62819,
-0.25816, 1.66348, -0.40532,
-0.21633, -1.95132, 3.16765
]
},
{
"ct": 2700,
"ccm":
[
1.53605, 0.03047, -0.56652,
-0.27159, 1.78525, -0.51366,
-0.13581, -1.22128, 2.35709
]
},
{
"ct": 3000,
"ccm":
[
1.72928, -0.18819, -0.54108,
-0.44398, 2.04756, -0.60358,
-0.13203, -0.94711, 2.07913
]
},
{
"ct": 4000,
"ccm":
[
1.69895, -0.23055, -0.46841,
-0.33934, 1.80391, -0.46456,
-0.13902, -0.75385, 1.89287
]
},
{
"ct": 4150,
"ccm":
[
2.08494, -0.68698, -0.39796,
-0.37928, 1.78795, -0.40867,
-0.11537, -0.74686, 1.86223
]
},
{
"ct": 6500,
"ccm":
[
1.69813, -0.27304, -0.42509,
-0.23364, 1.87586, -0.64221,
-0.07587, -0.62348, 1.69935
]
}
]
}
},
{
"rpi.sharpen": { }
},
{
"rpi.af":
{
"ranges":
{
"normal":
{
"min": 0.0,
"max": 12.0,
"default": 1.0
},
"macro":
{
"min": 3.0,
"max": 15.0,
"default": 4.0
}
},
"speeds":
{
"normal":
{
"step_coarse": 1.0,
"step_fine": 0.25,
"contrast_ratio": 0.75,
"pdaf_gain": -0.02,
"pdaf_squelch": 0.125,
"max_slew": 2.0,
"pdaf_frames": 0,
"dropout_frames": 0,
"step_frames": 4
}
},
"conf_epsilon": 8,
"conf_thresh": 16,
"conf_clip": 512,
"skip_frames": 5,
"map": [ 0.0, 0, 15.0, 1023 ]
}
}
]
} |
0 | repos/libcamera/src/ipa/rpi/vc4 | repos/libcamera/src/ipa/rpi/vc4/data/imx219.json | {
"version": 2.0,
"target": "bcm2835",
"algorithms": [
{
"rpi.black_level":
{
"black_level": 4096
}
},
{
"rpi.dpc": { }
},
{
"rpi.lux":
{
"reference_shutter_speed": 27685,
"reference_gain": 1.0,
"reference_aperture": 1.0,
"reference_lux": 998,
"reference_Y": 12744
}
},
{
"rpi.noise":
{
"reference_constant": 0,
"reference_slope": 3.67
}
},
{
"rpi.geq":
{
"offset": 204,
"slope": 0.01633
}
},
{
"rpi.sdn": { }
},
{
"rpi.awb":
{
"priors": [
{
"lux": 0,
"prior":
[
2000, 1.0,
3000, 0.0,
13000, 0.0
]
},
{
"lux": 800,
"prior":
[
2000, 0.0,
6000, 2.0,
13000, 2.0
]
},
{
"lux": 1500,
"prior":
[
2000, 0.0,
4000, 1.0,
6000, 6.0,
6500, 7.0,
7000, 1.0,
13000, 1.0
]
}
],
"modes":
{
"auto":
{
"lo": 2500,
"hi": 8000
},
"incandescent":
{
"lo": 2500,
"hi": 3000
},
"tungsten":
{
"lo": 3000,
"hi": 3500
},
"fluorescent":
{
"lo": 4000,
"hi": 4700
},
"indoor":
{
"lo": 3000,
"hi": 5000
},
"daylight":
{
"lo": 5500,
"hi": 6500
},
"cloudy":
{
"lo": 7000,
"hi": 8600
}
},
"bayes": 1,
"ct_curve":
[
2498.0, 0.9309, 0.3599,
2911.0, 0.8682, 0.4283,
2919.0, 0.8358, 0.4621,
3627.0, 0.7646, 0.5327,
4600.0, 0.6079, 0.6721,
5716.0, 0.5712, 0.7017,
8575.0, 0.4331, 0.8037
],
"sensitivity_r": 1.05,
"sensitivity_b": 1.05,
"transverse_pos": 0.04791,
"transverse_neg": 0.04881
}
},
{
"rpi.agc":
{
"channels": [
{
"metering_modes":
{
"centre-weighted":
{
"weights":
[
3, 3, 3, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0
]
},
"spot":
{
"weights":
[
2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
},
"matrix":
{
"weights":
[
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
]
}
},
"exposure_modes":
{
"normal":
{
"shutter": [ 100, 10000, 30000, 60000, 66666 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 8.0 ]
},
"short":
{
"shutter": [ 100, 5000, 10000, 20000, 33333 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 8.0 ]
},
"long":
{
"shutter": [ 100, 10000, 30000, 60000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 12.0 ]
}
},
"constraint_modes":
{
"normal": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.5,
1000, 0.5
]
}
],
"highlight": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.5,
1000, 0.5
]
},
{
"bound": "UPPER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.8,
1000, 0.8
]
}
],
"shadows": [
{
"bound": "LOWER",
"q_lo": 0.0,
"q_hi": 0.5,
"y_target":
[
0, 0.17,
1000, 0.17
]
}
]
},
"y_target":
[
0, 0.16,
1000, 0.165,
10000, 0.17
]
},
{
"base_ev": 0.125,
"metering_modes":
{
"centre-weighted":
{
"weights":
[
3, 3, 3, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0
]
},
"spot":
{
"weights":
[
2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
},
"matrix":
{
"weights":
[
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
]
}
},
"exposure_modes":
{
"normal":
{
"shutter": [ 100, 10000, 30000, 60000, 66666 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 8.0 ]
},
"short":
{
"shutter": [ 100, 5000, 10000, 20000, 33333 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 8.0 ]
},
"long":
{
"shutter": [ 100, 10000, 30000, 60000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 12.0 ]
}
},
"constraint_modes":
{
"normal": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.5,
1000, 0.5
]
}
],
"highlight": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.5,
1000, 0.5
]
},
{
"bound": "UPPER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.8,
1000, 0.8
]
}
],
"shadows": [
{
"bound": "LOWER",
"q_lo": 0.0,
"q_hi": 0.5,
"y_target":
[
0, 0.17,
1000, 0.17
]
}
]
},
"y_target":
[
0, 0.16,
1000, 0.165,
10000, 0.17
]
},
{
"base_ev": 1.5,
"metering_modes":
{
"centre-weighted":
{
"weights":
[
3, 3, 3, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0
]
},
"spot":
{
"weights":
[
2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
},
"matrix":
{
"weights":
[
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
]
}
},
"exposure_modes":
{
"normal":
{
"shutter": [ 100, 10000, 30000, 60000, 66666 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 8.0 ]
},
"short":
{
"shutter": [ 100, 5000, 10000, 20000, 33333 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 8.0 ]
},
"long":
{
"shutter": [ 100, 10000, 30000, 60000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 12.0 ]
}
},
"constraint_modes":
{
"normal": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.5,
1000, 0.5
]
}
],
"highlight": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.5,
1000, 0.5
]
},
{
"bound": "UPPER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.8,
1000, 0.8
]
}
],
"shadows": [
{
"bound": "LOWER",
"q_lo": 0.0,
"q_hi": 0.5,
"y_target":
[
0, 0.17,
1000, 0.17
]
}
]
},
"y_target":
[
0, 0.16,
1000, 0.165,
10000, 0.17
]
}
]
}
},
{
"rpi.alsc":
{
"omega": 1.3,
"n_iter": 100,
"luminance_strength": 0.7,
"calibrations_Cr": [
{
"ct": 3000,
"table":
[
1.487, 1.481, 1.481, 1.445, 1.389, 1.327, 1.307, 1.307, 1.307, 1.309, 1.341, 1.405, 1.458, 1.494, 1.494, 1.497,
1.491, 1.481, 1.448, 1.397, 1.331, 1.275, 1.243, 1.229, 1.229, 1.249, 1.287, 1.349, 1.409, 1.463, 1.494, 1.497,
1.491, 1.469, 1.405, 1.331, 1.275, 1.217, 1.183, 1.172, 1.172, 1.191, 1.231, 1.287, 1.349, 1.424, 1.484, 1.499,
1.487, 1.444, 1.363, 1.283, 1.217, 1.183, 1.148, 1.138, 1.138, 1.159, 1.191, 1.231, 1.302, 1.385, 1.461, 1.492,
1.481, 1.423, 1.334, 1.253, 1.189, 1.148, 1.135, 1.119, 1.123, 1.137, 1.159, 1.203, 1.272, 1.358, 1.442, 1.488,
1.479, 1.413, 1.321, 1.236, 1.176, 1.139, 1.118, 1.114, 1.116, 1.123, 1.149, 1.192, 1.258, 1.344, 1.432, 1.487,
1.479, 1.413, 1.321, 1.236, 1.176, 1.139, 1.116, 1.114, 1.115, 1.123, 1.149, 1.192, 1.258, 1.344, 1.432, 1.487,
1.479, 1.425, 1.336, 1.251, 1.189, 1.149, 1.136, 1.118, 1.121, 1.138, 1.158, 1.206, 1.275, 1.358, 1.443, 1.488,
1.488, 1.448, 1.368, 1.285, 1.219, 1.189, 1.149, 1.139, 1.139, 1.158, 1.195, 1.235, 1.307, 1.387, 1.462, 1.493,
1.496, 1.475, 1.411, 1.337, 1.284, 1.219, 1.189, 1.176, 1.176, 1.195, 1.235, 1.296, 1.356, 1.429, 1.487, 1.501,
1.495, 1.489, 1.458, 1.407, 1.337, 1.287, 1.253, 1.239, 1.239, 1.259, 1.296, 1.356, 1.419, 1.472, 1.499, 1.499,
1.494, 1.489, 1.489, 1.453, 1.398, 1.336, 1.317, 1.317, 1.317, 1.321, 1.351, 1.416, 1.467, 1.501, 1.501, 1.499
]
},
{
"ct": 3850,
"table":
[
1.694, 1.688, 1.688, 1.649, 1.588, 1.518, 1.495, 1.495, 1.495, 1.497, 1.532, 1.602, 1.659, 1.698, 1.698, 1.703,
1.698, 1.688, 1.653, 1.597, 1.525, 1.464, 1.429, 1.413, 1.413, 1.437, 1.476, 1.542, 1.606, 1.665, 1.698, 1.703,
1.697, 1.673, 1.605, 1.525, 1.464, 1.401, 1.369, 1.354, 1.354, 1.377, 1.417, 1.476, 1.542, 1.623, 1.687, 1.705,
1.692, 1.646, 1.561, 1.472, 1.401, 1.368, 1.337, 1.323, 1.324, 1.348, 1.377, 1.417, 1.492, 1.583, 1.661, 1.697,
1.686, 1.625, 1.528, 1.439, 1.372, 1.337, 1.321, 1.311, 1.316, 1.324, 1.348, 1.389, 1.461, 1.553, 1.642, 1.694,
1.684, 1.613, 1.514, 1.423, 1.359, 1.328, 1.311, 1.306, 1.306, 1.316, 1.339, 1.378, 1.446, 1.541, 1.633, 1.693,
1.684, 1.613, 1.514, 1.423, 1.359, 1.328, 1.311, 1.305, 1.305, 1.316, 1.339, 1.378, 1.446, 1.541, 1.633, 1.693,
1.685, 1.624, 1.529, 1.438, 1.372, 1.336, 1.324, 1.309, 1.314, 1.323, 1.348, 1.392, 1.462, 1.555, 1.646, 1.694,
1.692, 1.648, 1.561, 1.473, 1.403, 1.372, 1.336, 1.324, 1.324, 1.348, 1.378, 1.423, 1.495, 1.585, 1.667, 1.701,
1.701, 1.677, 1.608, 1.527, 1.471, 1.403, 1.375, 1.359, 1.359, 1.378, 1.423, 1.488, 1.549, 1.631, 1.694, 1.709,
1.702, 1.694, 1.656, 1.601, 1.527, 1.473, 1.441, 1.424, 1.424, 1.443, 1.488, 1.549, 1.621, 1.678, 1.706, 1.707,
1.699, 1.694, 1.694, 1.654, 1.593, 1.525, 1.508, 1.508, 1.508, 1.509, 1.546, 1.614, 1.674, 1.708, 1.708, 1.707
]
},
{
"ct": 6000,
"table":
[
2.179, 2.176, 2.176, 2.125, 2.048, 1.975, 1.955, 1.954, 1.954, 1.956, 1.993, 2.071, 2.141, 2.184, 2.185, 2.188,
2.189, 2.176, 2.128, 2.063, 1.973, 1.908, 1.872, 1.856, 1.856, 1.876, 1.922, 1.999, 2.081, 2.144, 2.184, 2.192,
2.187, 2.152, 2.068, 1.973, 1.907, 1.831, 1.797, 1.786, 1.786, 1.804, 1.853, 1.922, 1.999, 2.089, 2.166, 2.191,
2.173, 2.117, 2.013, 1.908, 1.831, 1.791, 1.755, 1.749, 1.749, 1.767, 1.804, 1.853, 1.939, 2.041, 2.135, 2.181,
2.166, 2.089, 1.975, 1.869, 1.792, 1.755, 1.741, 1.731, 1.734, 1.749, 1.767, 1.818, 1.903, 2.005, 2.111, 2.173,
2.165, 2.074, 1.956, 1.849, 1.777, 1.742, 1.729, 1.725, 1.729, 1.734, 1.758, 1.804, 1.884, 1.991, 2.099, 2.172,
2.165, 2.074, 1.956, 1.849, 1.777, 1.742, 1.727, 1.724, 1.725, 1.734, 1.758, 1.804, 1.884, 1.991, 2.099, 2.172,
2.166, 2.085, 1.975, 1.869, 1.791, 1.755, 1.741, 1.729, 1.733, 1.749, 1.769, 1.819, 1.904, 2.009, 2.114, 2.174,
2.174, 2.118, 2.015, 1.913, 1.831, 1.791, 1.755, 1.749, 1.749, 1.769, 1.811, 1.855, 1.943, 2.047, 2.139, 2.183,
2.187, 2.151, 2.072, 1.979, 1.911, 1.831, 1.801, 1.791, 1.791, 1.811, 1.855, 1.933, 2.006, 2.101, 2.173, 2.197,
2.189, 2.178, 2.132, 2.069, 1.979, 1.913, 1.879, 1.867, 1.867, 1.891, 1.933, 2.006, 2.091, 2.156, 2.195, 2.197,
2.181, 2.179, 2.178, 2.131, 2.057, 1.981, 1.965, 1.965, 1.965, 1.969, 1.999, 2.083, 2.153, 2.197, 2.197, 2.196
]
}
],
"calibrations_Cb": [
{
"ct": 3000,
"table":
[
1.967, 1.961, 1.955, 1.953, 1.954, 1.957, 1.961, 1.963, 1.963, 1.961, 1.959, 1.957, 1.954, 1.951, 1.951, 1.955,
1.961, 1.959, 1.957, 1.956, 1.962, 1.967, 1.975, 1.979, 1.979, 1.975, 1.971, 1.967, 1.957, 1.952, 1.951, 1.951,
1.959, 1.959, 1.959, 1.966, 1.976, 1.989, 1.999, 2.004, 2.003, 1.997, 1.991, 1.981, 1.967, 1.956, 1.951, 1.951,
1.959, 1.962, 1.967, 1.978, 1.993, 2.009, 2.021, 2.028, 2.026, 2.021, 2.011, 1.995, 1.981, 1.964, 1.953, 1.951,
1.961, 1.965, 1.977, 1.993, 2.009, 2.023, 2.041, 2.047, 2.047, 2.037, 2.024, 2.011, 1.995, 1.975, 1.958, 1.953,
1.963, 1.968, 1.981, 2.001, 2.019, 2.039, 2.046, 2.052, 2.052, 2.051, 2.035, 2.021, 2.001, 1.978, 1.959, 1.955,
1.961, 1.966, 1.981, 2.001, 2.019, 2.038, 2.043, 2.051, 2.052, 2.042, 2.034, 2.019, 2.001, 1.978, 1.959, 1.954,
1.957, 1.961, 1.972, 1.989, 2.003, 2.021, 2.038, 2.039, 2.039, 2.034, 2.019, 2.004, 1.988, 1.971, 1.954, 1.949,
1.952, 1.953, 1.959, 1.972, 1.989, 2.003, 2.016, 2.019, 2.019, 2.014, 2.003, 1.988, 1.971, 1.955, 1.948, 1.947,
1.949, 1.948, 1.949, 1.957, 1.971, 1.978, 1.991, 1.994, 1.994, 1.989, 1.979, 1.967, 1.954, 1.946, 1.947, 1.947,
1.949, 1.946, 1.944, 1.946, 1.949, 1.954, 1.962, 1.967, 1.967, 1.963, 1.956, 1.948, 1.943, 1.943, 1.946, 1.949,
1.951, 1.946, 1.944, 1.942, 1.943, 1.943, 1.947, 1.948, 1.949, 1.947, 1.945, 1.941, 1.938, 1.939, 1.948, 1.952
]
},
{
"ct": 3850,
"table":
[
1.726, 1.724, 1.722, 1.723, 1.731, 1.735, 1.743, 1.746, 1.746, 1.741, 1.735, 1.729, 1.725, 1.721, 1.721, 1.721,
1.724, 1.723, 1.723, 1.727, 1.735, 1.744, 1.749, 1.756, 1.756, 1.749, 1.744, 1.735, 1.727, 1.719, 1.719, 1.719,
1.723, 1.723, 1.724, 1.735, 1.746, 1.759, 1.767, 1.775, 1.775, 1.766, 1.758, 1.746, 1.735, 1.723, 1.718, 1.716,
1.723, 1.725, 1.732, 1.746, 1.759, 1.775, 1.782, 1.792, 1.792, 1.782, 1.772, 1.759, 1.745, 1.729, 1.718, 1.716,
1.725, 1.729, 1.738, 1.756, 1.775, 1.785, 1.796, 1.803, 1.804, 1.794, 1.783, 1.772, 1.757, 1.736, 1.722, 1.718,
1.728, 1.731, 1.741, 1.759, 1.781, 1.795, 1.803, 1.806, 1.808, 1.805, 1.791, 1.779, 1.762, 1.739, 1.722, 1.721,
1.727, 1.731, 1.741, 1.759, 1.781, 1.791, 1.799, 1.804, 1.806, 1.801, 1.791, 1.779, 1.762, 1.739, 1.722, 1.717,
1.722, 1.724, 1.733, 1.751, 1.768, 1.781, 1.791, 1.796, 1.799, 1.791, 1.781, 1.766, 1.754, 1.731, 1.717, 1.714,
1.718, 1.718, 1.724, 1.737, 1.752, 1.768, 1.776, 1.782, 1.784, 1.781, 1.766, 1.754, 1.737, 1.724, 1.713, 1.709,
1.716, 1.715, 1.716, 1.725, 1.737, 1.749, 1.756, 1.763, 1.764, 1.762, 1.749, 1.737, 1.724, 1.717, 1.709, 1.708,
1.715, 1.714, 1.712, 1.715, 1.722, 1.729, 1.736, 1.741, 1.742, 1.739, 1.731, 1.723, 1.717, 1.712, 1.711, 1.709,
1.716, 1.714, 1.711, 1.712, 1.715, 1.719, 1.723, 1.728, 1.731, 1.729, 1.723, 1.718, 1.711, 1.711, 1.713, 1.713
]
},
{
"ct": 6000,
"table":
[
1.374, 1.372, 1.373, 1.374, 1.375, 1.378, 1.378, 1.381, 1.382, 1.382, 1.378, 1.373, 1.372, 1.369, 1.365, 1.365,
1.371, 1.371, 1.372, 1.374, 1.378, 1.381, 1.384, 1.386, 1.388, 1.387, 1.384, 1.377, 1.372, 1.368, 1.364, 1.362,
1.369, 1.371, 1.372, 1.377, 1.383, 1.391, 1.394, 1.396, 1.397, 1.395, 1.391, 1.382, 1.374, 1.369, 1.362, 1.361,
1.369, 1.371, 1.375, 1.383, 1.391, 1.399, 1.402, 1.404, 1.405, 1.403, 1.398, 1.391, 1.379, 1.371, 1.363, 1.361,
1.371, 1.373, 1.378, 1.388, 1.399, 1.407, 1.411, 1.413, 1.413, 1.411, 1.405, 1.397, 1.385, 1.374, 1.366, 1.362,
1.371, 1.374, 1.379, 1.389, 1.405, 1.411, 1.414, 1.414, 1.415, 1.415, 1.411, 1.401, 1.388, 1.376, 1.367, 1.363,
1.371, 1.373, 1.379, 1.389, 1.405, 1.408, 1.413, 1.414, 1.414, 1.413, 1.409, 1.401, 1.388, 1.376, 1.367, 1.362,
1.366, 1.369, 1.374, 1.384, 1.396, 1.404, 1.407, 1.408, 1.408, 1.408, 1.401, 1.395, 1.382, 1.371, 1.363, 1.359,
1.364, 1.365, 1.368, 1.375, 1.386, 1.396, 1.399, 1.401, 1.399, 1.399, 1.395, 1.385, 1.374, 1.365, 1.359, 1.357,
1.361, 1.363, 1.365, 1.368, 1.377, 1.384, 1.388, 1.391, 1.391, 1.388, 1.385, 1.375, 1.366, 1.361, 1.358, 1.356,
1.361, 1.362, 1.362, 1.364, 1.367, 1.373, 1.376, 1.377, 1.377, 1.375, 1.373, 1.366, 1.362, 1.358, 1.358, 1.358,
1.361, 1.362, 1.362, 1.362, 1.363, 1.367, 1.369, 1.368, 1.367, 1.367, 1.367, 1.364, 1.358, 1.357, 1.358, 1.359
]
}
],
"luminance_lut":
[
2.716, 2.568, 2.299, 2.065, 1.845, 1.693, 1.605, 1.597, 1.596, 1.634, 1.738, 1.914, 2.145, 2.394, 2.719, 2.901,
2.593, 2.357, 2.093, 1.876, 1.672, 1.528, 1.438, 1.393, 1.394, 1.459, 1.569, 1.731, 1.948, 2.169, 2.481, 2.756,
2.439, 2.197, 1.922, 1.691, 1.521, 1.365, 1.266, 1.222, 1.224, 1.286, 1.395, 1.573, 1.747, 1.988, 2.299, 2.563,
2.363, 2.081, 1.797, 1.563, 1.376, 1.244, 1.152, 1.099, 1.101, 1.158, 1.276, 1.421, 1.607, 1.851, 2.163, 2.455,
2.342, 2.003, 1.715, 1.477, 1.282, 1.152, 1.074, 1.033, 1.035, 1.083, 1.163, 1.319, 1.516, 1.759, 2.064, 2.398,
2.342, 1.985, 1.691, 1.446, 1.249, 1.111, 1.034, 1.004, 1.004, 1.028, 1.114, 1.274, 1.472, 1.716, 2.019, 2.389,
2.342, 1.991, 1.691, 1.446, 1.249, 1.112, 1.034, 1.011, 1.005, 1.035, 1.114, 1.274, 1.472, 1.716, 2.019, 2.389,
2.365, 2.052, 1.751, 1.499, 1.299, 1.171, 1.089, 1.039, 1.042, 1.084, 1.162, 1.312, 1.516, 1.761, 2.059, 2.393,
2.434, 2.159, 1.856, 1.601, 1.403, 1.278, 1.166, 1.114, 1.114, 1.162, 1.266, 1.402, 1.608, 1.847, 2.146, 2.435,
2.554, 2.306, 2.002, 1.748, 1.563, 1.396, 1.299, 1.247, 1.243, 1.279, 1.386, 1.551, 1.746, 1.977, 2.272, 2.518,
2.756, 2.493, 2.195, 1.947, 1.739, 1.574, 1.481, 1.429, 1.421, 1.457, 1.559, 1.704, 1.929, 2.159, 2.442, 2.681,
2.935, 2.739, 2.411, 2.151, 1.922, 1.749, 1.663, 1.628, 1.625, 1.635, 1.716, 1.872, 2.113, 2.368, 2.663, 2.824
],
"sigma": 0.00381,
"sigma_Cb": 0.00216
}
},
{
"rpi.contrast":
{
"ce_enable": 1,
"gamma_curve":
[
0, 0,
1024, 5040,
2048, 9338,
3072, 12356,
4096, 15312,
5120, 18051,
6144, 20790,
7168, 23193,
8192, 25744,
9216, 27942,
10240, 30035,
11264, 32005,
12288, 33975,
13312, 35815,
14336, 37600,
15360, 39168,
16384, 40642,
18432, 43379,
20480, 45749,
22528, 47753,
24576, 49621,
26624, 51253,
28672, 52698,
30720, 53796,
32768, 54876,
36864, 57012,
40960, 58656,
45056, 59954,
49152, 61183,
53248, 62355,
57344, 63419,
61440, 64476,
65535, 65535
]
}
},
{
"rpi.ccm":
{
"ccms": [
{
"ct": 2860,
"ccm":
[
2.12089, -0.52461, -0.59629,
-0.85342, 2.80445, -0.95103,
-0.26897, -1.14788, 2.41685
]
},
{
"ct": 2960,
"ccm":
[
2.26962, -0.54174, -0.72789,
-0.77008, 2.60271, -0.83262,
-0.26036, -1.51254, 2.77289
]
},
{
"ct": 3603,
"ccm":
[
2.18644, -0.66148, -0.52496,
-0.77828, 2.69474, -0.91645,
-0.25239, -0.83059, 2.08298
]
},
{
"ct": 4650,
"ccm":
[
2.18174, -0.70887, -0.47287,
-0.70196, 2.76426, -1.06231,
-0.25157, -0.71978, 1.97135
]
},
{
"ct": 5858,
"ccm":
[
2.32392, -0.88421, -0.43971,
-0.63821, 2.58348, -0.94527,
-0.28541, -0.54112, 1.82653
]
},
{
"ct": 7580,
"ccm":
[
2.21175, -0.53242, -0.67933,
-0.57875, 3.07922, -1.50047,
-0.27709, -0.73338, 2.01048
]
}
]
}
},
{
"rpi.sharpen": { }
},
{
"rpi.hdr":
{
"MultiExposureUnmerged":
{
"cadence": [ 1, 2 ],
"channel_map":
{
"short": 1,
"long": 2
}
}
}
}
]
}
|
0 | repos/libcamera/src/ipa/rpi/vc4 | repos/libcamera/src/ipa/rpi/vc4/data/imx708_wide_noir.json | {
"version": 2.0,
"target": "bcm2835",
"algorithms": [
{
"rpi.black_level":
{
"black_level": 4096
}
},
{
"rpi.dpc": { }
},
{
"rpi.lux":
{
"reference_shutter_speed": 9989,
"reference_gain": 1.23,
"reference_aperture": 1.0,
"reference_lux": 980,
"reference_Y": 8345
}
},
{
"rpi.noise":
{
"reference_constant": 16.0,
"reference_slope": 4.0
}
},
{
"rpi.geq":
{
"offset": 215,
"slope": 0.00287
}
},
{
"rpi.sdn": { }
},
{
"rpi.awb":
{
"priors": [
{
"lux": 0,
"prior":
[
2000, 1.0,
3000, 0.0,
13000, 0.0
]
},
{
"lux": 800,
"prior":
[
2000, 0.0,
6000, 2.0,
13000, 2.0
]
},
{
"lux": 1500,
"prior":
[
2000, 0.0,
4000, 1.0,
6000, 6.0,
6500, 7.0,
7000, 1.0,
13000, 1.0
]
}
],
"modes":
{
"auto":
{
"lo": 2500,
"hi": 8000
},
"incandescent":
{
"lo": 2500,
"hi": 3000
},
"tungsten":
{
"lo": 3000,
"hi": 3500
},
"fluorescent":
{
"lo": 4000,
"hi": 4700
},
"indoor":
{
"lo": 3000,
"hi": 5000
},
"daylight":
{
"lo": 5500,
"hi": 6500
},
"cloudy":
{
"lo": 7000,
"hi": 8600
}
},
"bayes": 0,
"ct_curve":
[
2750.0, 0.7881, 0.2849,
2940.0, 0.7559, 0.3103,
3650.0, 0.6291, 0.4206,
4625.0, 0.5336, 0.5161,
5715.0, 0.4668, 0.5898
],
"sensitivity_r": 1.05,
"sensitivity_b": 1.05,
"transverse_pos": 0.01165,
"transverse_neg": 0.01601
}
},
{
"rpi.agc":
{
"channels": [
{
"metering_modes":
{
"centre-weighted":
{
"weights":
[
3, 3, 3, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0
]
},
"spot":
{
"weights":
[
2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
},
"matrix":
{
"weights":
[
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
]
}
},
"exposure_modes":
{
"normal":
{
"shutter": [ 100, 15000, 30000, 60000, 120000 ],
"gain": [ 1.0, 1.0, 2.0, 4.0, 6.0 ]
},
"short":
{
"shutter": [ 100, 5000, 10000, 20000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 6.0 ]
},
"long":
{
"shutter": [ 1000, 30000, 60000, 90000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 12.0 ]
}
},
"constraint_modes":
{
"normal": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.2,
1000, 0.2
]
}
],
"highlight": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.2,
1000, 0.2
]
},
{
"bound": "UPPER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.8,
1000, 0.8
]
}
]
},
"y_target":
[
0, 0.16,
1000, 0.165,
10000, 0.17
],
"startup_frames": 5,
"convergence_frames": 6,
"speed": 0.15
},
{
"base_ev": 0.125,
"metering_modes":
{
"centre-weighted":
{
"weights":
[
3, 3, 3, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0
]
},
"spot":
{
"weights":
[
2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
},
"matrix":
{
"weights":
[
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
]
}
},
"exposure_modes":
{
"normal":
{
"shutter": [ 100, 15000, 30000, 60000, 120000 ],
"gain": [ 1.0, 1.0, 2.0, 4.0, 6.0 ]
},
"short":
{
"shutter": [ 100, 5000, 10000, 20000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 6.0 ]
},
"long":
{
"shutter": [ 1000, 30000, 60000, 90000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 12.0 ]
}
},
"constraint_modes":
{
"normal": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.2,
1000, 0.2
]
}
],
"highlight": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.2,
1000, 0.2
]
},
{
"bound": "UPPER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.8,
1000, 0.8
]
}
]
},
"y_target":
[
0, 0.16,
1000, 0.165,
10000, 0.17
],
"startup_frames": 5,
"convergence_frames": 6,
"speed": 0.15
},
{
"base_ev": 1.5,
"metering_modes":
{
"centre-weighted":
{
"weights":
[
3, 3, 3, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0
]
},
"spot":
{
"weights":
[
2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
},
"matrix":
{
"weights":
[
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
]
}
},
"exposure_modes":
{
"normal":
{
"shutter": [ 100, 15000, 30000, 60000, 120000 ],
"gain": [ 1.0, 1.0, 2.0, 4.0, 6.0 ]
},
"short":
{
"shutter": [ 100, 5000, 10000, 20000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 6.0 ]
},
"long":
{
"shutter": [ 1000, 30000, 60000, 90000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 12.0 ]
}
},
"constraint_modes":
{
"normal": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.2,
1000, 0.2
]
}
],
"highlight": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.2,
1000, 0.2
]
},
{
"bound": "UPPER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.8,
1000, 0.8
]
}
]
},
"y_target":
[
0, 0.16,
1000, 0.165,
10000, 0.17
],
"startup_frames": 5,
"convergence_frames": 6,
"speed": 0.15
}
]
}
},
{
"rpi.alsc":
{
"omega": 1.3,
"n_iter": 100,
"luminance_strength": 0.5,
"calibrations_Cr": [
{
"ct": 3000,
"table":
[
1.529, 1.526, 1.522, 1.506, 1.489, 1.473, 1.458, 1.456, 1.456, 1.458, 1.474, 1.493, 1.513, 1.531, 1.541, 1.544,
1.527, 1.523, 1.511, 1.491, 1.474, 1.459, 1.445, 1.441, 1.441, 1.446, 1.461, 1.479, 1.499, 1.521, 1.536, 1.541,
1.524, 1.515, 1.498, 1.477, 1.459, 1.444, 1.431, 1.426, 1.426, 1.435, 1.446, 1.466, 1.487, 1.507, 1.528, 1.538,
1.522, 1.512, 1.491, 1.468, 1.447, 1.431, 1.423, 1.417, 1.418, 1.425, 1.435, 1.455, 1.479, 1.499, 1.523, 1.537,
1.522, 1.509, 1.485, 1.463, 1.441, 1.423, 1.416, 1.413, 1.415, 1.418, 1.429, 1.449, 1.473, 1.495, 1.521, 1.538,
1.522, 1.508, 1.483, 1.461, 1.438, 1.421, 1.413, 1.412, 1.412, 1.415, 1.428, 1.447, 1.471, 1.493, 1.519, 1.538,
1.522, 1.509, 1.484, 1.462, 1.439, 1.421, 1.414, 1.411, 1.412, 1.416, 1.428, 1.447, 1.471, 1.493, 1.519, 1.537,
1.523, 1.511, 1.487, 1.465, 1.443, 1.424, 1.417, 1.413, 1.415, 1.419, 1.429, 1.451, 1.473, 1.494, 1.519, 1.536,
1.524, 1.514, 1.493, 1.471, 1.451, 1.434, 1.424, 1.419, 1.419, 1.428, 1.437, 1.457, 1.477, 1.498, 1.521, 1.538,
1.527, 1.521, 1.503, 1.481, 1.462, 1.449, 1.434, 1.429, 1.429, 1.437, 1.451, 1.469, 1.488, 1.508, 1.527, 1.539,
1.529, 1.527, 1.515, 1.495, 1.477, 1.462, 1.449, 1.444, 1.444, 1.451, 1.467, 1.481, 1.499, 1.519, 1.535, 1.543,
1.534, 1.531, 1.527, 1.512, 1.492, 1.476, 1.463, 1.461, 1.461, 1.464, 1.479, 1.495, 1.515, 1.533, 1.543, 1.546
]
},
{
"ct": 5000,
"table":
[
2.603, 2.599, 2.591, 2.567, 2.539, 2.515, 2.489, 2.489, 2.489, 2.491, 2.516, 2.543, 2.574, 2.597, 2.614, 2.617,
2.596, 2.591, 2.571, 2.542, 2.516, 2.489, 2.464, 2.458, 2.458, 2.469, 2.492, 2.518, 2.547, 2.576, 2.602, 2.614,
2.591, 2.576, 2.546, 2.519, 2.489, 2.464, 2.437, 2.427, 2.427, 2.441, 2.467, 2.492, 2.525, 2.553, 2.586, 2.605,
2.588, 2.568, 2.534, 2.503, 2.472, 2.437, 2.423, 2.409, 2.411, 2.425, 2.441, 2.475, 2.513, 2.541, 2.577, 2.602,
2.588, 2.565, 2.527, 2.494, 2.461, 2.425, 2.409, 2.399, 2.403, 2.409, 2.431, 2.466, 2.503, 2.534, 2.571, 2.601,
2.586, 2.561, 2.525, 2.491, 2.454, 2.418, 2.399, 2.396, 2.395, 2.402, 2.424, 2.461, 2.501, 2.531, 2.567, 2.599,
2.583, 2.559, 2.525, 2.491, 2.454, 2.418, 2.398, 2.393, 2.393, 2.401, 2.423, 2.459, 2.498, 2.531, 2.566, 2.597,
2.583, 2.559, 2.526, 2.494, 2.458, 2.421, 2.404, 2.397, 2.399, 2.404, 2.426, 2.461, 2.501, 2.531, 2.566, 2.596,
2.583, 2.563, 2.531, 2.501, 2.469, 2.435, 2.419, 2.405, 2.404, 2.422, 2.435, 2.471, 2.505, 2.537, 2.572, 2.596,
2.585, 2.571, 2.539, 2.516, 2.486, 2.458, 2.435, 2.424, 2.424, 2.435, 2.459, 2.489, 2.521, 2.546, 2.579, 2.601,
2.589, 2.578, 2.557, 2.532, 2.506, 2.483, 2.458, 2.449, 2.449, 2.459, 2.485, 2.507, 2.535, 2.563, 2.591, 2.605,
2.589, 2.586, 2.575, 2.551, 2.525, 2.503, 2.481, 2.476, 2.476, 2.481, 2.504, 2.526, 2.555, 2.583, 2.604, 2.611
]
}
],
"calibrations_Cb": [
{
"ct": 3000,
"table":
[
3.311, 3.339, 3.369, 3.374, 3.371, 3.363, 3.356, 3.353, 3.353, 3.353, 3.357, 3.362, 3.362, 3.356, 3.328, 3.311,
3.321, 3.354, 3.374, 3.374, 3.368, 3.359, 3.352, 3.349, 3.347, 3.347, 3.349, 3.357, 3.361, 3.359, 3.343, 3.324,
3.334, 3.368, 3.375, 3.374, 3.365, 3.356, 3.349, 3.347, 3.346, 3.346, 3.347, 3.349, 3.358, 3.361, 3.357, 3.336,
3.346, 3.378, 3.378, 3.369, 3.363, 3.358, 3.351, 3.348, 3.347, 3.346, 3.347, 3.348, 3.354, 3.364, 3.363, 3.345,
3.351, 3.381, 3.381, 3.368, 3.361, 3.357, 3.349, 3.347, 3.347, 3.345, 3.345, 3.347, 3.353, 3.364, 3.364, 3.347,
3.353, 3.379, 3.379, 3.366, 3.359, 3.351, 3.348, 3.343, 3.342, 3.342, 3.343, 3.345, 3.351, 3.363, 3.363, 3.347,
3.353, 3.376, 3.376, 3.363, 3.351, 3.347, 3.343, 3.338, 3.336, 3.338, 3.339, 3.343, 3.351, 3.361, 3.361, 3.347,
3.351, 3.374, 3.374, 3.359, 3.351, 3.345, 3.338, 3.334, 3.333, 3.334, 3.336, 3.339, 3.347, 3.358, 3.358, 3.345,
3.346, 3.368, 3.368, 3.359, 3.349, 3.343, 3.336, 3.332, 3.327, 3.331, 3.333, 3.337, 3.346, 3.356, 3.356, 3.341,
3.336, 3.362, 3.364, 3.359, 3.351, 3.342, 3.334, 3.324, 3.324, 3.325, 3.329, 3.336, 3.346, 3.351, 3.351, 3.333,
3.324, 3.349, 3.359, 3.358, 3.352, 3.341, 3.329, 3.323, 3.321, 3.322, 3.326, 3.336, 3.346, 3.347, 3.339, 3.319,
3.311, 3.328, 3.352, 3.354, 3.352, 3.341, 3.329, 3.321, 3.319, 3.321, 3.324, 3.338, 3.343, 3.343, 3.319, 3.312
]
},
{
"ct": 5000,
"table":
[
1.634, 1.647, 1.665, 1.668, 1.668, 1.664, 1.662, 1.662, 1.661, 1.661, 1.661, 1.663, 1.663, 1.659, 1.643, 1.636,
1.639, 1.656, 1.668, 1.669, 1.668, 1.666, 1.664, 1.663, 1.663, 1.661, 1.661, 1.662, 1.663, 1.662, 1.654, 1.642,
1.645, 1.663, 1.669, 1.668, 1.667, 1.667, 1.667, 1.668, 1.668, 1.665, 1.662, 1.661, 1.662, 1.664, 1.661, 1.649,
1.651, 1.669, 1.669, 1.667, 1.666, 1.668, 1.669, 1.672, 1.672, 1.668, 1.665, 1.661, 1.661, 1.665, 1.665, 1.655,
1.654, 1.669, 1.669, 1.666, 1.666, 1.669, 1.672, 1.673, 1.673, 1.671, 1.666, 1.661, 1.661, 1.665, 1.665, 1.659,
1.654, 1.669, 1.669, 1.666, 1.666, 1.669, 1.671, 1.673, 1.672, 1.669, 1.667, 1.661, 1.661, 1.665, 1.665, 1.659,
1.654, 1.668, 1.668, 1.664, 1.663, 1.667, 1.669, 1.671, 1.669, 1.668, 1.665, 1.661, 1.661, 1.663, 1.663, 1.659,
1.653, 1.665, 1.665, 1.661, 1.661, 1.664, 1.667, 1.668, 1.668, 1.665, 1.661, 1.658, 1.659, 1.662, 1.662, 1.657,
1.651, 1.664, 1.664, 1.659, 1.659, 1.661, 1.663, 1.663, 1.662, 1.661, 1.658, 1.656, 1.657, 1.662, 1.662, 1.655,
1.645, 1.661, 1.663, 1.661, 1.659, 1.659, 1.659, 1.657, 1.657, 1.656, 1.654, 1.655, 1.656, 1.661, 1.661, 1.649,
1.641, 1.654, 1.661, 1.661, 1.659, 1.657, 1.655, 1.653, 1.652, 1.651, 1.652, 1.653, 1.657, 1.658, 1.655, 1.644,
1.635, 1.645, 1.661, 1.661, 1.661, 1.655, 1.653, 1.649, 1.648, 1.647, 1.651, 1.653, 1.657, 1.657, 1.646, 1.638
]
}
],
"luminance_lut":
[
3.535, 3.279, 3.049, 2.722, 2.305, 1.958, 1.657, 1.647, 1.647, 1.656, 1.953, 2.289, 2.707, 3.058, 3.325, 3.589,
3.379, 3.157, 2.874, 2.421, 1.973, 1.735, 1.472, 1.388, 1.388, 1.471, 1.724, 1.963, 2.409, 2.877, 3.185, 3.416,
3.288, 3.075, 2.696, 2.169, 1.735, 1.472, 1.311, 1.208, 1.208, 1.306, 1.471, 1.724, 2.159, 2.695, 3.092, 3.321,
3.238, 3.001, 2.534, 1.981, 1.572, 1.311, 1.207, 1.082, 1.082, 1.204, 1.306, 1.563, 1.973, 2.529, 3.008, 3.259,
3.211, 2.938, 2.414, 1.859, 1.468, 1.221, 1.082, 1.036, 1.031, 1.079, 1.217, 1.463, 1.851, 2.403, 2.931, 3.229,
3.206, 2.904, 2.356, 1.802, 1.421, 1.181, 1.037, 1.002, 1.002, 1.032, 1.175, 1.414, 1.793, 2.343, 2.899, 3.223,
3.206, 2.904, 2.356, 1.802, 1.421, 1.181, 1.037, 1.005, 1.005, 1.032, 1.175, 1.414, 1.793, 2.343, 2.899, 3.223,
3.211, 2.936, 2.417, 1.858, 1.468, 1.222, 1.083, 1.037, 1.032, 1.083, 1.218, 1.463, 1.848, 2.403, 2.932, 3.226,
3.234, 2.997, 2.536, 1.979, 1.569, 1.311, 1.206, 1.084, 1.084, 1.204, 1.305, 1.565, 1.966, 2.524, 2.996, 3.251,
3.282, 3.069, 2.697, 2.166, 1.731, 1.471, 1.311, 1.207, 1.207, 1.305, 1.466, 1.729, 2.158, 2.689, 3.077, 3.304,
3.369, 3.146, 2.873, 2.415, 1.964, 1.722, 1.471, 1.382, 1.382, 1.466, 1.722, 1.964, 2.408, 2.871, 3.167, 3.401,
3.524, 3.253, 3.025, 2.691, 2.275, 1.939, 1.657, 1.628, 1.628, 1.654, 1.936, 2.275, 2.687, 3.029, 3.284, 3.574
],
"sigma": 0.00195,
"sigma_Cb": 0.00241
}
},
{
"rpi.contrast":
{
"ce_enable": 1,
"gamma_curve":
[
0, 0,
1024, 5040,
2048, 9338,
3072, 12356,
4096, 15312,
5120, 18051,
6144, 20790,
7168, 23193,
8192, 25744,
9216, 27942,
10240, 30035,
11264, 32005,
12288, 33975,
13312, 35815,
14336, 37600,
15360, 39168,
16384, 40642,
18432, 43379,
20480, 45749,
22528, 47753,
24576, 49621,
26624, 51253,
28672, 52698,
30720, 53796,
32768, 54876,
36864, 57012,
40960, 58656,
45056, 59954,
49152, 61183,
53248, 62355,
57344, 63419,
61440, 64476,
65535, 65535
]
}
},
{
"rpi.ccm":
{
"ccms": [
{
"ct": 2750,
"ccm":
[
1.13004, 0.36392, -0.49396,
-0.45885, 1.68171, -0.22286,
-0.06473, -0.86962, 1.93435
]
},
{
"ct": 2940,
"ccm":
[
1.29876, 0.09627, -0.39503,
-0.43085, 1.60258, -0.17172,
-0.02638, -0.92581, 1.95218
]
},
{
"ct": 3650,
"ccm":
[
1.57729, -0.29734, -0.27995,
-0.42965, 1.66231, -0.23265,
-0.02183, -0.62331, 1.64514
]
},
{
"ct": 4625,
"ccm":
[
1.52145, -0.22382, -0.29763,
-0.40445, 1.82186, -0.41742,
-0.05732, -0.56222, 1.61954
]
},
{
"ct": 5715,
"ccm":
[
1.67851, -0.39193, -0.28658,
-0.37169, 1.72949, -0.35781,
-0.09556, -0.41951, 1.51508
]
}
]
}
},
{
"rpi.sharpen": { }
},
{
"rpi.af":
{
"ranges":
{
"normal":
{
"min": 0.0,
"max": 12.0,
"default": 1.0
},
"macro":
{
"min": 4.0,
"max": 32.0,
"default": 6.0
}
},
"speeds":
{
"normal":
{
"step_coarse": 2.0,
"step_fine": 0.5,
"contrast_ratio": 0.75,
"pdaf_gain": -0.03,
"pdaf_squelch": 0.2,
"max_slew": 4.0,
"pdaf_frames": 20,
"dropout_frames": 6,
"step_frames": 4
},
"fast":
{
"step_coarse": 2.0,
"step_fine": 0.5,
"contrast_ratio": 0.75,
"pdaf_gain": -0.05,
"pdaf_squelch": 0.2,
"max_slew": 5.0,
"pdaf_frames": 16,
"dropout_frames": 6,
"step_frames": 4
}
},
"conf_epsilon": 8,
"conf_thresh": 12,
"conf_clip": 512,
"skip_frames": 5,
"map": [ 0.0, 420, 35.0, 920 ]
}
},
{
"rpi.hdr":
{
"MultiExposureUnmerged":
{
"cadence": [ 1, 2 ],
"channel_map":
{
"short": 1,
"long": 2
}
}
}
}
]
} |
0 | repos/libcamera/src/ipa/rpi/vc4 | repos/libcamera/src/ipa/rpi/vc4/data/imx378.json | {
"version": 2.0,
"target": "bcm2835",
"algorithms": [
{
"rpi.black_level":
{
"black_level": 4096
}
},
{
"rpi.dpc": { }
},
{
"rpi.lux":
{
"reference_shutter_speed": 9999,
"reference_gain": 1.95,
"reference_aperture": 1.0,
"reference_lux": 1000,
"reference_Y": 12996
}
},
{
"rpi.noise":
{
"reference_constant": 0,
"reference_slope": 2.641
}
},
{
"rpi.geq":
{
"offset": 235,
"slope": 0.00902
}
},
{
"rpi.sdn": { }
},
{
"rpi.awb":
{
"priors": [
{
"lux": 0,
"prior":
[
2000, 1.0,
3000, 0.0,
13000, 0.0
]
},
{
"lux": 800,
"prior":
[
2000, 0.0,
6000, 2.0,
13000, 2.0
]
},
{
"lux": 1500,
"prior":
[
2000, 0.0,
4000, 1.0,
6000, 6.0,
6500, 7.0,
7000, 1.0,
13000, 1.0
]
}
],
"modes":
{
"auto":
{
"lo": 2500,
"hi": 8000
},
"incandescent":
{
"lo": 2500,
"hi": 3000
},
"tungsten":
{
"lo": 3000,
"hi": 3500
},
"fluorescent":
{
"lo": 4000,
"hi": 4700
},
"indoor":
{
"lo": 3000,
"hi": 5000
},
"daylight":
{
"lo": 5500,
"hi": 6500
},
"cloudy":
{
"lo": 7000,
"hi": 8100
}
},
"bayes": 1,
"ct_curve":
[
2850.0, 0.6361, 0.3911,
3550.0, 0.5386, 0.5077,
4500.0, 0.4472, 0.6171,
5600.0, 0.3906, 0.6848,
8000.0, 0.3412, 0.7441
],
"sensitivity_r": 1.0,
"sensitivity_b": 1.0,
"transverse_pos": 0.01667,
"transverse_neg": 0.01195
}
},
{
"rpi.agc":
{
"metering_modes":
{
"centre-weighted":
{
"weights":
[
3, 3, 3, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0
]
},
"spot":
{
"weights":
[
2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
},
"matrix":
{
"weights":
[
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
]
}
},
"exposure_modes":
{
"normal":
{
"shutter": [ 100, 10000, 30000, 60000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 8.0 ]
},
"short":
{
"shutter": [ 100, 5000, 10000, 20000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 8.0 ]
},
"long":
{
"shutter": [ 1000, 30000, 60000, 90000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 12.0 ]
}
},
"constraint_modes":
{
"normal": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.5,
1000, 0.5
]
}
],
"highlight": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.5,
1000, 0.5
]
},
{
"bound": "UPPER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.8,
1000, 0.8
]
}
]
},
"y_target":
[
0, 0.16,
1000, 0.165,
10000, 0.17
]
}
},
{
"rpi.alsc":
{
"omega": 1.3,
"n_iter": 100,
"luminance_strength": 0.5,
"calibrations_Cr": [
{
"ct": 2800,
"table":
[
1.604, 1.601, 1.593, 1.581, 1.568, 1.561, 1.561, 1.561, 1.561, 1.567, 1.582, 1.596, 1.609, 1.622, 1.632, 1.636,
1.601, 1.594, 1.586, 1.571, 1.555, 1.546, 1.543, 1.543, 1.547, 1.555, 1.572, 1.584, 1.599, 1.614, 1.625, 1.632,
1.599, 1.586, 1.571, 1.555, 1.542, 1.528, 1.518, 1.518, 1.523, 1.537, 1.555, 1.572, 1.589, 1.607, 1.622, 1.629,
1.597, 1.579, 1.561, 1.542, 1.528, 1.512, 1.493, 1.493, 1.499, 1.523, 1.537, 1.563, 1.582, 1.601, 1.619, 1.629,
1.597, 1.577, 1.557, 1.535, 1.512, 1.493, 1.481, 1.479, 1.492, 1.499, 1.524, 1.555, 1.578, 1.599, 1.619, 1.629,
1.597, 1.577, 1.557, 1.534, 1.508, 1.483, 1.476, 1.476, 1.481, 1.496, 1.522, 1.554, 1.578, 1.599, 1.619, 1.629,
1.597, 1.578, 1.557, 1.534, 1.508, 1.483, 1.481, 1.479, 1.481, 1.496, 1.522, 1.554, 1.579, 1.601, 1.619, 1.631,
1.597, 1.581, 1.562, 1.539, 1.517, 1.504, 1.483, 1.481, 1.496, 1.511, 1.531, 1.561, 1.585, 1.607, 1.623, 1.632,
1.601, 1.589, 1.569, 1.554, 1.539, 1.517, 1.504, 1.504, 1.511, 1.531, 1.553, 1.573, 1.596, 1.614, 1.629, 1.636,
1.609, 1.601, 1.586, 1.569, 1.554, 1.542, 1.535, 1.535, 1.541, 1.553, 1.573, 1.592, 1.608, 1.625, 1.637, 1.645,
1.617, 1.611, 1.601, 1.586, 1.574, 1.565, 1.564, 1.564, 1.571, 1.579, 1.592, 1.608, 1.622, 1.637, 1.646, 1.654,
1.619, 1.617, 1.611, 1.601, 1.588, 1.585, 1.585, 1.585, 1.588, 1.592, 1.607, 1.622, 1.637, 1.645, 1.654, 1.655
]
},
{
"ct": 5500,
"table":
[
2.664, 2.658, 2.645, 2.629, 2.602, 2.602, 2.602, 2.606, 2.617, 2.628, 2.649, 2.677, 2.699, 2.722, 2.736, 2.747,
2.658, 2.653, 2.629, 2.605, 2.576, 2.575, 2.577, 2.592, 2.606, 2.618, 2.629, 2.651, 2.678, 2.707, 2.727, 2.741,
2.649, 2.631, 2.605, 2.576, 2.563, 2.552, 2.552, 2.557, 2.577, 2.604, 2.619, 2.641, 2.669, 2.698, 2.721, 2.741,
2.643, 2.613, 2.583, 2.563, 2.552, 2.531, 2.527, 2.527, 2.551, 2.577, 2.604, 2.638, 2.665, 2.694, 2.721, 2.741,
2.643, 2.606, 2.575, 2.558, 2.531, 2.516, 2.504, 2.516, 2.527, 2.551, 2.596, 2.635, 2.665, 2.694, 2.721, 2.741,
2.643, 2.606, 2.575, 2.558, 2.531, 2.503, 2.501, 2.502, 2.522, 2.551, 2.592, 2.635, 2.669, 2.696, 2.727, 2.744,
2.648, 2.611, 2.579, 2.558, 2.532, 2.511, 2.502, 2.511, 2.522, 2.552, 2.592, 2.642, 2.673, 2.702, 2.731, 2.752,
2.648, 2.619, 2.589, 2.571, 2.556, 2.532, 2.519, 2.522, 2.552, 2.568, 2.605, 2.648, 2.683, 2.715, 2.743, 2.758,
2.659, 2.637, 2.613, 2.589, 2.571, 2.556, 2.555, 2.555, 2.568, 2.605, 2.641, 2.671, 2.699, 2.729, 2.758, 2.776,
2.679, 2.665, 2.637, 2.613, 2.602, 2.599, 2.599, 2.606, 2.619, 2.641, 2.671, 2.698, 2.723, 2.754, 2.776, 2.787,
2.695, 2.684, 2.671, 2.646, 2.636, 2.636, 2.641, 2.648, 2.661, 2.681, 2.698, 2.723, 2.751, 2.776, 2.788, 2.803,
2.702, 2.699, 2.684, 2.671, 2.664, 2.664, 2.664, 2.668, 2.681, 2.698, 2.723, 2.751, 2.773, 2.788, 2.803, 2.805
]
}
],
"calibrations_Cb": [
{
"ct": 2800,
"table":
[
2.876, 2.868, 2.863, 2.851, 2.846, 2.846, 2.847, 2.851, 2.851, 2.857, 2.867, 2.875, 2.889, 2.899, 2.913, 2.926,
2.863, 2.861, 2.856, 2.846, 2.846, 2.847, 2.848, 2.851, 2.857, 2.859, 2.875, 2.882, 2.886, 2.896, 2.909, 2.917,
2.861, 2.856, 2.846, 2.841, 2.841, 2.855, 2.867, 2.875, 2.888, 2.888, 2.885, 2.883, 2.886, 2.889, 2.901, 2.913,
2.858, 2.851, 2.846, 2.846, 2.855, 2.867, 2.884, 2.895, 2.902, 2.902, 2.901, 2.891, 2.891, 2.894, 2.901, 2.909,
2.858, 2.851, 2.846, 2.846, 2.867, 2.884, 2.895, 2.902, 2.909, 2.915, 2.911, 2.901, 2.895, 2.898, 2.904, 2.909,
2.858, 2.851, 2.849, 2.853, 2.874, 2.888, 2.901, 2.909, 2.917, 2.922, 2.917, 2.911, 2.901, 2.899, 2.905, 2.908,
2.861, 2.855, 2.853, 2.855, 2.874, 2.888, 2.901, 2.913, 2.918, 2.922, 2.921, 2.911, 2.901, 2.901, 2.907, 2.908,
2.862, 2.859, 2.855, 2.856, 2.872, 2.885, 2.899, 2.906, 2.915, 2.917, 2.911, 2.907, 2.907, 2.907, 2.908, 2.909,
2.863, 2.863, 2.859, 2.864, 2.871, 2.881, 2.885, 2.899, 2.905, 2.905, 2.904, 2.904, 2.907, 2.909, 2.913, 2.913,
2.866, 2.865, 2.865, 2.867, 2.868, 2.872, 2.881, 2.885, 2.889, 2.894, 2.895, 2.902, 2.906, 2.913, 2.914, 2.917,
2.875, 2.875, 2.871, 2.871, 2.871, 2.871, 2.869, 2.869, 2.878, 2.889, 2.894, 2.895, 2.906, 2.914, 2.917, 2.921,
2.882, 2.879, 2.876, 2.874, 2.871, 2.871, 2.869, 2.869, 2.869, 2.878, 2.891, 2.894, 2.905, 2.914, 2.919, 2.921
]
},
{
"ct": 5500,
"table":
[
1.488, 1.488, 1.488, 1.488, 1.491, 1.492, 1.492, 1.491, 1.491, 1.491, 1.492, 1.495, 1.497, 1.499, 1.499, 1.503,
1.482, 1.485, 1.485, 1.487, 1.489, 1.492, 1.492, 1.492, 1.492, 1.492, 1.494, 1.494, 1.492, 1.491, 1.493, 1.494,
1.482, 1.482, 1.484, 1.485, 1.487, 1.492, 1.496, 1.498, 1.499, 1.498, 1.494, 1.492, 1.491, 1.491, 1.491, 1.491,
1.481, 1.481, 1.482, 1.485, 1.491, 1.496, 1.498, 1.499, 1.501, 1.499, 1.498, 1.493, 1.491, 1.488, 1.488, 1.488,
1.481, 1.481, 1.481, 1.483, 1.491, 1.497, 1.498, 1.499, 1.501, 1.499, 1.498, 1.492, 1.488, 1.485, 1.483, 1.483,
1.479, 1.479, 1.481, 1.482, 1.489, 1.495, 1.497, 1.498, 1.499, 1.499, 1.495, 1.492, 1.485, 1.482, 1.482, 1.481,
1.479, 1.479, 1.479, 1.481, 1.489, 1.494, 1.496, 1.497, 1.497, 1.496, 1.495, 1.489, 1.482, 1.481, 1.479, 1.477,
1.478, 1.478, 1.479, 1.481, 1.487, 1.491, 1.494, 1.496, 1.496, 1.495, 1.492, 1.487, 1.482, 1.479, 1.478, 1.476,
1.478, 1.478, 1.479, 1.482, 1.486, 1.488, 1.491, 1.493, 1.493, 1.492, 1.487, 1.484, 1.481, 1.479, 1.476, 1.476,
1.477, 1.479, 1.481, 1.483, 1.485, 1.486, 1.488, 1.488, 1.487, 1.487, 1.484, 1.483, 1.481, 1.479, 1.476, 1.476,
1.477, 1.479, 1.482, 1.483, 1.484, 1.485, 1.484, 1.482, 1.482, 1.484, 1.483, 1.482, 1.481, 1.479, 1.477, 1.476,
1.477, 1.479, 1.482, 1.483, 1.484, 1.484, 1.482, 1.482, 1.482, 1.482, 1.482, 1.481, 1.479, 1.479, 1.479, 1.479
]
}
],
"luminance_lut":
[
2.764, 2.654, 2.321, 2.043, 1.768, 1.594, 1.558, 1.558, 1.558, 1.568, 1.661, 1.904, 2.193, 2.497, 2.888, 3.043,
2.654, 2.373, 2.049, 1.819, 1.569, 1.446, 1.381, 1.356, 1.356, 1.403, 1.501, 1.679, 1.939, 2.218, 2.586, 2.888,
2.376, 2.154, 1.819, 1.569, 1.438, 1.301, 1.246, 1.224, 1.224, 1.263, 1.349, 1.501, 1.679, 1.985, 2.359, 2.609,
2.267, 1.987, 1.662, 1.438, 1.301, 1.235, 1.132, 1.105, 1.105, 1.164, 1.263, 1.349, 1.528, 1.808, 2.184, 2.491,
2.218, 1.876, 1.568, 1.367, 1.235, 1.132, 1.087, 1.022, 1.023, 1.104, 1.164, 1.278, 1.439, 1.695, 2.066, 2.429,
2.218, 1.832, 1.533, 1.341, 1.206, 1.089, 1.013, 1.002, 1.013, 1.026, 1.122, 1.246, 1.399, 1.642, 2.004, 2.426,
2.218, 1.832, 1.533, 1.341, 1.206, 1.089, 1.011, 1.001, 1.009, 1.026, 1.122, 1.246, 1.399, 1.642, 2.004, 2.426,
2.224, 1.896, 1.584, 1.382, 1.248, 1.147, 1.088, 1.016, 1.026, 1.118, 1.168, 1.283, 1.444, 1.697, 2.066, 2.428,
2.292, 2.019, 1.689, 1.462, 1.322, 1.247, 1.147, 1.118, 1.118, 1.168, 1.275, 1.358, 1.532, 1.809, 2.189, 2.491,
2.444, 2.204, 1.856, 1.606, 1.462, 1.322, 1.257, 1.234, 1.234, 1.275, 1.358, 1.516, 1.686, 1.993, 2.371, 2.622,
2.748, 2.444, 2.108, 1.856, 1.606, 1.476, 1.399, 1.376, 1.376, 1.422, 1.516, 1.686, 1.968, 2.238, 2.611, 2.935,
2.862, 2.748, 2.395, 2.099, 1.811, 1.621, 1.582, 1.582, 1.582, 1.592, 1.677, 1.919, 2.223, 2.534, 2.935, 3.078
],
"sigma": 0.00428,
"sigma_Cb": 0.00363
}
},
{
"rpi.contrast":
{
"ce_enable": 1,
"gamma_curve":
[
0, 0,
1024, 5040,
2048, 9338,
3072, 12356,
4096, 15312,
5120, 18051,
6144, 20790,
7168, 23193,
8192, 25744,
9216, 27942,
10240, 30035,
11264, 32005,
12288, 33975,
13312, 35815,
14336, 37600,
15360, 39168,
16384, 40642,
18432, 43379,
20480, 45749,
22528, 47753,
24576, 49621,
26624, 51253,
28672, 52698,
30720, 53796,
32768, 54876,
36864, 57012,
40960, 58656,
45056, 59954,
49152, 61183,
53248, 62355,
57344, 63419,
61440, 64476,
65535, 65535
]
}
},
{
"rpi.ccm":
{
"ccms": [
{
"ct": 2850,
"ccm":
[
1.42601, -0.20537, -0.22063,
-0.47682, 1.81987, -0.34305,
0.01854, -0.86036, 1.84181
]
},
{
"ct": 2900,
"ccm":
[
1.29755, 0.04602, -0.34356,
-0.41491, 1.73477, -0.31987,
-0.01345, -0.97115, 1.98459
]
},
{
"ct": 3550,
"ccm":
[
1.49811, -0.33412, -0.16398,
-0.40869, 1.72995, -0.32127,
-0.01924, -0.62181, 1.64105
]
},
{
"ct": 4500,
"ccm":
[
1.47015, -0.29229, -0.17786,
-0.36561, 1.88919, -0.52358,
-0.03552, -0.56717, 1.60269
]
},
{
"ct": 5600,
"ccm":
[
1.60962, -0.47434, -0.13528,
-0.32701, 1.73797, -0.41096,
-0.07626, -0.40171, 1.47796
]
},
{
"ct": 8000,
"ccm":
[
1.54642, -0.20396, -0.34246,
-0.31748, 2.22559, -0.90811,
-0.10035, -0.65877, 1.75912
]
}
]
}
},
{
"rpi.sharpen": { }
}
]
} |
0 | repos/libcamera/src/ipa/rpi/vc4 | repos/libcamera/src/ipa/rpi/vc4/data/imx219_noir.json | {
"version": 2.0,
"target": "bcm2835",
"algorithms": [
{
"rpi.black_level":
{
"black_level": 4096
}
},
{
"rpi.dpc": { }
},
{
"rpi.lux":
{
"reference_shutter_speed": 27685,
"reference_gain": 1.0,
"reference_aperture": 1.0,
"reference_lux": 998,
"reference_Y": 12744
}
},
{
"rpi.noise":
{
"reference_constant": 0,
"reference_slope": 3.67
}
},
{
"rpi.geq":
{
"offset": 204,
"slope": 0.01633
}
},
{
"rpi.sdn": { }
},
{
"rpi.awb":
{
"bayes": 0
}
},
{
"rpi.agc":
{
"channels": [
{
"metering_modes":
{
"centre-weighted":
{
"weights":
[
3, 3, 3, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0
]
},
"spot":
{
"weights":
[
2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
},
"matrix":
{
"weights":
[
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
]
}
},
"exposure_modes":
{
"normal":
{
"shutter": [ 100, 10000, 30000, 60000, 66666 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 8.0 ]
},
"short":
{
"shutter": [ 100, 5000, 10000, 20000, 33333 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 8.0 ]
},
"long":
{
"shutter": [ 100, 10000, 30000, 60000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 12.0 ]
}
},
"constraint_modes":
{
"normal": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.5,
1000, 0.5
]
}
],
"highlight": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.5,
1000, 0.5
]
},
{
"bound": "UPPER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.8,
1000, 0.8
]
}
],
"shadows": [
{
"bound": "LOWER",
"q_lo": 0.0,
"q_hi": 0.5,
"y_target":
[
0, 0.17,
1000, 0.17
]
}
]
},
"y_target":
[
0, 0.16,
1000, 0.165,
10000, 0.17
]
},
{
"base_ev": 0.125,
"metering_modes":
{
"centre-weighted":
{
"weights":
[
3, 3, 3, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0
]
},
"spot":
{
"weights":
[
2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
},
"matrix":
{
"weights":
[
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
]
}
},
"exposure_modes":
{
"normal":
{
"shutter": [ 100, 10000, 30000, 60000, 66666 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 8.0 ]
},
"short":
{
"shutter": [ 100, 5000, 10000, 20000, 33333 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 8.0 ]
},
"long":
{
"shutter": [ 100, 10000, 30000, 60000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 12.0 ]
}
},
"constraint_modes":
{
"normal": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.5,
1000, 0.5
]
}
],
"highlight": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.5,
1000, 0.5
]
},
{
"bound": "UPPER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.8,
1000, 0.8
]
}
],
"shadows": [
{
"bound": "LOWER",
"q_lo": 0.0,
"q_hi": 0.5,
"y_target":
[
0, 0.17,
1000, 0.17
]
}
]
},
"y_target":
[
0, 0.16,
1000, 0.165,
10000, 0.17
]
},
{
"base_ev": 1.5,
"metering_modes":
{
"centre-weighted":
{
"weights":
[
3, 3, 3, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0
]
},
"spot":
{
"weights":
[
2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
},
"matrix":
{
"weights":
[
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
]
}
},
"exposure_modes":
{
"normal":
{
"shutter": [ 100, 10000, 30000, 60000, 66666 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 8.0 ]
},
"short":
{
"shutter": [ 100, 5000, 10000, 20000, 33333 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 8.0 ]
},
"long":
{
"shutter": [ 100, 10000, 30000, 60000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 12.0 ]
}
},
"constraint_modes":
{
"normal": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.5,
1000, 0.5
]
}
],
"highlight": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.5,
1000, 0.5
]
},
{
"bound": "UPPER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.8,
1000, 0.8
]
}
],
"shadows": [
{
"bound": "LOWER",
"q_lo": 0.0,
"q_hi": 0.5,
"y_target":
[
0, 0.17,
1000, 0.17
]
}
]
},
"y_target":
[
0, 0.16,
1000, 0.165,
10000, 0.17
]
}
]
}
},
{
"rpi.alsc":
{
"omega": 1.3,
"n_iter": 100,
"luminance_strength": 0.7,
"calibrations_Cr": [
{
"ct": 3000,
"table":
[
1.487, 1.481, 1.481, 1.445, 1.389, 1.327, 1.307, 1.307, 1.307, 1.309, 1.341, 1.405, 1.458, 1.494, 1.494, 1.497,
1.491, 1.481, 1.448, 1.397, 1.331, 1.275, 1.243, 1.229, 1.229, 1.249, 1.287, 1.349, 1.409, 1.463, 1.494, 1.497,
1.491, 1.469, 1.405, 1.331, 1.275, 1.217, 1.183, 1.172, 1.172, 1.191, 1.231, 1.287, 1.349, 1.424, 1.484, 1.499,
1.487, 1.444, 1.363, 1.283, 1.217, 1.183, 1.148, 1.138, 1.138, 1.159, 1.191, 1.231, 1.302, 1.385, 1.461, 1.492,
1.481, 1.423, 1.334, 1.253, 1.189, 1.148, 1.135, 1.119, 1.123, 1.137, 1.159, 1.203, 1.272, 1.358, 1.442, 1.488,
1.479, 1.413, 1.321, 1.236, 1.176, 1.139, 1.118, 1.114, 1.116, 1.123, 1.149, 1.192, 1.258, 1.344, 1.432, 1.487,
1.479, 1.413, 1.321, 1.236, 1.176, 1.139, 1.116, 1.114, 1.115, 1.123, 1.149, 1.192, 1.258, 1.344, 1.432, 1.487,
1.479, 1.425, 1.336, 1.251, 1.189, 1.149, 1.136, 1.118, 1.121, 1.138, 1.158, 1.206, 1.275, 1.358, 1.443, 1.488,
1.488, 1.448, 1.368, 1.285, 1.219, 1.189, 1.149, 1.139, 1.139, 1.158, 1.195, 1.235, 1.307, 1.387, 1.462, 1.493,
1.496, 1.475, 1.411, 1.337, 1.284, 1.219, 1.189, 1.176, 1.176, 1.195, 1.235, 1.296, 1.356, 1.429, 1.487, 1.501,
1.495, 1.489, 1.458, 1.407, 1.337, 1.287, 1.253, 1.239, 1.239, 1.259, 1.296, 1.356, 1.419, 1.472, 1.499, 1.499,
1.494, 1.489, 1.489, 1.453, 1.398, 1.336, 1.317, 1.317, 1.317, 1.321, 1.351, 1.416, 1.467, 1.501, 1.501, 1.499
]
},
{
"ct": 3850,
"table":
[
1.694, 1.688, 1.688, 1.649, 1.588, 1.518, 1.495, 1.495, 1.495, 1.497, 1.532, 1.602, 1.659, 1.698, 1.698, 1.703,
1.698, 1.688, 1.653, 1.597, 1.525, 1.464, 1.429, 1.413, 1.413, 1.437, 1.476, 1.542, 1.606, 1.665, 1.698, 1.703,
1.697, 1.673, 1.605, 1.525, 1.464, 1.401, 1.369, 1.354, 1.354, 1.377, 1.417, 1.476, 1.542, 1.623, 1.687, 1.705,
1.692, 1.646, 1.561, 1.472, 1.401, 1.368, 1.337, 1.323, 1.324, 1.348, 1.377, 1.417, 1.492, 1.583, 1.661, 1.697,
1.686, 1.625, 1.528, 1.439, 1.372, 1.337, 1.321, 1.311, 1.316, 1.324, 1.348, 1.389, 1.461, 1.553, 1.642, 1.694,
1.684, 1.613, 1.514, 1.423, 1.359, 1.328, 1.311, 1.306, 1.306, 1.316, 1.339, 1.378, 1.446, 1.541, 1.633, 1.693,
1.684, 1.613, 1.514, 1.423, 1.359, 1.328, 1.311, 1.305, 1.305, 1.316, 1.339, 1.378, 1.446, 1.541, 1.633, 1.693,
1.685, 1.624, 1.529, 1.438, 1.372, 1.336, 1.324, 1.309, 1.314, 1.323, 1.348, 1.392, 1.462, 1.555, 1.646, 1.694,
1.692, 1.648, 1.561, 1.473, 1.403, 1.372, 1.336, 1.324, 1.324, 1.348, 1.378, 1.423, 1.495, 1.585, 1.667, 1.701,
1.701, 1.677, 1.608, 1.527, 1.471, 1.403, 1.375, 1.359, 1.359, 1.378, 1.423, 1.488, 1.549, 1.631, 1.694, 1.709,
1.702, 1.694, 1.656, 1.601, 1.527, 1.473, 1.441, 1.424, 1.424, 1.443, 1.488, 1.549, 1.621, 1.678, 1.706, 1.707,
1.699, 1.694, 1.694, 1.654, 1.593, 1.525, 1.508, 1.508, 1.508, 1.509, 1.546, 1.614, 1.674, 1.708, 1.708, 1.707
]
},
{
"ct": 6000,
"table":
[
2.179, 2.176, 2.176, 2.125, 2.048, 1.975, 1.955, 1.954, 1.954, 1.956, 1.993, 2.071, 2.141, 2.184, 2.185, 2.188,
2.189, 2.176, 2.128, 2.063, 1.973, 1.908, 1.872, 1.856, 1.856, 1.876, 1.922, 1.999, 2.081, 2.144, 2.184, 2.192,
2.187, 2.152, 2.068, 1.973, 1.907, 1.831, 1.797, 1.786, 1.786, 1.804, 1.853, 1.922, 1.999, 2.089, 2.166, 2.191,
2.173, 2.117, 2.013, 1.908, 1.831, 1.791, 1.755, 1.749, 1.749, 1.767, 1.804, 1.853, 1.939, 2.041, 2.135, 2.181,
2.166, 2.089, 1.975, 1.869, 1.792, 1.755, 1.741, 1.731, 1.734, 1.749, 1.767, 1.818, 1.903, 2.005, 2.111, 2.173,
2.165, 2.074, 1.956, 1.849, 1.777, 1.742, 1.729, 1.725, 1.729, 1.734, 1.758, 1.804, 1.884, 1.991, 2.099, 2.172,
2.165, 2.074, 1.956, 1.849, 1.777, 1.742, 1.727, 1.724, 1.725, 1.734, 1.758, 1.804, 1.884, 1.991, 2.099, 2.172,
2.166, 2.085, 1.975, 1.869, 1.791, 1.755, 1.741, 1.729, 1.733, 1.749, 1.769, 1.819, 1.904, 2.009, 2.114, 2.174,
2.174, 2.118, 2.015, 1.913, 1.831, 1.791, 1.755, 1.749, 1.749, 1.769, 1.811, 1.855, 1.943, 2.047, 2.139, 2.183,
2.187, 2.151, 2.072, 1.979, 1.911, 1.831, 1.801, 1.791, 1.791, 1.811, 1.855, 1.933, 2.006, 2.101, 2.173, 2.197,
2.189, 2.178, 2.132, 2.069, 1.979, 1.913, 1.879, 1.867, 1.867, 1.891, 1.933, 2.006, 2.091, 2.156, 2.195, 2.197,
2.181, 2.179, 2.178, 2.131, 2.057, 1.981, 1.965, 1.965, 1.965, 1.969, 1.999, 2.083, 2.153, 2.197, 2.197, 2.196
]
}
],
"calibrations_Cb": [
{
"ct": 3000,
"table":
[
1.967, 1.961, 1.955, 1.953, 1.954, 1.957, 1.961, 1.963, 1.963, 1.961, 1.959, 1.957, 1.954, 1.951, 1.951, 1.955,
1.961, 1.959, 1.957, 1.956, 1.962, 1.967, 1.975, 1.979, 1.979, 1.975, 1.971, 1.967, 1.957, 1.952, 1.951, 1.951,
1.959, 1.959, 1.959, 1.966, 1.976, 1.989, 1.999, 2.004, 2.003, 1.997, 1.991, 1.981, 1.967, 1.956, 1.951, 1.951,
1.959, 1.962, 1.967, 1.978, 1.993, 2.009, 2.021, 2.028, 2.026, 2.021, 2.011, 1.995, 1.981, 1.964, 1.953, 1.951,
1.961, 1.965, 1.977, 1.993, 2.009, 2.023, 2.041, 2.047, 2.047, 2.037, 2.024, 2.011, 1.995, 1.975, 1.958, 1.953,
1.963, 1.968, 1.981, 2.001, 2.019, 2.039, 2.046, 2.052, 2.052, 2.051, 2.035, 2.021, 2.001, 1.978, 1.959, 1.955,
1.961, 1.966, 1.981, 2.001, 2.019, 2.038, 2.043, 2.051, 2.052, 2.042, 2.034, 2.019, 2.001, 1.978, 1.959, 1.954,
1.957, 1.961, 1.972, 1.989, 2.003, 2.021, 2.038, 2.039, 2.039, 2.034, 2.019, 2.004, 1.988, 1.971, 1.954, 1.949,
1.952, 1.953, 1.959, 1.972, 1.989, 2.003, 2.016, 2.019, 2.019, 2.014, 2.003, 1.988, 1.971, 1.955, 1.948, 1.947,
1.949, 1.948, 1.949, 1.957, 1.971, 1.978, 1.991, 1.994, 1.994, 1.989, 1.979, 1.967, 1.954, 1.946, 1.947, 1.947,
1.949, 1.946, 1.944, 1.946, 1.949, 1.954, 1.962, 1.967, 1.967, 1.963, 1.956, 1.948, 1.943, 1.943, 1.946, 1.949,
1.951, 1.946, 1.944, 1.942, 1.943, 1.943, 1.947, 1.948, 1.949, 1.947, 1.945, 1.941, 1.938, 1.939, 1.948, 1.952
]
},
{
"ct": 3850,
"table":
[
1.726, 1.724, 1.722, 1.723, 1.731, 1.735, 1.743, 1.746, 1.746, 1.741, 1.735, 1.729, 1.725, 1.721, 1.721, 1.721,
1.724, 1.723, 1.723, 1.727, 1.735, 1.744, 1.749, 1.756, 1.756, 1.749, 1.744, 1.735, 1.727, 1.719, 1.719, 1.719,
1.723, 1.723, 1.724, 1.735, 1.746, 1.759, 1.767, 1.775, 1.775, 1.766, 1.758, 1.746, 1.735, 1.723, 1.718, 1.716,
1.723, 1.725, 1.732, 1.746, 1.759, 1.775, 1.782, 1.792, 1.792, 1.782, 1.772, 1.759, 1.745, 1.729, 1.718, 1.716,
1.725, 1.729, 1.738, 1.756, 1.775, 1.785, 1.796, 1.803, 1.804, 1.794, 1.783, 1.772, 1.757, 1.736, 1.722, 1.718,
1.728, 1.731, 1.741, 1.759, 1.781, 1.795, 1.803, 1.806, 1.808, 1.805, 1.791, 1.779, 1.762, 1.739, 1.722, 1.721,
1.727, 1.731, 1.741, 1.759, 1.781, 1.791, 1.799, 1.804, 1.806, 1.801, 1.791, 1.779, 1.762, 1.739, 1.722, 1.717,
1.722, 1.724, 1.733, 1.751, 1.768, 1.781, 1.791, 1.796, 1.799, 1.791, 1.781, 1.766, 1.754, 1.731, 1.717, 1.714,
1.718, 1.718, 1.724, 1.737, 1.752, 1.768, 1.776, 1.782, 1.784, 1.781, 1.766, 1.754, 1.737, 1.724, 1.713, 1.709,
1.716, 1.715, 1.716, 1.725, 1.737, 1.749, 1.756, 1.763, 1.764, 1.762, 1.749, 1.737, 1.724, 1.717, 1.709, 1.708,
1.715, 1.714, 1.712, 1.715, 1.722, 1.729, 1.736, 1.741, 1.742, 1.739, 1.731, 1.723, 1.717, 1.712, 1.711, 1.709,
1.716, 1.714, 1.711, 1.712, 1.715, 1.719, 1.723, 1.728, 1.731, 1.729, 1.723, 1.718, 1.711, 1.711, 1.713, 1.713
]
},
{
"ct": 6000,
"table":
[
1.374, 1.372, 1.373, 1.374, 1.375, 1.378, 1.378, 1.381, 1.382, 1.382, 1.378, 1.373, 1.372, 1.369, 1.365, 1.365,
1.371, 1.371, 1.372, 1.374, 1.378, 1.381, 1.384, 1.386, 1.388, 1.387, 1.384, 1.377, 1.372, 1.368, 1.364, 1.362,
1.369, 1.371, 1.372, 1.377, 1.383, 1.391, 1.394, 1.396, 1.397, 1.395, 1.391, 1.382, 1.374, 1.369, 1.362, 1.361,
1.369, 1.371, 1.375, 1.383, 1.391, 1.399, 1.402, 1.404, 1.405, 1.403, 1.398, 1.391, 1.379, 1.371, 1.363, 1.361,
1.371, 1.373, 1.378, 1.388, 1.399, 1.407, 1.411, 1.413, 1.413, 1.411, 1.405, 1.397, 1.385, 1.374, 1.366, 1.362,
1.371, 1.374, 1.379, 1.389, 1.405, 1.411, 1.414, 1.414, 1.415, 1.415, 1.411, 1.401, 1.388, 1.376, 1.367, 1.363,
1.371, 1.373, 1.379, 1.389, 1.405, 1.408, 1.413, 1.414, 1.414, 1.413, 1.409, 1.401, 1.388, 1.376, 1.367, 1.362,
1.366, 1.369, 1.374, 1.384, 1.396, 1.404, 1.407, 1.408, 1.408, 1.408, 1.401, 1.395, 1.382, 1.371, 1.363, 1.359,
1.364, 1.365, 1.368, 1.375, 1.386, 1.396, 1.399, 1.401, 1.399, 1.399, 1.395, 1.385, 1.374, 1.365, 1.359, 1.357,
1.361, 1.363, 1.365, 1.368, 1.377, 1.384, 1.388, 1.391, 1.391, 1.388, 1.385, 1.375, 1.366, 1.361, 1.358, 1.356,
1.361, 1.362, 1.362, 1.364, 1.367, 1.373, 1.376, 1.377, 1.377, 1.375, 1.373, 1.366, 1.362, 1.358, 1.358, 1.358,
1.361, 1.362, 1.362, 1.362, 1.363, 1.367, 1.369, 1.368, 1.367, 1.367, 1.367, 1.364, 1.358, 1.357, 1.358, 1.359
]
}
],
"luminance_lut":
[
2.716, 2.568, 2.299, 2.065, 1.845, 1.693, 1.605, 1.597, 1.596, 1.634, 1.738, 1.914, 2.145, 2.394, 2.719, 2.901,
2.593, 2.357, 2.093, 1.876, 1.672, 1.528, 1.438, 1.393, 1.394, 1.459, 1.569, 1.731, 1.948, 2.169, 2.481, 2.756,
2.439, 2.197, 1.922, 1.691, 1.521, 1.365, 1.266, 1.222, 1.224, 1.286, 1.395, 1.573, 1.747, 1.988, 2.299, 2.563,
2.363, 2.081, 1.797, 1.563, 1.376, 1.244, 1.152, 1.099, 1.101, 1.158, 1.276, 1.421, 1.607, 1.851, 2.163, 2.455,
2.342, 2.003, 1.715, 1.477, 1.282, 1.152, 1.074, 1.033, 1.035, 1.083, 1.163, 1.319, 1.516, 1.759, 2.064, 2.398,
2.342, 1.985, 1.691, 1.446, 1.249, 1.111, 1.034, 1.004, 1.004, 1.028, 1.114, 1.274, 1.472, 1.716, 2.019, 2.389,
2.342, 1.991, 1.691, 1.446, 1.249, 1.112, 1.034, 1.011, 1.005, 1.035, 1.114, 1.274, 1.472, 1.716, 2.019, 2.389,
2.365, 2.052, 1.751, 1.499, 1.299, 1.171, 1.089, 1.039, 1.042, 1.084, 1.162, 1.312, 1.516, 1.761, 2.059, 2.393,
2.434, 2.159, 1.856, 1.601, 1.403, 1.278, 1.166, 1.114, 1.114, 1.162, 1.266, 1.402, 1.608, 1.847, 2.146, 2.435,
2.554, 2.306, 2.002, 1.748, 1.563, 1.396, 1.299, 1.247, 1.243, 1.279, 1.386, 1.551, 1.746, 1.977, 2.272, 2.518,
2.756, 2.493, 2.195, 1.947, 1.739, 1.574, 1.481, 1.429, 1.421, 1.457, 1.559, 1.704, 1.929, 2.159, 2.442, 2.681,
2.935, 2.739, 2.411, 2.151, 1.922, 1.749, 1.663, 1.628, 1.625, 1.635, 1.716, 1.872, 2.113, 2.368, 2.663, 2.824
],
"sigma": 0.00381,
"sigma_Cb": 0.00216
}
},
{
"rpi.contrast":
{
"ce_enable": 1,
"gamma_curve":
[
0, 0,
1024, 5040,
2048, 9338,
3072, 12356,
4096, 15312,
5120, 18051,
6144, 20790,
7168, 23193,
8192, 25744,
9216, 27942,
10240, 30035,
11264, 32005,
12288, 33975,
13312, 35815,
14336, 37600,
15360, 39168,
16384, 40642,
18432, 43379,
20480, 45749,
22528, 47753,
24576, 49621,
26624, 51253,
28672, 52698,
30720, 53796,
32768, 54876,
36864, 57012,
40960, 58656,
45056, 59954,
49152, 61183,
53248, 62355,
57344, 63419,
61440, 64476,
65535, 65535
]
}
},
{
"rpi.ccm":
{
"ccms": [
{
"ct": 2498,
"ccm":
[
1.58731, -0.18011, -0.40721,
-0.60639, 2.03422, -0.42782,
-0.19612, -1.69203, 2.88815
]
},
{
"ct": 2811,
"ccm":
[
1.61593, -0.33164, -0.28429,
-0.55048, 1.97779, -0.42731,
-0.12042, -1.42847, 2.54889
]
},
{
"ct": 2911,
"ccm":
[
1.62771, -0.41282, -0.21489,
-0.57991, 2.04176, -0.46186,
-0.07613, -1.13359, 2.20972
]
},
{
"ct": 2919,
"ccm":
[
1.62661, -0.37736, -0.24925,
-0.52519, 1.95233, -0.42714,
-0.10842, -1.34929, 2.45771
]
},
{
"ct": 3627,
"ccm":
[
1.70385, -0.57231, -0.13154,
-0.47763, 1.85998, -0.38235,
-0.07467, -0.82678, 1.90145
]
},
{
"ct": 4600,
"ccm":
[
1.68486, -0.61085, -0.07402,
-0.41927, 2.04016, -0.62089,
-0.08633, -0.67672, 1.76305
]
},
{
"ct": 5716,
"ccm":
[
1.80439, -0.73699, -0.06739,
-0.36073, 1.83327, -0.47255,
-0.08378, -0.56403, 1.64781
]
},
{
"ct": 8575,
"ccm":
[
1.89357, -0.76427, -0.12931,
-0.27399, 2.15605, -0.88206,
-0.12035, -0.68256, 1.80292
]
}
]
}
},
{
"rpi.sharpen": { }
},
{
"rpi.hdr":
{
"MultiExposureUnmerged":
{
"cadence": [ 1, 2 ],
"channel_map":
{
"short": 1,
"long": 2
}
}
}
}
]
}
|
0 | repos/libcamera/src/ipa/rpi/vc4 | repos/libcamera/src/ipa/rpi/vc4/data/ov5647_noir.json | {
"version": 2.0,
"target": "bcm2835",
"algorithms": [
{
"rpi.black_level":
{
"black_level": 1024
}
},
{
"rpi.dpc": { }
},
{
"rpi.lux":
{
"reference_shutter_speed": 21663,
"reference_gain": 1.0,
"reference_aperture": 1.0,
"reference_lux": 987,
"reference_Y": 8961
}
},
{
"rpi.noise":
{
"reference_constant": 0,
"reference_slope": 4.25
}
},
{
"rpi.geq":
{
"offset": 401,
"slope": 0.05619
}
},
{
"rpi.sdn": { }
},
{
"rpi.awb":
{
"bayes": 0
}
},
{
"rpi.agc":
{
"metering_modes":
{
"centre-weighted":
{
"weights":
[
3, 3, 3, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0
]
},
"spot":
{
"weights":
[
2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
},
"matrix":
{
"weights":
[
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
]
}
},
"exposure_modes":
{
"normal":
{
"shutter": [ 100, 10000, 30000, 60000, 66666 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 8.0 ]
},
"short":
{
"shutter": [ 100, 5000, 10000, 20000, 33333 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 8.0 ]
},
"long":
{
"shutter": [ 100, 10000, 30000, 60000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 12.0 ]
}
},
"constraint_modes":
{
"normal": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.5,
1000, 0.5
]
}
],
"highlight": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.5,
1000, 0.5
]
},
{
"bound": "UPPER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.8,
1000, 0.8
]
}
],
"shadows": [
{
"bound": "LOWER",
"q_lo": 0.0,
"q_hi": 0.5,
"y_target":
[
0, 0.17,
1000, 0.17
]
}
]
},
"y_target":
[
0, 0.16,
1000, 0.165,
10000, 0.17
],
"base_ev": 1.25
}
},
{
"rpi.alsc":
{
"omega": 1.3,
"n_iter": 100,
"luminance_strength": 0.5,
"calibrations_Cr": [
{
"ct": 3000,
"table":
[
1.105, 1.103, 1.093, 1.083, 1.071, 1.065, 1.065, 1.065, 1.066, 1.069, 1.072, 1.077, 1.084, 1.089, 1.093, 1.093,
1.103, 1.096, 1.084, 1.072, 1.059, 1.051, 1.047, 1.047, 1.051, 1.053, 1.059, 1.067, 1.075, 1.082, 1.085, 1.086,
1.096, 1.084, 1.072, 1.059, 1.051, 1.045, 1.039, 1.038, 1.039, 1.045, 1.049, 1.057, 1.063, 1.072, 1.081, 1.082,
1.092, 1.075, 1.061, 1.052, 1.045, 1.039, 1.036, 1.035, 1.035, 1.039, 1.044, 1.049, 1.056, 1.063, 1.072, 1.081,
1.092, 1.073, 1.058, 1.048, 1.043, 1.038, 1.035, 1.033, 1.033, 1.035, 1.039, 1.044, 1.051, 1.057, 1.069, 1.078,
1.091, 1.068, 1.054, 1.045, 1.041, 1.038, 1.035, 1.032, 1.032, 1.032, 1.036, 1.041, 1.045, 1.055, 1.069, 1.078,
1.091, 1.068, 1.052, 1.043, 1.041, 1.038, 1.035, 1.032, 1.031, 1.032, 1.034, 1.036, 1.043, 1.055, 1.069, 1.078,
1.092, 1.068, 1.052, 1.047, 1.042, 1.041, 1.038, 1.035, 1.032, 1.032, 1.035, 1.039, 1.043, 1.055, 1.071, 1.079,
1.092, 1.073, 1.057, 1.051, 1.047, 1.047, 1.044, 1.041, 1.038, 1.038, 1.039, 1.043, 1.051, 1.059, 1.076, 1.083,
1.092, 1.081, 1.068, 1.058, 1.056, 1.056, 1.053, 1.052, 1.049, 1.048, 1.048, 1.051, 1.059, 1.066, 1.083, 1.085,
1.091, 1.087, 1.081, 1.068, 1.065, 1.064, 1.062, 1.062, 1.061, 1.056, 1.056, 1.056, 1.064, 1.069, 1.084, 1.089,
1.091, 1.089, 1.085, 1.079, 1.069, 1.068, 1.067, 1.067, 1.067, 1.063, 1.061, 1.063, 1.068, 1.069, 1.081, 1.092
]
},
{
"ct": 5000,
"table":
[
1.486, 1.484, 1.468, 1.449, 1.427, 1.403, 1.399, 1.399, 1.399, 1.404, 1.413, 1.433, 1.454, 1.473, 1.482, 1.488,
1.484, 1.472, 1.454, 1.431, 1.405, 1.381, 1.365, 1.365, 1.367, 1.373, 1.392, 1.411, 1.438, 1.458, 1.476, 1.481,
1.476, 1.458, 1.433, 1.405, 1.381, 1.361, 1.339, 1.334, 1.334, 1.346, 1.362, 1.391, 1.411, 1.438, 1.462, 1.474,
1.471, 1.443, 1.417, 1.388, 1.361, 1.339, 1.321, 1.313, 1.313, 1.327, 1.346, 1.362, 1.391, 1.422, 1.453, 1.473,
1.469, 1.439, 1.408, 1.377, 1.349, 1.321, 1.312, 1.299, 1.299, 1.311, 1.327, 1.348, 1.378, 1.415, 1.446, 1.468,
1.468, 1.434, 1.402, 1.371, 1.341, 1.316, 1.299, 1.296, 1.295, 1.299, 1.314, 1.338, 1.371, 1.408, 1.441, 1.466,
1.468, 1.434, 1.401, 1.371, 1.341, 1.316, 1.301, 1.296, 1.295, 1.297, 1.314, 1.338, 1.369, 1.408, 1.441, 1.465,
1.469, 1.436, 1.401, 1.374, 1.348, 1.332, 1.315, 1.301, 1.301, 1.313, 1.324, 1.342, 1.372, 1.409, 1.442, 1.465,
1.471, 1.444, 1.413, 1.388, 1.371, 1.348, 1.332, 1.323, 1.323, 1.324, 1.342, 1.362, 1.386, 1.418, 1.449, 1.467,
1.473, 1.454, 1.431, 1.407, 1.388, 1.371, 1.359, 1.352, 1.351, 1.351, 1.362, 1.383, 1.404, 1.433, 1.462, 1.472,
1.474, 1.461, 1.447, 1.424, 1.407, 1.394, 1.385, 1.381, 1.379, 1.381, 1.383, 1.401, 1.419, 1.444, 1.466, 1.481,
1.474, 1.464, 1.455, 1.442, 1.421, 1.408, 1.403, 1.403, 1.403, 1.399, 1.402, 1.415, 1.432, 1.446, 1.467, 1.483
]
},
{
"ct": 6500,
"table":
[
1.567, 1.565, 1.555, 1.541, 1.525, 1.518, 1.518, 1.518, 1.521, 1.527, 1.532, 1.541, 1.551, 1.559, 1.567, 1.569,
1.565, 1.557, 1.542, 1.527, 1.519, 1.515, 1.511, 1.516, 1.519, 1.524, 1.528, 1.533, 1.542, 1.553, 1.559, 1.562,
1.561, 1.546, 1.532, 1.521, 1.518, 1.515, 1.511, 1.516, 1.519, 1.524, 1.528, 1.529, 1.533, 1.542, 1.554, 1.559,
1.561, 1.539, 1.526, 1.524, 1.521, 1.521, 1.522, 1.524, 1.525, 1.531, 1.529, 1.529, 1.531, 1.538, 1.549, 1.558,
1.559, 1.538, 1.526, 1.525, 1.524, 1.528, 1.534, 1.536, 1.536, 1.536, 1.532, 1.529, 1.531, 1.537, 1.548, 1.556,
1.561, 1.537, 1.525, 1.524, 1.526, 1.532, 1.537, 1.539, 1.538, 1.537, 1.532, 1.529, 1.529, 1.537, 1.546, 1.556,
1.561, 1.536, 1.524, 1.522, 1.525, 1.532, 1.538, 1.538, 1.537, 1.533, 1.528, 1.526, 1.527, 1.536, 1.546, 1.555,
1.561, 1.537, 1.522, 1.521, 1.524, 1.531, 1.536, 1.537, 1.534, 1.529, 1.526, 1.522, 1.523, 1.534, 1.547, 1.555,
1.561, 1.538, 1.524, 1.522, 1.526, 1.531, 1.535, 1.535, 1.534, 1.527, 1.524, 1.522, 1.522, 1.535, 1.549, 1.556,
1.558, 1.543, 1.532, 1.526, 1.526, 1.529, 1.534, 1.535, 1.533, 1.526, 1.523, 1.522, 1.524, 1.537, 1.552, 1.557,
1.555, 1.546, 1.541, 1.528, 1.527, 1.528, 1.531, 1.533, 1.531, 1.527, 1.522, 1.522, 1.526, 1.536, 1.552, 1.561,
1.555, 1.547, 1.542, 1.538, 1.526, 1.526, 1.529, 1.531, 1.529, 1.528, 1.519, 1.519, 1.527, 1.531, 1.543, 1.561
]
}
],
"calibrations_Cb": [
{
"ct": 3000,
"table":
[
1.684, 1.688, 1.691, 1.697, 1.709, 1.722, 1.735, 1.745, 1.747, 1.745, 1.731, 1.719, 1.709, 1.705, 1.699, 1.699,
1.684, 1.689, 1.694, 1.708, 1.721, 1.735, 1.747, 1.762, 1.762, 1.758, 1.745, 1.727, 1.716, 1.707, 1.701, 1.699,
1.684, 1.691, 1.704, 1.719, 1.734, 1.755, 1.772, 1.786, 1.789, 1.788, 1.762, 1.745, 1.724, 1.709, 1.702, 1.698,
1.682, 1.694, 1.709, 1.729, 1.755, 1.773, 1.798, 1.815, 1.817, 1.808, 1.788, 1.762, 1.733, 1.714, 1.704, 1.699,
1.682, 1.693, 1.713, 1.742, 1.772, 1.798, 1.815, 1.829, 1.831, 1.821, 1.807, 1.773, 1.742, 1.716, 1.703, 1.699,
1.681, 1.693, 1.713, 1.742, 1.772, 1.799, 1.828, 1.839, 1.839, 1.828, 1.807, 1.774, 1.742, 1.715, 1.699, 1.695,
1.679, 1.691, 1.712, 1.739, 1.771, 1.798, 1.825, 1.829, 1.831, 1.818, 1.801, 1.774, 1.738, 1.712, 1.695, 1.691,
1.676, 1.685, 1.703, 1.727, 1.761, 1.784, 1.801, 1.817, 1.817, 1.801, 1.779, 1.761, 1.729, 1.706, 1.691, 1.684,
1.669, 1.678, 1.692, 1.714, 1.741, 1.764, 1.784, 1.795, 1.795, 1.779, 1.761, 1.738, 1.713, 1.696, 1.683, 1.679,
1.664, 1.671, 1.679, 1.693, 1.716, 1.741, 1.762, 1.769, 1.769, 1.753, 1.738, 1.713, 1.701, 1.687, 1.681, 1.676,
1.661, 1.664, 1.671, 1.679, 1.693, 1.714, 1.732, 1.739, 1.739, 1.729, 1.708, 1.701, 1.685, 1.679, 1.676, 1.677,
1.659, 1.661, 1.664, 1.671, 1.679, 1.693, 1.712, 1.714, 1.714, 1.708, 1.701, 1.687, 1.679, 1.672, 1.673, 1.677
]
},
{
"ct": 5000,
"table":
[
1.177, 1.183, 1.187, 1.191, 1.197, 1.206, 1.213, 1.215, 1.215, 1.215, 1.211, 1.204, 1.196, 1.191, 1.183, 1.182,
1.179, 1.185, 1.191, 1.196, 1.206, 1.217, 1.224, 1.229, 1.229, 1.226, 1.221, 1.212, 1.202, 1.195, 1.188, 1.182,
1.183, 1.191, 1.196, 1.206, 1.217, 1.229, 1.239, 1.245, 1.245, 1.245, 1.233, 1.221, 1.212, 1.199, 1.193, 1.187,
1.183, 1.192, 1.201, 1.212, 1.229, 1.241, 1.252, 1.259, 1.259, 1.257, 1.245, 1.233, 1.217, 1.201, 1.194, 1.192,
1.183, 1.192, 1.202, 1.219, 1.238, 1.252, 1.261, 1.269, 1.268, 1.261, 1.257, 1.241, 1.223, 1.204, 1.194, 1.191,
1.182, 1.192, 1.202, 1.219, 1.239, 1.255, 1.266, 1.271, 1.271, 1.265, 1.258, 1.242, 1.223, 1.205, 1.192, 1.191,
1.181, 1.189, 1.199, 1.218, 1.239, 1.254, 1.262, 1.268, 1.268, 1.258, 1.253, 1.241, 1.221, 1.204, 1.191, 1.187,
1.179, 1.184, 1.193, 1.211, 1.232, 1.243, 1.254, 1.257, 1.256, 1.253, 1.242, 1.232, 1.216, 1.199, 1.187, 1.183,
1.174, 1.179, 1.187, 1.202, 1.218, 1.232, 1.243, 1.246, 1.246, 1.239, 1.232, 1.218, 1.207, 1.191, 1.183, 1.179,
1.169, 1.175, 1.181, 1.189, 1.202, 1.218, 1.229, 1.232, 1.232, 1.224, 1.218, 1.207, 1.199, 1.185, 1.181, 1.174,
1.164, 1.168, 1.175, 1.179, 1.189, 1.201, 1.209, 1.213, 1.213, 1.209, 1.201, 1.198, 1.186, 1.181, 1.174, 1.173,
1.161, 1.166, 1.171, 1.175, 1.179, 1.189, 1.197, 1.198, 1.198, 1.197, 1.196, 1.186, 1.182, 1.175, 1.173, 1.173
]
},
{
"ct": 6500,
"table":
[
1.166, 1.171, 1.173, 1.178, 1.187, 1.193, 1.201, 1.205, 1.205, 1.205, 1.199, 1.191, 1.184, 1.179, 1.174, 1.171,
1.166, 1.172, 1.176, 1.184, 1.195, 1.202, 1.209, 1.216, 1.216, 1.213, 1.208, 1.201, 1.189, 1.182, 1.176, 1.171,
1.166, 1.173, 1.183, 1.195, 1.202, 1.214, 1.221, 1.228, 1.229, 1.228, 1.221, 1.209, 1.201, 1.186, 1.179, 1.174,
1.165, 1.174, 1.187, 1.201, 1.214, 1.223, 1.235, 1.241, 1.242, 1.241, 1.229, 1.221, 1.205, 1.188, 1.181, 1.177,
1.165, 1.174, 1.189, 1.207, 1.223, 1.235, 1.242, 1.253, 1.252, 1.245, 1.241, 1.228, 1.211, 1.189, 1.181, 1.178,
1.164, 1.173, 1.189, 1.207, 1.224, 1.238, 1.249, 1.255, 1.255, 1.249, 1.242, 1.228, 1.211, 1.191, 1.179, 1.176,
1.163, 1.172, 1.187, 1.207, 1.223, 1.237, 1.245, 1.253, 1.252, 1.243, 1.237, 1.228, 1.207, 1.188, 1.176, 1.173,
1.159, 1.167, 1.179, 1.199, 1.217, 1.227, 1.237, 1.241, 1.241, 1.237, 1.228, 1.217, 1.201, 1.184, 1.174, 1.169,
1.156, 1.164, 1.172, 1.189, 1.205, 1.217, 1.226, 1.229, 1.229, 1.222, 1.217, 1.204, 1.192, 1.177, 1.171, 1.166,
1.154, 1.159, 1.166, 1.177, 1.189, 1.205, 1.213, 1.216, 1.216, 1.209, 1.204, 1.192, 1.183, 1.172, 1.168, 1.162,
1.152, 1.155, 1.161, 1.166, 1.177, 1.188, 1.195, 1.198, 1.199, 1.196, 1.187, 1.183, 1.173, 1.168, 1.163, 1.162,
1.151, 1.154, 1.158, 1.162, 1.168, 1.177, 1.183, 1.184, 1.184, 1.184, 1.182, 1.172, 1.168, 1.165, 1.162, 1.161
]
}
],
"luminance_lut":
[
2.236, 2.111, 1.912, 1.741, 1.579, 1.451, 1.379, 1.349, 1.349, 1.361, 1.411, 1.505, 1.644, 1.816, 2.034, 2.159,
2.139, 1.994, 1.796, 1.625, 1.467, 1.361, 1.285, 1.248, 1.239, 1.265, 1.321, 1.408, 1.536, 1.703, 1.903, 2.087,
2.047, 1.898, 1.694, 1.511, 1.373, 1.254, 1.186, 1.152, 1.142, 1.166, 1.226, 1.309, 1.441, 1.598, 1.799, 1.978,
1.999, 1.824, 1.615, 1.429, 1.281, 1.179, 1.113, 1.077, 1.071, 1.096, 1.153, 1.239, 1.357, 1.525, 1.726, 1.915,
1.976, 1.773, 1.563, 1.374, 1.222, 1.119, 1.064, 1.032, 1.031, 1.049, 1.099, 1.188, 1.309, 1.478, 1.681, 1.893,
1.973, 1.756, 1.542, 1.351, 1.196, 1.088, 1.028, 1.011, 1.004, 1.029, 1.077, 1.169, 1.295, 1.459, 1.663, 1.891,
1.973, 1.761, 1.541, 1.349, 1.193, 1.087, 1.031, 1.006, 1.006, 1.023, 1.075, 1.169, 1.298, 1.463, 1.667, 1.891,
1.982, 1.789, 1.568, 1.373, 1.213, 1.111, 1.051, 1.029, 1.024, 1.053, 1.106, 1.199, 1.329, 1.495, 1.692, 1.903,
2.015, 1.838, 1.621, 1.426, 1.268, 1.159, 1.101, 1.066, 1.068, 1.099, 1.166, 1.259, 1.387, 1.553, 1.751, 1.937,
2.076, 1.911, 1.692, 1.507, 1.346, 1.236, 1.169, 1.136, 1.139, 1.174, 1.242, 1.349, 1.475, 1.641, 1.833, 2.004,
2.193, 2.011, 1.798, 1.604, 1.444, 1.339, 1.265, 1.235, 1.237, 1.273, 1.351, 1.461, 1.598, 1.758, 1.956, 2.125,
2.263, 2.154, 1.916, 1.711, 1.549, 1.432, 1.372, 1.356, 1.356, 1.383, 1.455, 1.578, 1.726, 1.914, 2.119, 2.211
],
"sigma": 0.006,
"sigma_Cb": 0.00208
}
},
{
"rpi.contrast":
{
"ce_enable": 1,
"gamma_curve":
[
0, 0,
1024, 5040,
2048, 9338,
3072, 12356,
4096, 15312,
5120, 18051,
6144, 20790,
7168, 23193,
8192, 25744,
9216, 27942,
10240, 30035,
11264, 32005,
12288, 33975,
13312, 35815,
14336, 37600,
15360, 39168,
16384, 40642,
18432, 43379,
20480, 45749,
22528, 47753,
24576, 49621,
26624, 51253,
28672, 52698,
30720, 53796,
32768, 54876,
36864, 57012,
40960, 58656,
45056, 59954,
49152, 61183,
53248, 62355,
57344, 63419,
61440, 64476,
65535, 65535
]
}
},
{
"rpi.ccm":
{
"ccms": [
{
"ct": 2500,
"ccm":
[
1.70741, -0.05307, -0.65433,
-0.62822, 1.68836, -0.06014,
-0.04452, -1.87628, 2.92079
]
},
{
"ct": 2803,
"ccm":
[
1.74383, -0.18731, -0.55652,
-0.56491, 1.67772, -0.11281,
-0.01522, -1.60635, 2.62157
]
},
{
"ct": 2912,
"ccm":
[
1.75215, -0.22221, -0.52995,
-0.54568, 1.63522, -0.08954,
0.02633, -1.56997, 2.54364
]
},
{
"ct": 2914,
"ccm":
[
1.72423, -0.28939, -0.43484,
-0.55188, 1.62925, -0.07737,
0.01959, -1.28661, 2.26702
]
},
{
"ct": 3605,
"ccm":
[
1.80381, -0.43646, -0.36735,
-0.46505, 1.56814, -0.10309,
0.00929, -1.00424, 1.99495
]
},
{
"ct": 4540,
"ccm":
[
1.85263, -0.46545, -0.38719,
-0.44136, 1.68443, -0.24307,
0.04108, -0.85599, 1.81491
]
},
{
"ct": 5699,
"ccm":
[
1.98595, -0.63542, -0.35054,
-0.34623, 1.54146, -0.19522,
0.00411, -0.70936, 1.70525
]
},
{
"ct": 8625,
"ccm":
[
2.21637, -0.56663, -0.64974,
-0.41133, 1.96625, -0.55492,
-0.02307, -0.83529, 1.85837
]
}
]
}
},
{
"rpi.sharpen": { }
}
]
} |
0 | repos/libcamera/src/ipa/rpi/vc4 | repos/libcamera/src/ipa/rpi/vc4/data/imx519.json | {
"version": 2.0,
"target": "bcm2835",
"algorithms": [
{
"rpi.black_level":
{
"black_level": 4096
}
},
{
"rpi.dpc": { }
},
{
"rpi.lux":
{
"reference_shutter_speed": 13841,
"reference_gain": 2.0,
"reference_aperture": 1.0,
"reference_lux": 900,
"reference_Y": 12064
}
},
{
"rpi.noise":
{
"reference_constant": 0,
"reference_slope": 2.776
}
},
{
"rpi.geq":
{
"offset": 189,
"slope": 0.01495
}
},
{
"rpi.sdn": { }
},
{
"rpi.awb":
{
"priors": [
{
"lux": 0,
"prior":
[
2000, 1.0,
3000, 0.0,
13000, 0.0
]
},
{
"lux": 800,
"prior":
[
2000, 0.0,
6000, 2.0,
13000, 2.0
]
},
{
"lux": 1500,
"prior":
[
2000, 0.0,
4000, 1.0,
6000, 6.0,
6500, 7.0,
7000, 1.0,
13000, 1.0
]
}
],
"modes":
{
"auto":
{
"lo": 2500,
"hi": 7900
},
"incandescent":
{
"lo": 2500,
"hi": 3000
},
"tungsten":
{
"lo": 3000,
"hi": 3500
},
"fluorescent":
{
"lo": 4000,
"hi": 4700
},
"indoor":
{
"lo": 3000,
"hi": 5000
},
"daylight":
{
"lo": 5500,
"hi": 6500
},
"cloudy":
{
"lo": 7000,
"hi": 8000
}
},
"bayes": 1,
"ct_curve":
[
2890.0, 0.7328, 0.3734,
3550.0, 0.6228, 0.4763,
4500.0, 0.5208, 0.5825,
5700.0, 0.4467, 0.6671,
7900.0, 0.3858, 0.7411
],
"sensitivity_r": 1.0,
"sensitivity_b": 1.0,
"transverse_pos": 0.02027,
"transverse_neg": 0.01935
}
},
{
"rpi.agc":
{
"metering_modes":
{
"centre-weighted":
{
"weights":
[
3, 3, 3, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0
]
},
"spot":
{
"weights":
[
2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
},
"matrix":
{
"weights":
[
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
]
}
},
"exposure_modes":
{
"normal":
{
"shutter": [ 100, 10000, 30000, 60000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 8.0 ]
},
"short":
{
"shutter": [ 100, 5000, 10000, 20000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 8.0 ]
},
"long":
{
"shutter": [ 1000, 30000, 60000, 90000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 12.0 ]
}
},
"constraint_modes":
{
"normal": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.5,
1000, 0.5
]
}
],
"highlight": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.5,
1000, 0.5
]
},
{
"bound": "UPPER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.8,
1000, 0.8
]
}
]
},
"y_target":
[
0, 0.16,
1000, 0.165,
10000, 0.17
]
}
},
{
"rpi.alsc":
{
"omega": 1.3,
"n_iter": 100,
"luminance_strength": 0.5,
"calibrations_Cr": [
{
"ct": 3000,
"table":
[
1.527, 1.521, 1.508, 1.493, 1.476, 1.455, 1.442, 1.441, 1.441, 1.441, 1.448, 1.467, 1.483, 1.494, 1.503, 1.504,
1.525, 1.513, 1.496, 1.477, 1.461, 1.434, 1.418, 1.409, 1.409, 1.416, 1.429, 1.449, 1.469, 1.485, 1.495, 1.503,
1.517, 1.506, 1.485, 1.461, 1.434, 1.412, 1.388, 1.376, 1.376, 1.386, 1.405, 1.429, 1.449, 1.471, 1.488, 1.495,
1.512, 1.496, 1.471, 1.442, 1.412, 1.388, 1.361, 1.344, 1.344, 1.358, 1.384, 1.405, 1.431, 1.456, 1.479, 1.489,
1.508, 1.488, 1.458, 1.425, 1.393, 1.361, 1.343, 1.322, 1.321, 1.342, 1.358, 1.385, 1.416, 1.445, 1.471, 1.484,
1.507, 1.482, 1.453, 1.418, 1.382, 1.349, 1.322, 1.318, 1.318, 1.321, 1.345, 1.373, 1.405, 1.437, 1.465, 1.483,
1.507, 1.482, 1.453, 1.418, 1.382, 1.349, 1.322, 1.313, 1.313, 1.321, 1.345, 1.373, 1.405, 1.437, 1.465, 1.483,
1.507, 1.485, 1.455, 1.422, 1.387, 1.355, 1.333, 1.319, 1.321, 1.333, 1.351, 1.381, 1.411, 1.441, 1.467, 1.483,
1.508, 1.489, 1.463, 1.432, 1.401, 1.372, 1.355, 1.333, 1.333, 1.351, 1.369, 1.393, 1.422, 1.448, 1.471, 1.484,
1.511, 1.494, 1.472, 1.444, 1.416, 1.398, 1.372, 1.361, 1.361, 1.369, 1.393, 1.411, 1.436, 1.458, 1.477, 1.487,
1.511, 1.496, 1.478, 1.455, 1.436, 1.416, 1.399, 1.391, 1.391, 1.397, 1.411, 1.429, 1.451, 1.466, 1.479, 1.487,
1.511, 1.495, 1.478, 1.462, 1.448, 1.432, 1.419, 1.419, 1.419, 1.419, 1.429, 1.445, 1.459, 1.471, 1.482, 1.487
]
},
{
"ct": 6000,
"table":
[
2.581, 2.573, 2.558, 2.539, 2.514, 2.487, 2.473, 2.471, 2.471, 2.471, 2.479, 2.499, 2.517, 2.532, 2.543, 2.544,
2.575, 2.559, 2.539, 2.521, 2.491, 2.458, 2.435, 2.421, 2.421, 2.429, 2.449, 2.477, 2.499, 2.519, 2.534, 2.543,
2.561, 2.549, 2.521, 2.491, 2.457, 2.423, 2.393, 2.375, 2.375, 2.387, 2.412, 2.444, 2.475, 2.499, 2.519, 2.532,
2.552, 2.531, 2.498, 2.459, 2.423, 2.391, 2.349, 2.325, 2.325, 2.344, 2.374, 2.412, 2.444, 2.476, 2.505, 2.519,
2.543, 2.518, 2.479, 2.435, 2.392, 2.349, 2.324, 2.285, 2.283, 2.313, 2.344, 2.374, 2.417, 2.457, 2.489, 2.506,
2.541, 2.511, 2.469, 2.421, 2.372, 2.326, 2.284, 2.277, 2.279, 2.283, 2.313, 2.357, 2.401, 2.443, 2.479, 2.504,
2.541, 2.511, 2.469, 2.421, 2.372, 2.326, 2.284, 2.267, 2.267, 2.281, 2.313, 2.357, 2.401, 2.443, 2.479, 2.504,
2.541, 2.512, 2.472, 2.425, 2.381, 2.338, 2.302, 2.278, 2.279, 2.301, 2.324, 2.364, 2.407, 2.447, 2.481, 2.504,
2.544, 2.519, 2.483, 2.441, 2.401, 2.363, 2.338, 2.302, 2.302, 2.324, 2.355, 2.385, 2.423, 2.459, 2.488, 2.506,
2.549, 2.527, 2.497, 2.463, 2.427, 2.401, 2.363, 2.345, 2.345, 2.355, 2.385, 2.412, 2.444, 2.473, 2.497, 2.509,
2.552, 2.532, 2.507, 2.481, 2.459, 2.427, 2.402, 2.389, 2.389, 2.394, 2.412, 2.444, 2.465, 2.481, 2.499, 2.511,
2.553, 2.533, 2.508, 2.489, 2.475, 2.454, 2.429, 2.429, 2.429, 2.429, 2.439, 2.463, 2.481, 2.492, 2.504, 2.511
]
}
],
"calibrations_Cb": [
{
"ct": 3000,
"table":
[
3.132, 3.126, 3.116, 3.103, 3.097, 3.091, 3.087, 3.086, 3.088, 3.091, 3.092, 3.102, 3.113, 3.121, 3.141, 3.144,
3.149, 3.132, 3.123, 3.108, 3.101, 3.096, 3.091, 3.089, 3.091, 3.092, 3.101, 3.107, 3.116, 3.129, 3.144, 3.153,
3.161, 3.149, 3.129, 3.121, 3.108, 3.103, 3.101, 3.101, 3.101, 3.103, 3.107, 3.116, 3.125, 3.134, 3.153, 3.159,
3.176, 3.161, 3.144, 3.129, 3.124, 3.121, 3.117, 3.118, 3.118, 3.119, 3.122, 3.125, 3.134, 3.146, 3.159, 3.171,
3.183, 3.176, 3.157, 3.144, 3.143, 3.143, 3.139, 3.141, 3.141, 3.141, 3.141, 3.141, 3.146, 3.161, 3.171, 3.179,
3.189, 3.183, 3.165, 3.157, 3.156, 3.157, 3.159, 3.163, 3.163, 3.163, 3.163, 3.161, 3.163, 3.169, 3.179, 3.187,
3.199, 3.189, 3.171, 3.165, 3.164, 3.167, 3.171, 3.173, 3.173, 3.172, 3.171, 3.169, 3.169, 3.175, 3.187, 3.189,
3.206, 3.196, 3.177, 3.171, 3.165, 3.167, 3.171, 3.173, 3.173, 3.172, 3.171, 3.171, 3.173, 3.177, 3.192, 3.194,
3.209, 3.197, 3.178, 3.171, 3.164, 3.161, 3.159, 3.161, 3.162, 3.164, 3.167, 3.171, 3.173, 3.181, 3.193, 3.198,
3.204, 3.194, 3.176, 3.165, 3.161, 3.156, 3.154, 3.154, 3.159, 3.161, 3.164, 3.168, 3.173, 3.182, 3.198, 3.199,
3.199, 3.191, 3.176, 3.169, 3.161, 3.157, 3.153, 3.153, 3.156, 3.161, 3.164, 3.168, 3.173, 3.186, 3.196, 3.199,
3.199, 3.188, 3.179, 3.173, 3.165, 3.157, 3.153, 3.154, 3.156, 3.159, 3.167, 3.171, 3.176, 3.185, 3.193, 3.198
]
},
{
"ct": 6000,
"table":
[
1.579, 1.579, 1.577, 1.574, 1.573, 1.571, 1.571, 1.571, 1.571, 1.569, 1.569, 1.571, 1.572, 1.574, 1.577, 1.578,
1.584, 1.579, 1.578, 1.575, 1.573, 1.572, 1.571, 1.572, 1.572, 1.571, 1.571, 1.572, 1.573, 1.576, 1.578, 1.579,
1.587, 1.584, 1.579, 1.578, 1.575, 1.573, 1.573, 1.575, 1.575, 1.574, 1.573, 1.574, 1.576, 1.578, 1.581, 1.581,
1.591, 1.587, 1.584, 1.579, 1.578, 1.579, 1.579, 1.581, 1.581, 1.581, 1.578, 1.577, 1.578, 1.581, 1.585, 1.586,
1.595, 1.591, 1.587, 1.585, 1.585, 1.586, 1.587, 1.587, 1.588, 1.588, 1.585, 1.584, 1.584, 1.586, 1.589, 1.589,
1.597, 1.595, 1.591, 1.589, 1.591, 1.593, 1.595, 1.596, 1.597, 1.597, 1.595, 1.594, 1.592, 1.592, 1.593, 1.593,
1.601, 1.597, 1.593, 1.592, 1.593, 1.595, 1.598, 1.599, 1.602, 1.601, 1.598, 1.596, 1.595, 1.596, 1.595, 1.595,
1.601, 1.599, 1.594, 1.593, 1.593, 1.595, 1.598, 1.599, 1.602, 1.601, 1.598, 1.597, 1.597, 1.597, 1.597, 1.597,
1.602, 1.599, 1.594, 1.593, 1.592, 1.593, 1.595, 1.597, 1.597, 1.598, 1.598, 1.597, 1.597, 1.597, 1.598, 1.598,
1.599, 1.598, 1.594, 1.592, 1.591, 1.591, 1.592, 1.595, 1.596, 1.597, 1.597, 1.597, 1.597, 1.599, 1.599, 1.599,
1.598, 1.596, 1.594, 1.593, 1.592, 1.592, 1.592, 1.594, 1.595, 1.597, 1.597, 1.597, 1.598, 1.599, 1.599, 1.599,
1.597, 1.595, 1.594, 1.594, 1.593, 1.592, 1.593, 1.595, 1.595, 1.597, 1.598, 1.598, 1.598, 1.599, 1.599, 1.599
]
}
],
"luminance_lut":
[
2.887, 2.754, 2.381, 2.105, 1.859, 1.678, 1.625, 1.623, 1.623, 1.624, 1.669, 1.849, 2.092, 2.362, 2.723, 2.838,
2.754, 2.443, 2.111, 1.905, 1.678, 1.542, 1.455, 1.412, 1.412, 1.452, 1.535, 1.665, 1.893, 2.096, 2.413, 2.723,
2.443, 2.216, 1.911, 1.678, 1.537, 1.372, 1.288, 1.245, 1.245, 1.283, 1.363, 1.527, 1.665, 1.895, 2.193, 2.413,
2.318, 2.057, 1.764, 1.541, 1.372, 1.282, 1.159, 1.113, 1.113, 1.151, 1.269, 1.363, 1.527, 1.749, 2.034, 2.278,
2.259, 1.953, 1.671, 1.452, 1.283, 1.159, 1.107, 1.018, 1.017, 1.097, 1.151, 1.269, 1.437, 1.655, 1.931, 2.222,
2.257, 1.902, 1.624, 1.408, 1.239, 1.111, 1.019, 1.011, 1.005, 1.014, 1.098, 1.227, 1.395, 1.608, 1.883, 2.222,
2.257, 1.902, 1.624, 1.408, 1.239, 1.111, 1.016, 1.001, 1.001, 1.007, 1.098, 1.227, 1.395, 1.608, 1.883, 2.222,
2.257, 1.946, 1.666, 1.448, 1.281, 1.153, 1.093, 1.013, 1.008, 1.089, 1.143, 1.269, 1.437, 1.654, 1.934, 2.226,
2.309, 2.044, 1.756, 1.532, 1.363, 1.259, 1.153, 1.093, 1.093, 1.143, 1.264, 1.354, 1.524, 1.746, 2.035, 2.284,
2.425, 2.201, 1.896, 1.662, 1.519, 1.363, 1.259, 1.214, 1.214, 1.264, 1.354, 1.519, 1.655, 1.888, 2.191, 2.413,
2.724, 2.417, 2.091, 1.888, 1.662, 1.519, 1.419, 1.373, 1.373, 1.425, 1.521, 1.655, 1.885, 2.089, 2.409, 2.722,
2.858, 2.724, 2.356, 2.085, 1.842, 1.658, 1.581, 1.577, 1.577, 1.579, 1.653, 1.838, 2.084, 2.359, 2.722, 2.842
],
"sigma": 0.00372,
"sigma_Cb": 0.00244
}
},
{
"rpi.contrast":
{
"ce_enable": 1,
"gamma_curve":
[
0, 0,
1024, 5040,
2048, 9338,
3072, 12356,
4096, 15312,
5120, 18051,
6144, 20790,
7168, 23193,
8192, 25744,
9216, 27942,
10240, 30035,
11264, 32005,
12288, 33975,
13312, 35815,
14336, 37600,
15360, 39168,
16384, 40642,
18432, 43379,
20480, 45749,
22528, 47753,
24576, 49621,
26624, 51253,
28672, 52698,
30720, 53796,
32768, 54876,
36864, 57012,
40960, 58656,
45056, 59954,
49152, 61183,
53248, 62355,
57344, 63419,
61440, 64476,
65535, 65535
]
}
},
{
"rpi.ccm":
{
"ccms": [
{
"ct": 2890,
"ccm":
[
1.36754, -0.18448, -0.18306,
-0.32356, 1.44826, -0.12471,
-0.00412, -0.69936, 1.70348
]
},
{
"ct": 2920,
"ccm":
[
1.26704, 0.01624, -0.28328,
-0.28516, 1.38934, -0.10419,
-0.04854, -0.82211, 1.87066
]
},
{
"ct": 3550,
"ccm":
[
1.42836, -0.27235, -0.15601,
-0.28751, 1.41075, -0.12325,
-0.01812, -0.54849, 1.56661
]
},
{
"ct": 4500,
"ccm":
[
1.36328, -0.19569, -0.16759,
-0.25254, 1.52248, -0.26994,
-0.01575, -0.53155, 1.54729
]
},
{
"ct": 5700,
"ccm":
[
1.49207, -0.37245, -0.11963,
-0.21493, 1.40005, -0.18512,
-0.03781, -0.38779, 1.42561
]
},
{
"ct": 7900,
"ccm":
[
1.34849, -0.05425, -0.29424,
-0.22182, 1.77684, -0.55502,
-0.07403, -0.55336, 1.62739
]
}
]
}
},
{
"rpi.sharpen": { }
}
]
} |
0 | repos/libcamera/src/ipa/rpi/vc4 | repos/libcamera/src/ipa/rpi/vc4/data/imx477_scientific.json | {
"version": 2.0,
"target": "bcm2835",
"algorithms": [
{
"rpi.black_level":
{
"black_level": 4096
}
},
{
"rpi.dpc": { }
},
{
"rpi.lux":
{
"reference_shutter_speed": 27242,
"reference_gain": 1.0,
"reference_aperture": 1.0,
"reference_lux": 830,
"reference_Y": 17755
}
},
{
"rpi.noise":
{
"reference_constant": 0,
"reference_slope": 2.767
}
},
{
"rpi.geq":
{
"offset": 204,
"slope": 0.01078
}
},
{
"rpi.sdn": { }
},
{
"rpi.awb":
{
"priors": [
{
"lux": 0,
"prior":
[
2000, 1.0,
3000, 0.0,
13000, 0.0
]
},
{
"lux": 800,
"prior":
[
2000, 0.0,
6000, 2.0,
13000, 2.0
]
},
{
"lux": 1500,
"prior":
[
2000, 0.0,
4000, 1.0,
6000, 6.0,
6500, 7.0,
7000, 1.0,
13000, 1.0
]
}
],
"modes":
{
"auto":
{
"lo": 2500,
"hi": 8000
},
"incandescent":
{
"lo": 2500,
"hi": 3000
},
"tungsten":
{
"lo": 3000,
"hi": 3500
},
"fluorescent":
{
"lo": 4000,
"hi": 4700
},
"indoor":
{
"lo": 3000,
"hi": 5000
},
"daylight":
{
"lo": 5500,
"hi": 6500
},
"cloudy":
{
"lo": 7000,
"hi": 8600
}
},
"bayes": 1,
"ct_curve":
[
2000.0, 0.6331025775790707, 0.27424225990946915,
2200.0, 0.5696117366212947, 0.3116091368689487,
2400.0, 0.5204264653110015, 0.34892179554105873,
2600.0, 0.48148675531667223, 0.38565229719076793,
2800.0, 0.450085403501908, 0.42145684622485047,
3000.0, 0.42436130159169017, 0.45611835670028816,
3200.0, 0.40300023695527337, 0.48950766215198593,
3400.0, 0.3850520052612984, 0.5215567075837261,
3600.0, 0.36981508088230314, 0.5522397906415475,
4100.0, 0.333468007836758, 0.5909770465167908,
4600.0, 0.31196097364221376, 0.6515706327327178,
5100.0, 0.2961860409294588, 0.7068178946570284,
5600.0, 0.2842607232745885, 0.7564837749584288,
6100.0, 0.2750265787051251, 0.8006183524920533,
6600.0, 0.2677057225584924, 0.8398879225373039,
7100.0, 0.2617955199757274, 0.8746456080032436,
7600.0, 0.25693714288250125, 0.905569559506562,
8100.0, 0.25287531441063316, 0.9331696750390895,
8600.0, 0.24946601483331993, 0.9576820904825795
],
"sensitivity_r": 1.05,
"sensitivity_b": 1.05,
"transverse_pos": 0.0238,
"transverse_neg": 0.04429,
"coarse_step": 0.1
}
},
{
"rpi.agc":
{
"metering_modes":
{
"centre-weighted":
{
"weights":
[
3, 3, 3, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0
]
},
"spot":
{
"weights":
[
2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
},
"matrix":
{
"weights":
[
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
]
}
},
"exposure_modes":
{
"normal":
{
"shutter": [ 100, 10000, 30000, 60000, 66666 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 8.0 ]
},
"short":
{
"shutter": [ 100, 5000, 10000, 20000, 33333 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 8.0 ]
},
"long":
{
"shutter": [ 100, 10000, 30000, 60000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 12.0 ]
}
},
"constraint_modes":
{
"normal": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.3,
1000, 0.3
]
}
],
"highlight": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.3,
1000, 0.3
]
},
{
"bound": "UPPER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.8,
1000, 0.8
]
}
],
"shadows": [
{
"bound": "LOWER",
"q_lo": 0.0,
"q_hi": 0.5,
"y_target":
[
0, 0.17,
1000, 0.17
]
}
]
},
"y_target":
[
0, 0.16,
1000, 0.165,
10000, 0.17
]
}
},
{
"rpi.contrast":
{
"ce_enable": 0,
"gamma_curve":
[
0, 0,
512, 2304,
1024, 4608,
1536, 6573,
2048, 8401,
2560, 9992,
3072, 11418,
3584, 12719,
4096, 13922,
4608, 15045,
5120, 16103,
5632, 17104,
6144, 18056,
6656, 18967,
7168, 19839,
7680, 20679,
8192, 21488,
9216, 23028,
10240, 24477,
11264, 25849,
12288, 27154,
13312, 28401,
14336, 29597,
15360, 30747,
16384, 31856,
17408, 32928,
18432, 33966,
19456, 34973,
20480, 35952,
22528, 37832,
24576, 39621,
26624, 41330,
28672, 42969,
30720, 44545,
32768, 46065,
34816, 47534,
36864, 48956,
38912, 50336,
40960, 51677,
43008, 52982,
45056, 54253,
47104, 55493,
49152, 56704,
51200, 57888,
53248, 59046,
55296, 60181,
57344, 61292,
59392, 62382,
61440, 63452,
63488, 64503,
65535, 65535
]
}
},
{
"rpi.ccm":
{
"ccms": [
{
"ct": 2000,
"ccm":
[
1.5813882365848004, -0.35293683714581114, -0.27378771561617715,
-0.4347297185453639, 1.5792631087746074, -0.12102601986382337,
0.2322290578987574, -1.4382672640468128, 2.1386425781770755
]
},
{
"ct": 2200,
"ccm":
[
1.6322048484088305, -0.45932286857238486, -0.21373542690252198,
-0.3970719209901105, 1.5877868651467202, -0.17249380832122455,
0.20753774825903412, -1.2660673594740142, 2.005654261091916
]
},
{
"ct": 2400,
"ccm":
[
1.6766610071470398, -0.5447101051688111, -0.16838641107407676,
-0.3659845183388154, 1.592223692670396, -0.2127091997471162,
0.1833964516767549, -1.1339155942419321, 1.9089342978542396
]
},
{
"ct": 2600,
"ccm":
[
1.7161984340622154, -0.6152585785678794, -0.1331100845092582,
-0.33972082628066275, 1.5944888273736966, -0.2453979465898787,
0.1615577497676328, -1.0298684958833109, 1.8357854177422053
]
},
{
"ct": 2800,
"ccm":
[
1.7519307259815728, -0.6748682080165339, -0.10515169074540848,
-0.3171703484479931, 1.5955820297498486, -0.2727395854813966,
0.14230870739974305, -0.9460976023551511, 1.778709391659538
]
},
{
"ct": 3000,
"ccm":
[
1.7846716625128374, -0.7261240476375332, -0.08274697420358428,
-0.2975654035173307, 1.5960425637021738, -0.2961043416505157,
0.12546426281675097, -0.8773434727076518, 1.7330356805246685
]
},
{
"ct": 3200,
"ccm":
[
1.8150085872943436, -0.7708109672515514, -0.06469468211419174,
-0.2803468940646277, 1.596168842967451, -0.3164044170681625,
0.11071494533513807, -0.8199772290209191, 1.69572135046367
]
},
{
"ct": 3400,
"ccm":
[
1.8433668304932087, -0.8102060605062592, -0.05013485852801454,
-0.2650934036324084, 1.5961288492969294, -0.33427554893845535,
0.0977478941863518, -0.7714303112098978, 1.6647070820146963
]
},
{
"ct": 3600,
"ccm":
[
1.8700575831917468, -0.8452518300291346, -0.03842644337477299,
-0.2514794528347016, 1.5960178299141876, -0.3501774949366156,
0.08628520830733245, -0.729841503339915, 1.638553343939267
]
},
{
"ct": 4100,
"ccm":
[
1.8988700903560716, -0.8911278803351247, -0.018848644425650693,
-0.21487101487384094, 1.599236541382614, -0.39405450457918206,
0.08251488056482173, -0.7178919368326191, 1.6267009056502704
]
},
{
"ct": 4600,
"ccm":
[
1.960355191764125, -0.9624344812121991, -0.0017122408632169205,
-0.19444620905212898, 1.5978493736948447, -0.416727638296156,
0.06310261513271084, -0.6483790952487849, 1.5834605477213093
]
},
{
"ct": 5100,
"ccm":
[
2.014680536961399, -1.0195930302148566, 0.007728256612638915,
-0.17751999660735496, 1.5977081555831, -0.4366085498741474,
0.04741267583041334, -0.5950327902073489, 1.5512919847321853
]
},
{
"ct": 5600,
"ccm":
[
2.062652337917251, -1.0658386679125478, 0.011886354256281267,
-0.16319197721451495, 1.598363237584736, -0.45422061523742235,
0.03465810928795378, -0.5535454108047286, 1.5269025836946852
]
},
{
"ct": 6100,
"ccm":
[
2.104985902038069, -1.103597868736314, 0.012503517136539277,
-0.15090797064906178, 1.5994703078166095, -0.4698414300864995,
0.02421766063474242, -0.5208922818196823, 1.5081270847783788
]
},
{
"ct": 6600,
"ccm":
[
2.1424988751299714, -1.134760232367728, 0.010730356010435522,
-0.14021846798466234, 1.600822462230719, -0.48379204794526487,
0.015521315410496622, -0.49463630325832275, 1.4933313534840327
]
},
{
"ct": 7100,
"ccm":
[
2.1758034100130925, -1.1607558481037359, 0.007452724895469076,
-0.13085694672641826, 1.6022648614493245, -0.4962330524084075,
0.008226943206113427, -0.4733077192319791, 1.4815336120437468
]
},
{
"ct": 7600,
"ccm":
[
2.205529206931895, -1.1826662383072108, 0.0032019529917605167,
-0.122572009780486, 1.6037258133595753, -0.5073973734282445,
0.0020132587619863425, -0.4556590236414181, 1.471939788496745
]
},
{
"ct": 8100,
"ccm":
[
2.232224969223067, -1.2013672897252885, -0.0016234598095482985,
-0.11518026734442414, 1.6051544769439803, -0.5174558699422255,
-0.0033378143542219835, -0.4408590373867774, 1.4640252230667452
]
},
{
"ct": 8600,
"ccm":
[
2.256082295891265, -1.2173210549996634, -0.0067231350481711675,
-0.10860272839843167, 1.6065150139140594, -0.5264728573611493,
-0.007952618707984149, -0.4284003574050791, 1.4574646927117558
]
}
]
}
},
{
"rpi.sharpen": { }
}
]
} |
0 | repos/libcamera/src/ipa/rpi/vc4 | repos/libcamera/src/ipa/rpi/vc4/data/uncalibrated.json | {
"version": 2.0,
"target": "bcm2835",
"algorithms": [
{
"rpi.black_level":
{
"black_level": 4096
}
},
{
"rpi.awb":
{
"use_derivatives": 0,
"bayes": 0
}
},
{
"rpi.agc":
{
"metering_modes":
{
"centre-weighted":
{
"weights":
[
4, 4, 4, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0
]
}
},
"exposure_modes":
{
"normal":
{
"shutter": [ 100, 15000, 30000, 60000, 120000 ],
"gain": [ 1.0, 2.0, 3.0, 4.0, 6.0 ]
},
"short":
{
"shutter": [ 100, 5000, 10000, 20000, 30000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 8.0 ]
},
"long":
{
"shutter": [ 1000, 30000, 60000, 90000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 12.0 ]
}
},
"constraint_modes":
{
"normal": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.4,
1000, 0.4
]
}
]
},
"y_target":
[
0, 0.16,
1000, 0.165,
10000, 0.17
]
}
},
{
"rpi.ccm":
{
"ccms": [
{
"ct": 4000,
"ccm":
[
2.0, -1.0, 0.0,
-0.5, 2.0, -0.5,
0, -1.0, 2.0
]
}
]
}
},
{
"rpi.contrast":
{
"ce_enable": 0,
"gamma_curve":
[
0, 0,
1024, 5040,
2048, 9338,
3072, 12356,
4096, 15312,
5120, 18051,
6144, 20790,
7168, 23193,
8192, 25744,
9216, 27942,
10240, 30035,
11264, 32005,
12288, 33975,
13312, 35815,
14336, 37600,
15360, 39168,
16384, 40642,
18432, 43379,
20480, 45749,
22528, 47753,
24576, 49621,
26624, 51253,
28672, 52698,
30720, 53796,
32768, 54876,
36864, 57012,
40960, 58656,
45056, 59954,
49152, 61183,
53248, 62355,
57344, 63419,
61440, 64476,
65535, 65535
]
}
}
]
} |
0 | repos/libcamera/src/ipa/rpi/vc4 | repos/libcamera/src/ipa/rpi/vc4/data/ov9281_mono.json | {
"version": 2.0,
"target": "bcm2835",
"algorithms": [
{
"rpi.black_level":
{
"black_level": 4096
}
},
{
"rpi.lux":
{
"reference_shutter_speed": 2000,
"reference_gain": 1.0,
"reference_aperture": 1.0,
"reference_lux": 800,
"reference_Y": 20000
}
},
{
"rpi.noise":
{
"reference_constant": 0,
"reference_slope": 2.5
}
},
{
"rpi.sdn": { }
},
{
"rpi.agc":
{
"metering_modes":
{
"centre-weighted":
{
"weights":
[
4, 4, 4, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0
]
}
},
"exposure_modes":
{
"normal":
{
"shutter": [ 100, 15000, 30000, 60000, 120000 ],
"gain": [ 1.0, 2.0, 3.0, 4.0, 8.0 ]
},
"short":
{
"shutter": [ 100, 5000, 10000, 20000, 30000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 8.0 ]
},
"long":
{
"shutter": [ 1000, 30000, 60000, 90000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 12.0 ]
}
},
"constraint_modes":
{
"normal": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.4,
1000, 0.4
]
}
]
},
"y_target":
[
0, 0.16,
1000, 0.165,
10000, 0.17
]
}
},
{
"rpi.alsc":
{
"n_iter": 0,
"luminance_strength": 1.0,
"corner_strength": 1.5
}
},
{
"rpi.contrast":
{
"ce_enable": 0,
"gamma_curve":
[
0, 0,
1024, 5040,
2048, 9338,
3072, 12356,
4096, 15312,
5120, 18051,
6144, 20790,
7168, 23193,
8192, 25744,
9216, 27942,
10240, 30035,
11264, 32005,
12288, 33975,
13312, 35815,
14336, 37600,
15360, 39168,
16384, 40642,
18432, 43379,
20480, 45749,
22528, 47753,
24576, 49621,
26624, 51253,
28672, 52698,
30720, 53796,
32768, 54876,
36864, 57012,
40960, 58656,
45056, 59954,
49152, 61183,
53248, 62355,
57344, 63419,
61440, 64476,
65535, 65535
]
}
}
]
} |
0 | repos/libcamera/src/ipa/rpi/vc4 | repos/libcamera/src/ipa/rpi/vc4/data/imx708_wide.json | {
"version": 2.0,
"target": "bcm2835",
"algorithms": [
{
"rpi.black_level":
{
"black_level": 4096
}
},
{
"rpi.dpc": { }
},
{
"rpi.lux":
{
"reference_shutter_speed": 9989,
"reference_gain": 1.23,
"reference_aperture": 1.0,
"reference_lux": 980,
"reference_Y": 8345
}
},
{
"rpi.noise":
{
"reference_constant": 16.0,
"reference_slope": 4.0
}
},
{
"rpi.geq":
{
"offset": 215,
"slope": 0.00287
}
},
{
"rpi.sdn": { }
},
{
"rpi.awb":
{
"priors": [
{
"lux": 0,
"prior":
[
2000, 1.0,
3000, 0.0,
13000, 0.0
]
},
{
"lux": 800,
"prior":
[
2000, 0.0,
6000, 2.0,
13000, 2.0
]
},
{
"lux": 1500,
"prior":
[
2000, 0.0,
4000, 1.0,
6000, 6.0,
6500, 7.0,
7000, 1.0,
13000, 1.0
]
}
],
"modes":
{
"auto":
{
"lo": 2500,
"hi": 8000
},
"incandescent":
{
"lo": 2500,
"hi": 3000
},
"tungsten":
{
"lo": 3000,
"hi": 3500
},
"fluorescent":
{
"lo": 4000,
"hi": 4700
},
"indoor":
{
"lo": 3000,
"hi": 5000
},
"daylight":
{
"lo": 5500,
"hi": 6500
},
"cloudy":
{
"lo": 7000,
"hi": 8600
}
},
"bayes": 1,
"ct_curve":
[
2750.0, 0.7881, 0.2849,
2940.0, 0.7559, 0.3103,
3650.0, 0.6291, 0.4206,
4625.0, 0.5336, 0.5161,
5715.0, 0.4668, 0.5898
],
"sensitivity_r": 1.05,
"sensitivity_b": 1.05,
"transverse_pos": 0.01165,
"transverse_neg": 0.01601
}
},
{
"rpi.agc":
{
"channels": [
{
"metering_modes":
{
"centre-weighted":
{
"weights":
[
3, 3, 3, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0
]
},
"spot":
{
"weights":
[
2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
},
"matrix":
{
"weights":
[
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
]
}
},
"exposure_modes":
{
"normal":
{
"shutter": [ 100, 15000, 30000, 60000, 120000 ],
"gain": [ 1.0, 1.0, 2.0, 4.0, 6.0 ]
},
"short":
{
"shutter": [ 100, 5000, 10000, 20000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 6.0 ]
},
"long":
{
"shutter": [ 1000, 30000, 60000, 90000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 12.0 ]
}
},
"constraint_modes":
{
"normal": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.2,
1000, 0.2
]
}
],
"highlight": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.2,
1000, 0.2
]
},
{
"bound": "UPPER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.8,
1000, 0.8
]
}
]
},
"y_target":
[
0, 0.16,
1000, 0.165,
10000, 0.17
],
"startup_frames": 5,
"convergence_frames": 6,
"speed": 0.15
},
{
"base_ev": 0.125,
"metering_modes":
{
"centre-weighted":
{
"weights":
[
3, 3, 3, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0
]
},
"spot":
{
"weights":
[
2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
},
"matrix":
{
"weights":
[
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
]
}
},
"exposure_modes":
{
"normal":
{
"shutter": [ 100, 15000, 30000, 60000, 120000 ],
"gain": [ 1.0, 1.0, 2.0, 4.0, 6.0 ]
},
"short":
{
"shutter": [ 100, 5000, 10000, 20000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 6.0 ]
},
"long":
{
"shutter": [ 1000, 30000, 60000, 90000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 12.0 ]
}
},
"constraint_modes":
{
"normal": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.2,
1000, 0.2
]
}
],
"highlight": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.2,
1000, 0.2
]
},
{
"bound": "UPPER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.8,
1000, 0.8
]
}
]
},
"y_target":
[
0, 0.16,
1000, 0.165,
10000, 0.17
],
"startup_frames": 5,
"convergence_frames": 6,
"speed": 0.15
},
{
"base_ev": 1.5,
"metering_modes":
{
"centre-weighted":
{
"weights":
[
3, 3, 3, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0
]
},
"spot":
{
"weights":
[
2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
},
"matrix":
{
"weights":
[
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
]
}
},
"exposure_modes":
{
"normal":
{
"shutter": [ 100, 15000, 30000, 60000, 120000 ],
"gain": [ 1.0, 1.0, 2.0, 4.0, 6.0 ]
},
"short":
{
"shutter": [ 100, 5000, 10000, 20000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 6.0 ]
},
"long":
{
"shutter": [ 1000, 30000, 60000, 90000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 12.0 ]
}
},
"constraint_modes":
{
"normal": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.2,
1000, 0.2
]
}
],
"highlight": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.2,
1000, 0.2
]
},
{
"bound": "UPPER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.8,
1000, 0.8
]
}
]
},
"y_target":
[
0, 0.16,
1000, 0.165,
10000, 0.17
],
"startup_frames": 5,
"convergence_frames": 6,
"speed": 0.15
}
]
}
},
{
"rpi.alsc":
{
"omega": 1.3,
"n_iter": 100,
"luminance_strength": 0.5,
"calibrations_Cr": [
{
"ct": 3000,
"table":
[
1.529, 1.526, 1.522, 1.506, 1.489, 1.473, 1.458, 1.456, 1.456, 1.458, 1.474, 1.493, 1.513, 1.531, 1.541, 1.544,
1.527, 1.523, 1.511, 1.491, 1.474, 1.459, 1.445, 1.441, 1.441, 1.446, 1.461, 1.479, 1.499, 1.521, 1.536, 1.541,
1.524, 1.515, 1.498, 1.477, 1.459, 1.444, 1.431, 1.426, 1.426, 1.435, 1.446, 1.466, 1.487, 1.507, 1.528, 1.538,
1.522, 1.512, 1.491, 1.468, 1.447, 1.431, 1.423, 1.417, 1.418, 1.425, 1.435, 1.455, 1.479, 1.499, 1.523, 1.537,
1.522, 1.509, 1.485, 1.463, 1.441, 1.423, 1.416, 1.413, 1.415, 1.418, 1.429, 1.449, 1.473, 1.495, 1.521, 1.538,
1.522, 1.508, 1.483, 1.461, 1.438, 1.421, 1.413, 1.412, 1.412, 1.415, 1.428, 1.447, 1.471, 1.493, 1.519, 1.538,
1.522, 1.509, 1.484, 1.462, 1.439, 1.421, 1.414, 1.411, 1.412, 1.416, 1.428, 1.447, 1.471, 1.493, 1.519, 1.537,
1.523, 1.511, 1.487, 1.465, 1.443, 1.424, 1.417, 1.413, 1.415, 1.419, 1.429, 1.451, 1.473, 1.494, 1.519, 1.536,
1.524, 1.514, 1.493, 1.471, 1.451, 1.434, 1.424, 1.419, 1.419, 1.428, 1.437, 1.457, 1.477, 1.498, 1.521, 1.538,
1.527, 1.521, 1.503, 1.481, 1.462, 1.449, 1.434, 1.429, 1.429, 1.437, 1.451, 1.469, 1.488, 1.508, 1.527, 1.539,
1.529, 1.527, 1.515, 1.495, 1.477, 1.462, 1.449, 1.444, 1.444, 1.451, 1.467, 1.481, 1.499, 1.519, 1.535, 1.543,
1.534, 1.531, 1.527, 1.512, 1.492, 1.476, 1.463, 1.461, 1.461, 1.464, 1.479, 1.495, 1.515, 1.533, 1.543, 1.546
]
},
{
"ct": 5000,
"table":
[
2.603, 2.599, 2.591, 2.567, 2.539, 2.515, 2.489, 2.489, 2.489, 2.491, 2.516, 2.543, 2.574, 2.597, 2.614, 2.617,
2.596, 2.591, 2.571, 2.542, 2.516, 2.489, 2.464, 2.458, 2.458, 2.469, 2.492, 2.518, 2.547, 2.576, 2.602, 2.614,
2.591, 2.576, 2.546, 2.519, 2.489, 2.464, 2.437, 2.427, 2.427, 2.441, 2.467, 2.492, 2.525, 2.553, 2.586, 2.605,
2.588, 2.568, 2.534, 2.503, 2.472, 2.437, 2.423, 2.409, 2.411, 2.425, 2.441, 2.475, 2.513, 2.541, 2.577, 2.602,
2.588, 2.565, 2.527, 2.494, 2.461, 2.425, 2.409, 2.399, 2.403, 2.409, 2.431, 2.466, 2.503, 2.534, 2.571, 2.601,
2.586, 2.561, 2.525, 2.491, 2.454, 2.418, 2.399, 2.396, 2.395, 2.402, 2.424, 2.461, 2.501, 2.531, 2.567, 2.599,
2.583, 2.559, 2.525, 2.491, 2.454, 2.418, 2.398, 2.393, 2.393, 2.401, 2.423, 2.459, 2.498, 2.531, 2.566, 2.597,
2.583, 2.559, 2.526, 2.494, 2.458, 2.421, 2.404, 2.397, 2.399, 2.404, 2.426, 2.461, 2.501, 2.531, 2.566, 2.596,
2.583, 2.563, 2.531, 2.501, 2.469, 2.435, 2.419, 2.405, 2.404, 2.422, 2.435, 2.471, 2.505, 2.537, 2.572, 2.596,
2.585, 2.571, 2.539, 2.516, 2.486, 2.458, 2.435, 2.424, 2.424, 2.435, 2.459, 2.489, 2.521, 2.546, 2.579, 2.601,
2.589, 2.578, 2.557, 2.532, 2.506, 2.483, 2.458, 2.449, 2.449, 2.459, 2.485, 2.507, 2.535, 2.563, 2.591, 2.605,
2.589, 2.586, 2.575, 2.551, 2.525, 2.503, 2.481, 2.476, 2.476, 2.481, 2.504, 2.526, 2.555, 2.583, 2.604, 2.611
]
}
],
"calibrations_Cb": [
{
"ct": 3000,
"table":
[
3.311, 3.339, 3.369, 3.374, 3.371, 3.363, 3.356, 3.353, 3.353, 3.353, 3.357, 3.362, 3.362, 3.356, 3.328, 3.311,
3.321, 3.354, 3.374, 3.374, 3.368, 3.359, 3.352, 3.349, 3.347, 3.347, 3.349, 3.357, 3.361, 3.359, 3.343, 3.324,
3.334, 3.368, 3.375, 3.374, 3.365, 3.356, 3.349, 3.347, 3.346, 3.346, 3.347, 3.349, 3.358, 3.361, 3.357, 3.336,
3.346, 3.378, 3.378, 3.369, 3.363, 3.358, 3.351, 3.348, 3.347, 3.346, 3.347, 3.348, 3.354, 3.364, 3.363, 3.345,
3.351, 3.381, 3.381, 3.368, 3.361, 3.357, 3.349, 3.347, 3.347, 3.345, 3.345, 3.347, 3.353, 3.364, 3.364, 3.347,
3.353, 3.379, 3.379, 3.366, 3.359, 3.351, 3.348, 3.343, 3.342, 3.342, 3.343, 3.345, 3.351, 3.363, 3.363, 3.347,
3.353, 3.376, 3.376, 3.363, 3.351, 3.347, 3.343, 3.338, 3.336, 3.338, 3.339, 3.343, 3.351, 3.361, 3.361, 3.347,
3.351, 3.374, 3.374, 3.359, 3.351, 3.345, 3.338, 3.334, 3.333, 3.334, 3.336, 3.339, 3.347, 3.358, 3.358, 3.345,
3.346, 3.368, 3.368, 3.359, 3.349, 3.343, 3.336, 3.332, 3.327, 3.331, 3.333, 3.337, 3.346, 3.356, 3.356, 3.341,
3.336, 3.362, 3.364, 3.359, 3.351, 3.342, 3.334, 3.324, 3.324, 3.325, 3.329, 3.336, 3.346, 3.351, 3.351, 3.333,
3.324, 3.349, 3.359, 3.358, 3.352, 3.341, 3.329, 3.323, 3.321, 3.322, 3.326, 3.336, 3.346, 3.347, 3.339, 3.319,
3.311, 3.328, 3.352, 3.354, 3.352, 3.341, 3.329, 3.321, 3.319, 3.321, 3.324, 3.338, 3.343, 3.343, 3.319, 3.312
]
},
{
"ct": 5000,
"table":
[
1.634, 1.647, 1.665, 1.668, 1.668, 1.664, 1.662, 1.662, 1.661, 1.661, 1.661, 1.663, 1.663, 1.659, 1.643, 1.636,
1.639, 1.656, 1.668, 1.669, 1.668, 1.666, 1.664, 1.663, 1.663, 1.661, 1.661, 1.662, 1.663, 1.662, 1.654, 1.642,
1.645, 1.663, 1.669, 1.668, 1.667, 1.667, 1.667, 1.668, 1.668, 1.665, 1.662, 1.661, 1.662, 1.664, 1.661, 1.649,
1.651, 1.669, 1.669, 1.667, 1.666, 1.668, 1.669, 1.672, 1.672, 1.668, 1.665, 1.661, 1.661, 1.665, 1.665, 1.655,
1.654, 1.669, 1.669, 1.666, 1.666, 1.669, 1.672, 1.673, 1.673, 1.671, 1.666, 1.661, 1.661, 1.665, 1.665, 1.659,
1.654, 1.669, 1.669, 1.666, 1.666, 1.669, 1.671, 1.673, 1.672, 1.669, 1.667, 1.661, 1.661, 1.665, 1.665, 1.659,
1.654, 1.668, 1.668, 1.664, 1.663, 1.667, 1.669, 1.671, 1.669, 1.668, 1.665, 1.661, 1.661, 1.663, 1.663, 1.659,
1.653, 1.665, 1.665, 1.661, 1.661, 1.664, 1.667, 1.668, 1.668, 1.665, 1.661, 1.658, 1.659, 1.662, 1.662, 1.657,
1.651, 1.664, 1.664, 1.659, 1.659, 1.661, 1.663, 1.663, 1.662, 1.661, 1.658, 1.656, 1.657, 1.662, 1.662, 1.655,
1.645, 1.661, 1.663, 1.661, 1.659, 1.659, 1.659, 1.657, 1.657, 1.656, 1.654, 1.655, 1.656, 1.661, 1.661, 1.649,
1.641, 1.654, 1.661, 1.661, 1.659, 1.657, 1.655, 1.653, 1.652, 1.651, 1.652, 1.653, 1.657, 1.658, 1.655, 1.644,
1.635, 1.645, 1.661, 1.661, 1.661, 1.655, 1.653, 1.649, 1.648, 1.647, 1.651, 1.653, 1.657, 1.657, 1.646, 1.638
]
}
],
"luminance_lut":
[
3.535, 3.279, 3.049, 2.722, 2.305, 1.958, 1.657, 1.647, 1.647, 1.656, 1.953, 2.289, 2.707, 3.058, 3.325, 3.589,
3.379, 3.157, 2.874, 2.421, 1.973, 1.735, 1.472, 1.388, 1.388, 1.471, 1.724, 1.963, 2.409, 2.877, 3.185, 3.416,
3.288, 3.075, 2.696, 2.169, 1.735, 1.472, 1.311, 1.208, 1.208, 1.306, 1.471, 1.724, 2.159, 2.695, 3.092, 3.321,
3.238, 3.001, 2.534, 1.981, 1.572, 1.311, 1.207, 1.082, 1.082, 1.204, 1.306, 1.563, 1.973, 2.529, 3.008, 3.259,
3.211, 2.938, 2.414, 1.859, 1.468, 1.221, 1.082, 1.036, 1.031, 1.079, 1.217, 1.463, 1.851, 2.403, 2.931, 3.229,
3.206, 2.904, 2.356, 1.802, 1.421, 1.181, 1.037, 1.002, 1.002, 1.032, 1.175, 1.414, 1.793, 2.343, 2.899, 3.223,
3.206, 2.904, 2.356, 1.802, 1.421, 1.181, 1.037, 1.005, 1.005, 1.032, 1.175, 1.414, 1.793, 2.343, 2.899, 3.223,
3.211, 2.936, 2.417, 1.858, 1.468, 1.222, 1.083, 1.037, 1.032, 1.083, 1.218, 1.463, 1.848, 2.403, 2.932, 3.226,
3.234, 2.997, 2.536, 1.979, 1.569, 1.311, 1.206, 1.084, 1.084, 1.204, 1.305, 1.565, 1.966, 2.524, 2.996, 3.251,
3.282, 3.069, 2.697, 2.166, 1.731, 1.471, 1.311, 1.207, 1.207, 1.305, 1.466, 1.729, 2.158, 2.689, 3.077, 3.304,
3.369, 3.146, 2.873, 2.415, 1.964, 1.722, 1.471, 1.382, 1.382, 1.466, 1.722, 1.964, 2.408, 2.871, 3.167, 3.401,
3.524, 3.253, 3.025, 2.691, 2.275, 1.939, 1.657, 1.628, 1.628, 1.654, 1.936, 2.275, 2.687, 3.029, 3.284, 3.574
],
"sigma": 0.00195,
"sigma_Cb": 0.00241
}
},
{
"rpi.contrast":
{
"ce_enable": 1,
"gamma_curve":
[
0, 0,
1024, 5040,
2048, 9338,
3072, 12356,
4096, 15312,
5120, 18051,
6144, 20790,
7168, 23193,
8192, 25744,
9216, 27942,
10240, 30035,
11264, 32005,
12288, 33975,
13312, 35815,
14336, 37600,
15360, 39168,
16384, 40642,
18432, 43379,
20480, 45749,
22528, 47753,
24576, 49621,
26624, 51253,
28672, 52698,
30720, 53796,
32768, 54876,
36864, 57012,
40960, 58656,
45056, 59954,
49152, 61183,
53248, 62355,
57344, 63419,
61440, 64476,
65535, 65535
]
}
},
{
"rpi.ccm":
{
"ccms": [
{
"ct": 2868,
"ccm":
[
1.58923, -0.36649, -0.22273,
-0.43591, 1.84858, -0.41268,
0.02948, -0.77666, 1.74718
]
},
{
"ct": 2965,
"ccm":
[
1.73397, -0.42794, -0.30603,
-0.36504, 1.72431, -0.35926,
0.12765, -1.10933, 1.98168
]
},
{
"ct": 3603,
"ccm":
[
1.61787, -0.42704, -0.19084,
-0.37819, 1.74588, -0.36769,
0.00961, -0.59807, 1.58847
]
},
{
"ct": 4620,
"ccm":
[
1.55581, -0.35422, -0.20158,
-0.31805, 1.79309, -0.47505,
-0.01256, -0.54489, 1.55746
]
},
{
"ct": 5901,
"ccm":
[
1.64439, -0.48855, -0.15585,
-0.29149, 1.67122, -0.37972,
-0.03111, -0.44052, 1.47163
]
},
{
"ct": 7610,
"ccm":
[
1.48667, -0.26072, -0.22595,
-0.21815, 1.86724, -0.64909,
-0.00985, -0.64485, 1.65471
]
}
]
}
},
{
"rpi.sharpen": { }
},
{
"rpi.af":
{
"ranges":
{
"normal":
{
"min": 0.0,
"max": 12.0,
"default": 1.0
},
"macro":
{
"min": 4.0,
"max": 32.0,
"default": 6.0
}
},
"speeds":
{
"normal":
{
"step_coarse": 2.0,
"step_fine": 0.5,
"contrast_ratio": 0.75,
"pdaf_gain": -0.03,
"pdaf_squelch": 0.2,
"max_slew": 4.0,
"pdaf_frames": 20,
"dropout_frames": 6,
"step_frames": 4
},
"fast":
{
"step_coarse": 2.0,
"step_fine": 0.5,
"contrast_ratio": 0.75,
"pdaf_gain": -0.05,
"pdaf_squelch": 0.2,
"max_slew": 5.0,
"pdaf_frames": 16,
"dropout_frames": 6,
"step_frames": 4
}
},
"conf_epsilon": 8,
"conf_thresh": 12,
"conf_clip": 512,
"skip_frames": 5,
"map": [ 0.0, 420, 35.0, 920 ]
}
},
{
"rpi.hdr":
{
"MultiExposureUnmerged":
{
"cadence": [ 1, 2 ],
"channel_map":
{
"short": 1,
"long": 2
}
}
}
}
]
} |
0 | repos/libcamera/src/ipa/rpi/vc4 | repos/libcamera/src/ipa/rpi/vc4/data/se327m12.json | {
"version": 2.0,
"target": "bcm2835",
"algorithms": [
{
"rpi.black_level":
{
"black_level": 3840
}
},
{
"rpi.dpc": { }
},
{
"rpi.lux":
{
"reference_shutter_speed": 6873,
"reference_gain": 1.0,
"reference_aperture": 1.0,
"reference_lux": 800,
"reference_Y": 12293
}
},
{
"rpi.noise":
{
"reference_constant": 0,
"reference_slope": 1.986
}
},
{
"rpi.geq":
{
"offset": 207,
"slope": 0.00539
}
},
{
"rpi.sdn": { }
},
{
"rpi.awb":
{
"priors": [
{
"lux": 0,
"prior":
[
2000, 1.0,
3000, 0.0,
13000, 0.0
]
},
{
"lux": 800,
"prior":
[
2000, 0.0,
6000, 2.0,
13000, 2.0
]
},
{
"lux": 1500,
"prior":
[
2000, 0.0,
4000, 1.0,
6000, 6.0,
6500, 7.0,
7000, 1.0,
13000, 1.0
]
}
],
"modes":
{
"auto":
{
"lo": 2500,
"hi": 8000
},
"incandescent":
{
"lo": 2500,
"hi": 3000
},
"tungsten":
{
"lo": 3000,
"hi": 3500
},
"fluorescent":
{
"lo": 4000,
"hi": 4700
},
"indoor":
{
"lo": 3000,
"hi": 5000
},
"daylight":
{
"lo": 5500,
"hi": 6500
},
"cloudy":
{
"lo": 7000,
"hi": 8600
}
},
"bayes": 1,
"ct_curve":
[
2900.0, 0.9217, 0.3657,
3600.0, 0.7876, 0.4651,
4600.0, 0.6807, 0.5684,
5800.0, 0.5937, 0.6724,
8100.0, 0.5447, 0.7403
],
"sensitivity_r": 1.0,
"sensitivity_b": 1.0,
"transverse_pos": 0.0162,
"transverse_neg": 0.0204
}
},
{
"rpi.agc":
{
"metering_modes":
{
"centre-weighted":
{
"weights":
[
3, 3, 3, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0
]
},
"spot":
{
"weights":
[
2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
},
"matrix":
{
"weights":
[
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
]
}
},
"exposure_modes":
{
"normal":
{
"shutter": [ 100, 10000, 30000, 60000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 8.0 ]
},
"short":
{
"shutter": [ 100, 5000, 10000, 20000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 8.0 ]
},
"long":
{
"shutter": [ 1000, 30000, 60000, 90000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 12.0 ]
}
},
"constraint_modes":
{
"normal": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.5,
1000, 0.5
]
}
],
"highlight": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.5,
1000, 0.5
]
},
{
"bound": "UPPER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.8,
1000, 0.8
]
}
]
},
"y_target":
[
0, 0.16,
1000, 0.165,
10000, 0.17
]
}
},
{
"rpi.alsc":
{
"omega": 1.3,
"n_iter": 100,
"luminance_strength": 0.5,
"calibrations_Cr": [
{
"ct": 4000,
"table":
[
1.481, 1.471, 1.449, 1.429, 1.416, 1.404, 1.394, 1.389, 1.389, 1.389, 1.392, 1.397, 1.404, 1.416, 1.429, 1.437,
1.472, 1.456, 1.436, 1.418, 1.405, 1.394, 1.389, 1.384, 1.382, 1.382, 1.386, 1.388, 1.398, 1.407, 1.422, 1.429,
1.465, 1.443, 1.426, 1.411, 1.397, 1.389, 1.383, 1.377, 1.377, 1.377, 1.379, 1.384, 1.388, 1.398, 1.411, 1.422,
1.462, 1.441, 1.423, 1.409, 1.395, 1.385, 1.379, 1.376, 1.374, 1.374, 1.375, 1.379, 1.384, 1.394, 1.407, 1.418,
1.461, 1.439, 1.421, 1.407, 1.394, 1.385, 1.381, 1.376, 1.373, 1.373, 1.373, 1.376, 1.381, 1.389, 1.403, 1.415,
1.461, 1.439, 1.419, 1.404, 1.392, 1.384, 1.379, 1.376, 1.373, 1.372, 1.374, 1.375, 1.379, 1.389, 1.401, 1.413,
1.461, 1.438, 1.419, 1.402, 1.389, 1.383, 1.377, 1.375, 1.373, 1.372, 1.372, 1.375, 1.381, 1.388, 1.401, 1.414,
1.462, 1.438, 1.419, 1.403, 1.391, 1.381, 1.377, 1.374, 1.373, 1.373, 1.374, 1.376, 1.381, 1.389, 1.401, 1.414,
1.462, 1.441, 1.423, 1.405, 1.392, 1.383, 1.377, 1.374, 1.373, 1.372, 1.373, 1.376, 1.382, 1.391, 1.402, 1.414,
1.465, 1.444, 1.424, 1.407, 1.393, 1.382, 1.378, 1.373, 1.369, 1.369, 1.372, 1.375, 1.381, 1.389, 1.402, 1.417,
1.469, 1.449, 1.427, 1.413, 1.396, 1.384, 1.381, 1.375, 1.371, 1.371, 1.373, 1.377, 1.385, 1.393, 1.407, 1.422,
1.474, 1.456, 1.436, 1.419, 1.407, 1.391, 1.383, 1.379, 1.377, 1.377, 1.378, 1.381, 1.391, 1.404, 1.422, 1.426
]
},
{
"ct": 5000,
"table":
[
1.742, 1.721, 1.689, 1.661, 1.639, 1.623, 1.613, 1.609, 1.607, 1.606, 1.609, 1.617, 1.626, 1.641, 1.665, 1.681,
1.728, 1.703, 1.672, 1.645, 1.631, 1.614, 1.602, 1.599, 1.596, 1.597, 1.601, 1.608, 1.618, 1.631, 1.653, 1.671,
1.713, 1.691, 1.658, 1.635, 1.618, 1.606, 1.595, 1.591, 1.588, 1.588, 1.591, 1.601, 1.608, 1.624, 1.641, 1.658,
1.707, 1.681, 1.651, 1.627, 1.613, 1.599, 1.591, 1.585, 1.583, 1.584, 1.587, 1.591, 1.601, 1.615, 1.633, 1.655,
1.699, 1.672, 1.644, 1.622, 1.606, 1.593, 1.586, 1.581, 1.579, 1.581, 1.583, 1.587, 1.597, 1.611, 1.631, 1.652,
1.697, 1.665, 1.637, 1.617, 1.601, 1.589, 1.584, 1.579, 1.577, 1.578, 1.581, 1.585, 1.597, 1.607, 1.627, 1.652,
1.697, 1.662, 1.634, 1.613, 1.599, 1.591, 1.583, 1.578, 1.576, 1.576, 1.579, 1.586, 1.597, 1.607, 1.628, 1.653,
1.697, 1.662, 1.633, 1.613, 1.598, 1.589, 1.582, 1.578, 1.576, 1.577, 1.582, 1.589, 1.598, 1.611, 1.635, 1.655,
1.701, 1.666, 1.636, 1.616, 1.602, 1.589, 1.583, 1.578, 1.577, 1.581, 1.583, 1.591, 1.601, 1.617, 1.639, 1.659,
1.708, 1.671, 1.641, 1.618, 1.603, 1.591, 1.584, 1.581, 1.578, 1.581, 1.585, 1.594, 1.604, 1.622, 1.646, 1.666,
1.714, 1.681, 1.648, 1.622, 1.608, 1.599, 1.591, 1.584, 1.583, 1.584, 1.589, 1.599, 1.614, 1.629, 1.653, 1.673,
1.719, 1.691, 1.659, 1.631, 1.618, 1.606, 1.596, 1.591, 1.591, 1.593, 1.599, 1.608, 1.623, 1.642, 1.665, 1.681
]
}
],
"calibrations_Cb": [
{
"ct": 4000,
"table":
[
2.253, 2.267, 2.289, 2.317, 2.342, 2.359, 2.373, 2.381, 2.381, 2.378, 2.368, 2.361, 2.344, 2.337, 2.314, 2.301,
2.262, 2.284, 2.314, 2.335, 2.352, 2.371, 2.383, 2.391, 2.393, 2.391, 2.381, 2.368, 2.361, 2.342, 2.322, 2.308,
2.277, 2.303, 2.321, 2.346, 2.364, 2.381, 2.391, 2.395, 2.397, 2.397, 2.395, 2.381, 2.367, 2.354, 2.332, 2.321,
2.277, 2.304, 2.327, 2.349, 2.369, 2.388, 2.393, 2.396, 2.396, 2.398, 2.396, 2.391, 2.376, 2.359, 2.339, 2.328,
2.279, 2.311, 2.327, 2.354, 2.377, 2.389, 2.393, 2.397, 2.397, 2.398, 2.395, 2.393, 2.382, 2.363, 2.344, 2.332,
2.282, 2.311, 2.329, 2.354, 2.377, 2.386, 2.396, 2.396, 2.395, 2.396, 2.397, 2.394, 2.383, 2.367, 2.346, 2.333,
2.283, 2.314, 2.333, 2.353, 2.375, 2.389, 2.394, 2.395, 2.395, 2.395, 2.396, 2.394, 2.386, 2.368, 2.354, 2.336,
2.287, 2.309, 2.331, 2.352, 2.373, 2.386, 2.394, 2.395, 2.395, 2.396, 2.396, 2.394, 2.384, 2.371, 2.354, 2.339,
2.289, 2.307, 2.326, 2.347, 2.369, 2.385, 2.392, 2.397, 2.398, 2.398, 2.397, 2.392, 2.383, 2.367, 2.352, 2.337,
2.286, 2.303, 2.322, 2.342, 2.361, 2.379, 2.389, 2.394, 2.397, 2.398, 2.396, 2.389, 2.381, 2.366, 2.346, 2.332,
2.284, 2.291, 2.312, 2.329, 2.351, 2.372, 2.381, 2.389, 2.393, 2.394, 2.389, 2.385, 2.374, 2.362, 2.338, 2.325,
2.283, 2.288, 2.305, 2.319, 2.339, 2.365, 2.374, 2.381, 2.384, 2.386, 2.385, 2.379, 2.368, 2.342, 2.325, 2.318
]
},
{
"ct": 5000,
"table":
[
1.897, 1.919, 1.941, 1.969, 1.989, 2.003, 2.014, 2.019, 2.019, 2.017, 2.014, 2.008, 1.999, 1.988, 1.968, 1.944,
1.914, 1.932, 1.957, 1.982, 1.998, 2.014, 2.023, 2.029, 2.031, 2.029, 2.022, 2.014, 2.006, 1.995, 1.976, 1.955,
1.925, 1.951, 1.974, 1.996, 2.013, 2.027, 2.035, 2.039, 2.039, 2.038, 2.035, 2.026, 2.015, 2.002, 1.984, 1.963,
1.932, 1.958, 1.986, 2.007, 2.024, 2.034, 2.041, 2.041, 2.045, 2.045, 2.042, 2.033, 2.023, 2.009, 1.995, 1.971,
1.942, 1.964, 1.994, 2.012, 2.029, 2.038, 2.043, 2.046, 2.047, 2.046, 2.045, 2.039, 2.029, 2.014, 1.997, 1.977,
1.946, 1.974, 1.999, 2.015, 2.031, 2.041, 2.046, 2.047, 2.048, 2.047, 2.044, 2.041, 2.031, 2.019, 1.999, 1.978,
1.948, 1.975, 2.002, 2.018, 2.031, 2.041, 2.046, 2.047, 2.048, 2.048, 2.045, 2.041, 2.029, 2.019, 1.998, 1.978,
1.948, 1.973, 2.002, 2.018, 2.029, 2.042, 2.045, 2.048, 2.048, 2.048, 2.044, 2.037, 2.027, 2.014, 1.993, 1.978,
1.945, 1.969, 1.998, 2.015, 2.028, 2.037, 2.045, 2.046, 2.047, 2.044, 2.039, 2.033, 2.022, 2.008, 1.989, 1.971,
1.939, 1.964, 1.991, 2.011, 2.024, 2.032, 2.036, 2.042, 2.042, 2.039, 2.035, 2.024, 2.012, 1.998, 1.977, 1.964,
1.932, 1.953, 1.981, 2.006, 2.016, 2.024, 2.028, 2.031, 2.034, 2.031, 2.024, 2.015, 2.005, 1.989, 1.966, 1.955,
1.928, 1.944, 1.973, 1.999, 2.007, 2.016, 2.019, 2.025, 2.026, 2.025, 2.017, 2.008, 1.997, 1.975, 1.958, 1.947
]
}
],
"luminance_lut":
[
1.877, 1.597, 1.397, 1.269, 1.191, 1.131, 1.093, 1.078, 1.071, 1.069, 1.086, 1.135, 1.221, 1.331, 1.474, 1.704,
1.749, 1.506, 1.334, 1.229, 1.149, 1.088, 1.058, 1.053, 1.051, 1.046, 1.053, 1.091, 1.163, 1.259, 1.387, 1.587,
1.661, 1.451, 1.295, 1.195, 1.113, 1.061, 1.049, 1.048, 1.047, 1.049, 1.049, 1.066, 1.124, 1.211, 1.333, 1.511,
1.615, 1.411, 1.267, 1.165, 1.086, 1.052, 1.047, 1.047, 1.047, 1.049, 1.052, 1.056, 1.099, 1.181, 1.303, 1.471,
1.576, 1.385, 1.252, 1.144, 1.068, 1.049, 1.044, 1.044, 1.045, 1.049, 1.053, 1.054, 1.083, 1.163, 1.283, 1.447,
1.561, 1.373, 1.245, 1.135, 1.064, 1.049, 1.044, 1.044, 1.044, 1.046, 1.048, 1.054, 1.073, 1.153, 1.271, 1.432,
1.571, 1.377, 1.242, 1.137, 1.066, 1.055, 1.052, 1.051, 1.051, 1.049, 1.047, 1.048, 1.068, 1.148, 1.271, 1.427,
1.582, 1.396, 1.259, 1.156, 1.085, 1.068, 1.059, 1.054, 1.049, 1.045, 1.041, 1.043, 1.074, 1.157, 1.284, 1.444,
1.623, 1.428, 1.283, 1.178, 1.105, 1.074, 1.069, 1.063, 1.056, 1.048, 1.046, 1.051, 1.094, 1.182, 1.311, 1.473,
1.691, 1.471, 1.321, 1.213, 1.135, 1.088, 1.073, 1.069, 1.063, 1.059, 1.053, 1.071, 1.129, 1.222, 1.351, 1.521,
1.808, 1.543, 1.371, 1.253, 1.174, 1.118, 1.085, 1.072, 1.067, 1.064, 1.071, 1.106, 1.176, 1.274, 1.398, 1.582,
1.969, 1.666, 1.447, 1.316, 1.223, 1.166, 1.123, 1.094, 1.089, 1.097, 1.118, 1.163, 1.239, 1.336, 1.471, 1.681
],
"sigma": 0.00218,
"sigma_Cb": 0.00194
}
},
{
"rpi.contrast":
{
"ce_enable": 1,
"gamma_curve":
[
0, 0,
1024, 5040,
2048, 9338,
3072, 12356,
4096, 15312,
5120, 18051,
6144, 20790,
7168, 23193,
8192, 25744,
9216, 27942,
10240, 30035,
11264, 32005,
12288, 33975,
13312, 35815,
14336, 37600,
15360, 39168,
16384, 40642,
18432, 43379,
20480, 45749,
22528, 47753,
24576, 49621,
26624, 51253,
28672, 52698,
30720, 53796,
32768, 54876,
36864, 57012,
40960, 58656,
45056, 59954,
49152, 61183,
53248, 62355,
57344, 63419,
61440, 64476,
65535, 65535
]
}
},
{
"rpi.ccm":
{
"ccms": [
{
"ct": 2900,
"ccm":
[
1.44924, -0.12935, -0.31989,
-0.65839, 1.95441, -0.29602,
0.18344, -1.22282, 2.03938
]
},
{
"ct": 3000,
"ccm":
[
1.38736, 0.07714, -0.46451,
-0.59691, 1.84335, -0.24644,
0.10092, -1.30441, 2.20349
]
},
{
"ct": 3600,
"ccm":
[
1.51261, -0.27921, -0.23339,
-0.55129, 1.83241, -0.28111,
0.11649, -0.93195, 1.81546
]
},
{
"ct": 4600,
"ccm":
[
1.47082, -0.18523, -0.28559,
-0.48923, 1.95126, -0.46203,
0.07951, -0.83987, 1.76036
]
},
{
"ct": 5800,
"ccm":
[
1.57294, -0.36229, -0.21065,
-0.42272, 1.80305, -0.38032,
0.03671, -0.66862, 1.63191
]
},
{
"ct": 8100,
"ccm":
[
1.58803, -0.09912, -0.48891,
-0.42594, 2.22303, -0.79709,
-0.00621, -0.90516, 1.91137
]
}
]
}
},
{
"rpi.sharpen":
{
"threshold": 2.0,
"strength": 0.5,
"limit": 0.5
}
}
]
} |
0 | repos/libcamera/src/ipa/rpi/vc4 | repos/libcamera/src/ipa/rpi/vc4/data/imx296.json | {
"version": 2.0,
"target": "bcm2835",
"algorithms": [
{
"rpi.black_level":
{
"black_level": 3840
}
},
{
"rpi.dpc": { }
},
{
"rpi.lux":
{
"reference_shutter_speed": 7598,
"reference_gain": 1.0,
"reference_aperture": 1.0,
"reference_lux": 800,
"reference_Y": 14028
}
},
{
"rpi.noise":
{
"reference_constant": 0,
"reference_slope": 2.671
}
},
{
"rpi.geq":
{
"offset": 215,
"slope": 0.01058
}
},
{
"rpi.sdn": { }
},
{
"rpi.awb":
{
"priors": [
{
"lux": 0,
"prior":
[
2000, 1.0,
3000, 0.0,
13000, 0.0
]
},
{
"lux": 800,
"prior":
[
2000, 0.0,
6000, 2.0,
13000, 2.0
]
},
{
"lux": 1500,
"prior":
[
2000, 0.0,
4000, 1.0,
6000, 6.0,
6500, 7.0,
7000, 1.0,
13000, 1.0
]
}
],
"modes":
{
"auto":
{
"lo": 2500,
"hi": 7600
},
"incandescent":
{
"lo": 2500,
"hi": 3000
},
"tungsten":
{
"lo": 3000,
"hi": 3500
},
"fluorescent":
{
"lo": 4000,
"hi": 4700
},
"indoor":
{
"lo": 3000,
"hi": 5000
},
"daylight":
{
"lo": 5500,
"hi": 6500
},
"cloudy":
{
"lo": 7000,
"hi": 7600
}
},
"bayes": 1,
"ct_curve":
[
2500.0, 0.5386, 0.2458,
2800.0, 0.4883, 0.3303,
2900.0, 0.4855, 0.3349,
3620.0, 0.4203, 0.4367,
4560.0, 0.3455, 0.5444,
5600.0, 0.2948, 0.6124,
7400.0, 0.2336, 0.6894
],
"sensitivity_r": 1.05,
"sensitivity_b": 1.05,
"transverse_pos": 0.03093,
"transverse_neg": 0.02374
}
},
{
"rpi.agc":
{
"metering_modes":
{
"centre-weighted":
{
"weights":
[
3, 3, 3, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0
]
},
"spot":
{
"weights":
[
2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
},
"matrix":
{
"weights":
[
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
]
}
},
"exposure_modes":
{
"normal":
{
"shutter": [ 100, 30000, 45000, 60000, 120000 ],
"gain": [ 1.0, 1.0, 2.0, 4.0, 12.0 ]
},
"short":
{
"shutter": [ 100, 5000, 10000, 20000, 30000 ],
"gain": [ 1.0, 2.0, 4.0, 8.0, 16.0 ]
},
"long":
{
"shutter": [ 1000, 30000, 60000, 90000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 12.0 ]
}
},
"constraint_modes":
{
"normal": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.5,
1000, 0.5
]
}
],
"highlight": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.5,
1000, 0.5
]
},
{
"bound": "UPPER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.8,
1000, 0.8
]
}
]
},
"y_target":
[
0, 0.16,
1000, 0.165,
10000, 0.17
]
}
},
{
"rpi.alsc":
{
"omega": 1.3,
"n_iter": 100,
"luminance_strength": 0.5,
"calibrations_Cr": [
{
"ct": 4000,
"table":
[
2.726, 2.736, 2.737, 2.739, 2.741, 2.741, 2.742, 2.742, 2.743, 2.743, 2.742, 2.742, 2.742, 2.742, 2.741, 2.739,
2.728, 2.736, 2.739, 2.741, 2.742, 2.743, 2.744, 2.745, 2.746, 2.746, 2.745, 2.743, 2.742, 2.742, 2.742, 2.741,
2.729, 2.737, 2.741, 2.744, 2.746, 2.747, 2.748, 2.749, 2.751, 2.751, 2.749, 2.746, 2.744, 2.743, 2.743, 2.743,
2.729, 2.738, 2.743, 2.746, 2.749, 2.749, 2.751, 2.752, 2.753, 2.753, 2.752, 2.751, 2.746, 2.744, 2.744, 2.746,
2.728, 2.737, 2.742, 2.746, 2.749, 2.751, 2.754, 2.755, 2.754, 2.755, 2.754, 2.751, 2.748, 2.746, 2.747, 2.748,
2.724, 2.738, 2.742, 2.746, 2.749, 2.752, 2.755, 2.755, 2.755, 2.755, 2.754, 2.752, 2.749, 2.749, 2.748, 2.748,
2.726, 2.738, 2.741, 2.745, 2.749, 2.753, 2.754, 2.755, 2.755, 2.755, 2.754, 2.753, 2.749, 2.748, 2.748, 2.748,
2.726, 2.738, 2.741, 2.745, 2.746, 2.752, 2.753, 2.753, 2.753, 2.753, 2.754, 2.751, 2.748, 2.748, 2.746, 2.745,
2.726, 2.736, 2.738, 2.742, 2.745, 2.749, 2.752, 2.753, 2.752, 2.752, 2.751, 2.749, 2.747, 2.745, 2.744, 2.742,
2.724, 2.733, 2.736, 2.739, 2.742, 2.745, 2.748, 2.749, 2.749, 2.748, 2.748, 2.747, 2.744, 2.743, 2.742, 2.741,
2.722, 2.726, 2.733, 2.735, 2.737, 2.741, 2.743, 2.744, 2.744, 2.744, 2.744, 2.742, 2.741, 2.741, 2.739, 2.737,
2.719, 2.722, 2.727, 2.729, 2.731, 2.732, 2.734, 2.734, 2.735, 2.735, 2.735, 2.734, 2.733, 2.732, 2.732, 2.732
]
},
{
"ct": 6000,
"table":
[
3.507, 3.522, 3.525, 3.527, 3.531, 3.533, 3.534, 3.535, 3.535, 3.536, 3.536, 3.537, 3.537, 3.538, 3.537, 3.536,
3.511, 3.524, 3.528, 3.532, 3.533, 3.535, 3.537, 3.538, 3.538, 3.541, 3.539, 3.539, 3.539, 3.539, 3.538, 3.538,
3.513, 3.528, 3.532, 3.535, 3.538, 3.542, 3.543, 3.546, 3.548, 3.551, 3.547, 3.543, 3.541, 3.541, 3.541, 3.541,
3.513, 3.528, 3.533, 3.539, 3.544, 3.546, 3.548, 3.552, 3.553, 3.553, 3.552, 3.548, 3.543, 3.542, 3.542, 3.545,
3.513, 3.528, 3.534, 3.541, 3.547, 3.549, 3.552, 3.553, 3.554, 3.554, 3.553, 3.549, 3.546, 3.544, 3.547, 3.549,
3.508, 3.528, 3.533, 3.541, 3.548, 3.551, 3.553, 3.554, 3.555, 3.555, 3.555, 3.551, 3.548, 3.547, 3.549, 3.551,
3.511, 3.529, 3.534, 3.541, 3.548, 3.551, 3.553, 3.555, 3.555, 3.555, 3.556, 3.554, 3.549, 3.548, 3.548, 3.548,
3.511, 3.528, 3.533, 3.539, 3.546, 3.549, 3.553, 3.554, 3.554, 3.554, 3.554, 3.553, 3.549, 3.547, 3.547, 3.547,
3.511, 3.527, 3.533, 3.536, 3.541, 3.547, 3.551, 3.553, 3.553, 3.552, 3.551, 3.551, 3.548, 3.544, 3.542, 3.543,
3.507, 3.523, 3.528, 3.533, 3.538, 3.541, 3.546, 3.548, 3.549, 3.548, 3.548, 3.546, 3.542, 3.541, 3.541, 3.541,
3.505, 3.514, 3.523, 3.527, 3.532, 3.537, 3.538, 3.544, 3.544, 3.544, 3.542, 3.541, 3.537, 3.537, 3.536, 3.535,
3.503, 3.508, 3.515, 3.519, 3.521, 3.523, 3.524, 3.525, 3.526, 3.526, 3.527, 3.526, 3.524, 3.526, 3.527, 3.527
]
}
],
"calibrations_Cb": [
{
"ct": 4000,
"table":
[
2.032, 2.037, 2.039, 2.041, 2.041, 2.042, 2.043, 2.044, 2.045, 2.045, 2.044, 2.043, 2.042, 2.041, 2.041, 2.034,
2.032, 2.036, 2.039, 2.041, 2.042, 2.042, 2.043, 2.044, 2.045, 2.046, 2.045, 2.044, 2.042, 2.041, 2.039, 2.035,
2.032, 2.036, 2.038, 2.041, 2.043, 2.044, 2.044, 2.045, 2.046, 2.047, 2.047, 2.045, 2.043, 2.042, 2.041, 2.037,
2.032, 2.035, 2.039, 2.042, 2.043, 2.044, 2.045, 2.046, 2.048, 2.048, 2.047, 2.046, 2.045, 2.044, 2.042, 2.039,
2.031, 2.034, 2.037, 2.039, 2.043, 2.045, 2.045, 2.046, 2.047, 2.047, 2.047, 2.046, 2.045, 2.044, 2.043, 2.039,
2.029, 2.033, 2.036, 2.039, 2.042, 2.043, 2.045, 2.046, 2.046, 2.046, 2.046, 2.046, 2.046, 2.045, 2.044, 2.041,
2.028, 2.032, 2.035, 2.039, 2.041, 2.043, 2.044, 2.045, 2.045, 2.046, 2.046, 2.046, 2.046, 2.045, 2.044, 2.039,
2.027, 2.032, 2.035, 2.038, 2.039, 2.041, 2.044, 2.044, 2.044, 2.045, 2.046, 2.046, 2.046, 2.045, 2.044, 2.039,
2.027, 2.031, 2.034, 2.035, 2.037, 2.039, 2.042, 2.043, 2.044, 2.045, 2.045, 2.046, 2.045, 2.044, 2.043, 2.038,
2.025, 2.028, 2.032, 2.034, 2.036, 2.037, 2.041, 2.042, 2.043, 2.044, 2.044, 2.044, 2.044, 2.043, 2.041, 2.036,
2.024, 2.026, 2.029, 2.032, 2.034, 2.036, 2.038, 2.041, 2.041, 2.042, 2.043, 2.042, 2.041, 2.041, 2.037, 2.036,
2.022, 2.024, 2.027, 2.029, 2.032, 2.034, 2.036, 2.039, 2.039, 2.039, 2.041, 2.039, 2.039, 2.038, 2.036, 2.034
]
},
{
"ct": 6000,
"table":
[
1.585, 1.587, 1.589, 1.589, 1.589, 1.591, 1.591, 1.591, 1.591, 1.591, 1.589, 1.589, 1.588, 1.588, 1.587, 1.581,
1.585, 1.587, 1.588, 1.589, 1.591, 1.591, 1.591, 1.591, 1.591, 1.591, 1.591, 1.589, 1.588, 1.588, 1.587, 1.582,
1.585, 1.586, 1.588, 1.589, 1.591, 1.591, 1.591, 1.591, 1.592, 1.592, 1.591, 1.591, 1.589, 1.588, 1.587, 1.584,
1.585, 1.586, 1.588, 1.589, 1.591, 1.592, 1.592, 1.592, 1.593, 1.593, 1.592, 1.591, 1.589, 1.589, 1.588, 1.586,
1.584, 1.586, 1.587, 1.589, 1.591, 1.591, 1.592, 1.592, 1.592, 1.592, 1.591, 1.591, 1.591, 1.589, 1.589, 1.586,
1.583, 1.585, 1.587, 1.588, 1.589, 1.591, 1.591, 1.592, 1.592, 1.591, 1.591, 1.591, 1.591, 1.591, 1.589, 1.586,
1.583, 1.584, 1.586, 1.588, 1.589, 1.589, 1.591, 1.591, 1.591, 1.591, 1.591, 1.591, 1.591, 1.591, 1.589, 1.585,
1.581, 1.584, 1.586, 1.587, 1.588, 1.588, 1.589, 1.591, 1.591, 1.591, 1.591, 1.591, 1.591, 1.589, 1.589, 1.585,
1.581, 1.583, 1.584, 1.586, 1.587, 1.588, 1.589, 1.589, 1.591, 1.591, 1.591, 1.591, 1.591, 1.589, 1.589, 1.585,
1.579, 1.581, 1.583, 1.584, 1.586, 1.586, 1.588, 1.589, 1.589, 1.589, 1.589, 1.589, 1.589, 1.589, 1.587, 1.584,
1.578, 1.579, 1.581, 1.583, 1.584, 1.585, 1.586, 1.587, 1.588, 1.588, 1.588, 1.588, 1.588, 1.587, 1.585, 1.583,
1.577, 1.578, 1.579, 1.582, 1.583, 1.584, 1.585, 1.586, 1.586, 1.587, 1.587, 1.587, 1.586, 1.586, 1.584, 1.583
]
}
],
"luminance_lut":
[
1.112, 1.098, 1.078, 1.062, 1.049, 1.039, 1.031, 1.027, 1.026, 1.027, 1.034, 1.043, 1.054, 1.069, 1.087, 1.096,
1.106, 1.091, 1.073, 1.056, 1.042, 1.032, 1.025, 1.021, 1.021, 1.022, 1.027, 1.036, 1.047, 1.061, 1.077, 1.088,
1.101, 1.085, 1.066, 1.049, 1.035, 1.026, 1.019, 1.013, 1.013, 1.015, 1.021, 1.028, 1.039, 1.052, 1.069, 1.083,
1.098, 1.081, 1.059, 1.045, 1.031, 1.021, 1.013, 1.007, 1.007, 1.009, 1.014, 1.021, 1.033, 1.046, 1.063, 1.081,
1.097, 1.076, 1.057, 1.041, 1.027, 1.016, 1.007, 1.004, 1.002, 1.005, 1.009, 1.017, 1.028, 1.043, 1.061, 1.077,
1.096, 1.075, 1.054, 1.039, 1.025, 1.014, 1.005, 1.001, 1.001, 1.002, 1.006, 1.015, 1.027, 1.041, 1.058, 1.076,
1.096, 1.074, 1.054, 1.039, 1.025, 1.013, 1.005, 1.001, 1.001, 1.001, 1.006, 1.015, 1.026, 1.041, 1.058, 1.076,
1.096, 1.075, 1.056, 1.041, 1.026, 1.014, 1.007, 1.003, 1.002, 1.004, 1.008, 1.016, 1.028, 1.041, 1.059, 1.076,
1.096, 1.079, 1.059, 1.044, 1.029, 1.018, 1.011, 1.007, 1.005, 1.008, 1.012, 1.019, 1.031, 1.044, 1.061, 1.077,
1.101, 1.084, 1.065, 1.049, 1.035, 1.024, 1.017, 1.011, 1.011, 1.012, 1.018, 1.025, 1.036, 1.051, 1.068, 1.081,
1.106, 1.092, 1.072, 1.055, 1.042, 1.033, 1.024, 1.019, 1.018, 1.019, 1.025, 1.032, 1.044, 1.058, 1.076, 1.088,
1.113, 1.097, 1.079, 1.063, 1.049, 1.039, 1.031, 1.025, 1.025, 1.025, 1.031, 1.039, 1.051, 1.065, 1.083, 1.094
],
"sigma": 0.00047,
"sigma_Cb": 0.00056
}
},
{
"rpi.contrast":
{
"ce_enable": 1,
"gamma_curve":
[
0, 0,
1024, 5040,
2048, 9338,
3072, 12356,
4096, 15312,
5120, 18051,
6144, 20790,
7168, 23193,
8192, 25744,
9216, 27942,
10240, 30035,
11264, 32005,
12288, 33975,
13312, 35815,
14336, 37600,
15360, 39168,
16384, 40642,
18432, 43379,
20480, 45749,
22528, 47753,
24576, 49621,
26624, 51253,
28672, 52698,
30720, 53796,
32768, 54876,
36864, 57012,
40960, 58656,
45056, 59954,
49152, 61183,
53248, 62355,
57344, 63419,
61440, 64476,
65535, 65535
]
}
},
{
"rpi.ccm":
{
"ccms": [
{
"ct": 2500,
"ccm":
[
1.95054, -0.57435, -0.37619,
-0.46945, 1.86661, -0.39716,
0.07977, -1.14072, 2.06095
]
},
{
"ct": 2800,
"ccm":
[
1.94104, -0.60261, -0.33844,
-0.43162, 1.85422, -0.42261,
0.03799, -0.95022, 1.91222
]
},
{
"ct": 2900,
"ccm":
[
1.91828, -0.59569, -0.32258,
-0.51902, 2.09091, -0.57189,
-0.03324, -0.73462, 1.76785
]
},
{
"ct": 3620,
"ccm":
[
1.97199, -0.66403, -0.30797,
-0.46411, 2.02612, -0.56201,
-0.07764, -0.61178, 1.68942
]
},
{
"ct": 4560,
"ccm":
[
2.15256, -0.84787, -0.30469,
-0.48422, 2.28962, -0.80541,
-0.15113, -0.53014, 1.68127
]
},
{
"ct": 5600,
"ccm":
[
2.04576, -0.74771, -0.29805,
-0.36332, 1.98993, -0.62662,
-0.09328, -0.46543, 1.55871
]
},
{
"ct": 7400,
"ccm":
[
2.37532, -0.83069, -0.54462,
-0.48279, 2.84309, -1.36031,
-0.21178, -0.66532, 1.87709
]
}
]
}
},
{
"rpi.sharpen":
{
"threshold": 0.1,
"strength": 1.0,
"limit": 0.18
}
}
]
} |
0 | repos/libcamera/src/ipa/rpi/vc4 | repos/libcamera/src/ipa/rpi/vc4/data/imx477_noir.json | {
"version": 2.0,
"target": "bcm2835",
"algorithms": [
{
"rpi.black_level":
{
"black_level": 4096
}
},
{
"rpi.dpc": { }
},
{
"rpi.lux":
{
"reference_shutter_speed": 27242,
"reference_gain": 1.0,
"reference_aperture": 1.0,
"reference_lux": 830,
"reference_Y": 17755
}
},
{
"rpi.noise":
{
"reference_constant": 0,
"reference_slope": 2.767
}
},
{
"rpi.geq":
{
"offset": 204,
"slope": 0.01078
}
},
{
"rpi.sdn": { }
},
{
"rpi.awb":
{
"bayes": 0
}
},
{
"rpi.agc":
{
"channels": [
{
"metering_modes":
{
"centre-weighted":
{
"weights":
[
3, 3, 3, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0
]
},
"spot":
{
"weights":
[
2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
},
"matrix":
{
"weights":
[
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
]
}
},
"exposure_modes":
{
"normal":
{
"shutter": [ 100, 10000, 30000, 60000, 66666 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 8.0 ]
},
"short":
{
"shutter": [ 100, 5000, 10000, 20000, 33333 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 8.0 ]
},
"long":
{
"shutter": [ 100, 10000, 30000, 60000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 12.0 ]
}
},
"constraint_modes":
{
"normal": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.3,
1000, 0.3
]
}
],
"highlight": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.3,
1000, 0.3
]
},
{
"bound": "UPPER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.8,
1000, 0.8
]
}
],
"shadows": [
{
"bound": "LOWER",
"q_lo": 0.0,
"q_hi": 0.5,
"y_target":
[
0, 0.17,
1000, 0.17
]
}
]
},
"y_target":
[
0, 0.16,
1000, 0.165,
10000, 0.17
]
},
{
"base_ev": 0.125,
"metering_modes":
{
"centre-weighted":
{
"weights":
[
3, 3, 3, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0
]
},
"spot":
{
"weights":
[
2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
},
"matrix":
{
"weights":
[
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
]
}
},
"exposure_modes":
{
"normal":
{
"shutter": [ 100, 10000, 30000, 60000, 66666 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 8.0 ]
},
"short":
{
"shutter": [ 100, 5000, 10000, 20000, 33333 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 8.0 ]
},
"long":
{
"shutter": [ 100, 10000, 30000, 60000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 12.0 ]
}
},
"constraint_modes":
{
"normal": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.3,
1000, 0.3
]
}
],
"highlight": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.3,
1000, 0.3
]
},
{
"bound": "UPPER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.8,
1000, 0.8
]
}
],
"shadows": [
{
"bound": "LOWER",
"q_lo": 0.0,
"q_hi": 0.5,
"y_target":
[
0, 0.17,
1000, 0.17
]
}
]
},
"y_target":
[
0, 0.16,
1000, 0.165,
10000, 0.17
]
},
{
"base_ev": 1.5,
"metering_modes":
{
"centre-weighted":
{
"weights":
[
3, 3, 3, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0
]
},
"spot":
{
"weights":
[
2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
},
"matrix":
{
"weights":
[
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
]
}
},
"exposure_modes":
{
"normal":
{
"shutter": [ 100, 10000, 30000, 60000, 66666 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 8.0 ]
},
"short":
{
"shutter": [ 100, 5000, 10000, 20000, 33333 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 8.0 ]
},
"long":
{
"shutter": [ 100, 10000, 30000, 60000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 12.0 ]
}
},
"constraint_modes":
{
"normal": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.3,
1000, 0.3
]
}
],
"highlight": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.3,
1000, 0.3
]
},
{
"bound": "UPPER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.8,
1000, 0.8
]
}
],
"shadows": [
{
"bound": "LOWER",
"q_lo": 0.0,
"q_hi": 0.5,
"y_target":
[
0, 0.17,
1000, 0.17
]
}
]
},
"y_target":
[
0, 0.16,
1000, 0.165,
10000, 0.17
]
}
]
}
},
{
"rpi.alsc":
{
"omega": 1.3,
"n_iter": 100,
"luminance_strength": 0.5,
"calibrations_Cr": [
{
"ct": 2960,
"table":
[
2.088, 2.086, 2.082, 2.081, 2.077, 2.071, 2.068, 2.068, 2.072, 2.073, 2.075, 2.078, 2.084, 2.092, 2.095, 2.098,
2.086, 2.084, 2.079, 2.078, 2.075, 2.068, 2.064, 2.063, 2.068, 2.071, 2.072, 2.075, 2.081, 2.089, 2.092, 2.094,
2.083, 2.081, 2.077, 2.072, 2.069, 2.062, 2.059, 2.059, 2.063, 2.067, 2.069, 2.072, 2.079, 2.088, 2.089, 2.089,
2.081, 2.077, 2.072, 2.068, 2.065, 2.058, 2.055, 2.054, 2.057, 2.062, 2.066, 2.069, 2.077, 2.084, 2.086, 2.086,
2.078, 2.075, 2.069, 2.065, 2.061, 2.055, 2.052, 2.049, 2.051, 2.056, 2.062, 2.065, 2.072, 2.079, 2.081, 2.079,
2.079, 2.075, 2.069, 2.064, 2.061, 2.053, 2.049, 2.046, 2.049, 2.051, 2.057, 2.062, 2.069, 2.075, 2.077, 2.075,
2.082, 2.079, 2.072, 2.065, 2.061, 2.054, 2.049, 2.047, 2.049, 2.051, 2.056, 2.061, 2.066, 2.073, 2.073, 2.069,
2.086, 2.082, 2.075, 2.068, 2.062, 2.054, 2.051, 2.049, 2.051, 2.052, 2.056, 2.061, 2.066, 2.073, 2.073, 2.072,
2.088, 2.086, 2.079, 2.074, 2.066, 2.057, 2.051, 2.051, 2.054, 2.055, 2.056, 2.061, 2.067, 2.072, 2.073, 2.072,
2.091, 2.087, 2.079, 2.075, 2.068, 2.057, 2.052, 2.052, 2.056, 2.055, 2.055, 2.059, 2.066, 2.072, 2.072, 2.072,
2.093, 2.088, 2.081, 2.077, 2.069, 2.059, 2.054, 2.054, 2.057, 2.056, 2.056, 2.058, 2.066, 2.072, 2.073, 2.073,
2.095, 2.091, 2.084, 2.078, 2.075, 2.067, 2.057, 2.057, 2.059, 2.059, 2.058, 2.059, 2.068, 2.073, 2.075, 2.078
]
},
{
"ct": 4850,
"table":
[
2.973, 2.968, 2.956, 2.943, 2.941, 2.932, 2.923, 2.921, 2.924, 2.929, 2.931, 2.939, 2.953, 2.965, 2.966, 2.976,
2.969, 2.962, 2.951, 2.941, 2.934, 2.928, 2.919, 2.918, 2.919, 2.923, 2.927, 2.933, 2.945, 2.957, 2.962, 2.962,
2.964, 2.956, 2.944, 2.932, 2.929, 2.924, 2.915, 2.914, 2.915, 2.919, 2.924, 2.928, 2.941, 2.952, 2.958, 2.959,
2.957, 2.951, 2.939, 2.928, 2.924, 2.919, 2.913, 2.911, 2.911, 2.915, 2.919, 2.925, 2.936, 2.947, 2.952, 2.953,
2.954, 2.947, 2.935, 2.924, 2.919, 2.915, 2.908, 2.906, 2.906, 2.907, 2.914, 2.921, 2.932, 2.941, 2.943, 2.942,
2.953, 2.946, 2.932, 2.921, 2.916, 2.911, 2.904, 2.902, 2.901, 2.904, 2.909, 2.919, 2.926, 2.937, 2.939, 2.939,
2.953, 2.947, 2.932, 2.918, 2.915, 2.909, 2.903, 2.901, 2.901, 2.906, 2.911, 2.918, 2.924, 2.936, 2.936, 2.932,
2.956, 2.948, 2.934, 2.919, 2.916, 2.908, 2.903, 2.901, 2.902, 2.907, 2.909, 2.917, 2.926, 2.936, 2.939, 2.939,
2.957, 2.951, 2.936, 2.923, 2.917, 2.907, 2.904, 2.901, 2.902, 2.908, 2.911, 2.919, 2.929, 2.939, 2.942, 2.942,
2.961, 2.951, 2.936, 2.922, 2.918, 2.906, 2.904, 2.901, 2.901, 2.907, 2.911, 2.921, 2.931, 2.941, 2.942, 2.944,
2.964, 2.954, 2.936, 2.924, 2.918, 2.909, 2.905, 2.905, 2.905, 2.907, 2.912, 2.923, 2.933, 2.942, 2.944, 2.944,
2.964, 2.958, 2.943, 2.927, 2.921, 2.914, 2.909, 2.907, 2.907, 2.912, 2.916, 2.928, 2.936, 2.944, 2.947, 2.952
]
},
{
"ct": 5930,
"table":
[
3.312, 3.308, 3.301, 3.294, 3.288, 3.277, 3.268, 3.261, 3.259, 3.261, 3.267, 3.273, 3.285, 3.301, 3.303, 3.312,
3.308, 3.304, 3.294, 3.291, 3.283, 3.271, 3.263, 3.259, 3.257, 3.258, 3.261, 3.268, 3.278, 3.293, 3.299, 3.299,
3.302, 3.296, 3.288, 3.282, 3.276, 3.267, 3.259, 3.254, 3.252, 3.253, 3.256, 3.261, 3.273, 3.289, 3.292, 3.292,
3.296, 3.289, 3.282, 3.276, 3.269, 3.263, 3.256, 3.251, 3.248, 3.249, 3.251, 3.257, 3.268, 3.279, 3.284, 3.284,
3.292, 3.285, 3.279, 3.271, 3.264, 3.257, 3.249, 3.243, 3.241, 3.241, 3.246, 3.252, 3.261, 3.274, 3.275, 3.273,
3.291, 3.285, 3.276, 3.268, 3.259, 3.251, 3.242, 3.239, 3.236, 3.238, 3.244, 3.248, 3.258, 3.268, 3.269, 3.265,
3.294, 3.288, 3.275, 3.266, 3.257, 3.248, 3.239, 3.238, 3.237, 3.238, 3.243, 3.246, 3.255, 3.264, 3.264, 3.257,
3.297, 3.293, 3.279, 3.268, 3.258, 3.249, 3.238, 3.237, 3.239, 3.239, 3.243, 3.245, 3.255, 3.264, 3.264, 3.263,
3.301, 3.295, 3.281, 3.271, 3.259, 3.248, 3.237, 3.237, 3.239, 3.241, 3.243, 3.246, 3.257, 3.265, 3.266, 3.264,
3.306, 3.295, 3.279, 3.271, 3.261, 3.247, 3.235, 3.234, 3.239, 3.239, 3.243, 3.247, 3.258, 3.265, 3.265, 3.264,
3.308, 3.297, 3.279, 3.272, 3.261, 3.249, 3.239, 3.239, 3.241, 3.243, 3.245, 3.248, 3.261, 3.265, 3.266, 3.265,
3.309, 3.301, 3.286, 3.276, 3.267, 3.256, 3.246, 3.242, 3.244, 3.244, 3.249, 3.253, 3.263, 3.267, 3.271, 3.274
]
}
],
"calibrations_Cb": [
{
"ct": 2960,
"table":
[
2.133, 2.134, 2.139, 2.143, 2.148, 2.155, 2.158, 2.158, 2.158, 2.161, 2.161, 2.162, 2.159, 2.156, 2.152, 2.151,
2.132, 2.133, 2.135, 2.142, 2.147, 2.153, 2.158, 2.158, 2.158, 2.158, 2.159, 2.159, 2.157, 2.154, 2.151, 2.148,
2.133, 2.133, 2.135, 2.142, 2.149, 2.154, 2.158, 2.158, 2.157, 2.156, 2.158, 2.157, 2.155, 2.153, 2.148, 2.146,
2.133, 2.133, 2.138, 2.145, 2.149, 2.154, 2.158, 2.159, 2.158, 2.155, 2.157, 2.156, 2.153, 2.149, 2.146, 2.144,
2.133, 2.134, 2.139, 2.146, 2.149, 2.154, 2.158, 2.159, 2.159, 2.156, 2.154, 2.154, 2.149, 2.145, 2.143, 2.139,
2.135, 2.135, 2.139, 2.146, 2.151, 2.155, 2.158, 2.159, 2.158, 2.156, 2.153, 2.151, 2.146, 2.143, 2.139, 2.136,
2.135, 2.135, 2.138, 2.145, 2.151, 2.154, 2.157, 2.158, 2.157, 2.156, 2.153, 2.151, 2.147, 2.143, 2.141, 2.137,
2.135, 2.134, 2.135, 2.141, 2.149, 2.154, 2.157, 2.157, 2.157, 2.157, 2.157, 2.153, 2.149, 2.146, 2.142, 2.139,
2.132, 2.133, 2.135, 2.139, 2.148, 2.153, 2.158, 2.159, 2.159, 2.161, 2.161, 2.157, 2.154, 2.149, 2.144, 2.141,
2.132, 2.133, 2.135, 2.141, 2.149, 2.155, 2.161, 2.161, 2.162, 2.162, 2.163, 2.159, 2.154, 2.149, 2.144, 2.138,
2.136, 2.136, 2.137, 2.143, 2.149, 2.156, 2.162, 2.163, 2.162, 2.163, 2.164, 2.161, 2.157, 2.152, 2.146, 2.138,
2.137, 2.137, 2.141, 2.147, 2.152, 2.157, 2.162, 2.162, 2.159, 2.161, 2.162, 2.162, 2.157, 2.152, 2.148, 2.148
]
},
{
"ct": 4850,
"table":
[
1.463, 1.464, 1.471, 1.478, 1.479, 1.483, 1.484, 1.486, 1.486, 1.484, 1.483, 1.481, 1.478, 1.475, 1.471, 1.468,
1.463, 1.463, 1.468, 1.476, 1.479, 1.482, 1.484, 1.487, 1.486, 1.484, 1.483, 1.482, 1.478, 1.473, 1.469, 1.468,
1.463, 1.464, 1.468, 1.476, 1.479, 1.483, 1.484, 1.486, 1.486, 1.485, 1.484, 1.482, 1.477, 1.473, 1.469, 1.468,
1.463, 1.464, 1.469, 1.477, 1.481, 1.483, 1.485, 1.487, 1.487, 1.485, 1.485, 1.482, 1.478, 1.474, 1.469, 1.468,
1.465, 1.465, 1.471, 1.478, 1.481, 1.484, 1.486, 1.488, 1.488, 1.487, 1.485, 1.482, 1.477, 1.472, 1.468, 1.467,
1.465, 1.466, 1.472, 1.479, 1.482, 1.485, 1.486, 1.488, 1.488, 1.486, 1.484, 1.479, 1.475, 1.472, 1.468, 1.466,
1.466, 1.466, 1.472, 1.478, 1.482, 1.484, 1.485, 1.488, 1.487, 1.485, 1.483, 1.479, 1.475, 1.472, 1.469, 1.468,
1.465, 1.466, 1.469, 1.476, 1.481, 1.485, 1.485, 1.486, 1.486, 1.485, 1.483, 1.479, 1.477, 1.474, 1.471, 1.469,
1.464, 1.465, 1.469, 1.476, 1.481, 1.484, 1.485, 1.487, 1.487, 1.486, 1.485, 1.481, 1.478, 1.475, 1.471, 1.469,
1.463, 1.464, 1.469, 1.477, 1.481, 1.485, 1.485, 1.488, 1.488, 1.487, 1.486, 1.481, 1.478, 1.475, 1.471, 1.468,
1.464, 1.465, 1.471, 1.478, 1.482, 1.486, 1.486, 1.488, 1.488, 1.487, 1.486, 1.481, 1.478, 1.475, 1.472, 1.468,
1.465, 1.466, 1.472, 1.481, 1.483, 1.487, 1.487, 1.488, 1.488, 1.486, 1.485, 1.481, 1.479, 1.476, 1.473, 1.472
]
},
{
"ct": 5930,
"table":
[
1.443, 1.444, 1.448, 1.453, 1.459, 1.463, 1.465, 1.467, 1.469, 1.469, 1.467, 1.466, 1.462, 1.457, 1.454, 1.451,
1.443, 1.444, 1.445, 1.451, 1.459, 1.463, 1.465, 1.467, 1.469, 1.469, 1.467, 1.465, 1.461, 1.456, 1.452, 1.451,
1.444, 1.444, 1.445, 1.451, 1.459, 1.463, 1.466, 1.468, 1.469, 1.469, 1.467, 1.465, 1.461, 1.456, 1.452, 1.449,
1.444, 1.444, 1.447, 1.452, 1.459, 1.464, 1.467, 1.469, 1.471, 1.469, 1.467, 1.466, 1.461, 1.456, 1.452, 1.449,
1.444, 1.445, 1.448, 1.452, 1.459, 1.465, 1.469, 1.471, 1.471, 1.471, 1.468, 1.465, 1.461, 1.455, 1.451, 1.449,
1.445, 1.446, 1.449, 1.453, 1.461, 1.466, 1.469, 1.471, 1.472, 1.469, 1.467, 1.465, 1.459, 1.455, 1.451, 1.447,
1.446, 1.446, 1.449, 1.453, 1.461, 1.466, 1.469, 1.469, 1.469, 1.469, 1.467, 1.465, 1.459, 1.455, 1.452, 1.449,
1.446, 1.446, 1.447, 1.451, 1.459, 1.466, 1.469, 1.469, 1.469, 1.469, 1.467, 1.465, 1.461, 1.457, 1.454, 1.451,
1.444, 1.444, 1.447, 1.451, 1.459, 1.466, 1.469, 1.469, 1.471, 1.471, 1.468, 1.466, 1.462, 1.458, 1.454, 1.452,
1.444, 1.444, 1.448, 1.453, 1.459, 1.466, 1.469, 1.471, 1.472, 1.472, 1.468, 1.466, 1.462, 1.458, 1.454, 1.449,
1.446, 1.447, 1.449, 1.454, 1.461, 1.466, 1.471, 1.471, 1.471, 1.471, 1.468, 1.466, 1.462, 1.459, 1.455, 1.449,
1.447, 1.447, 1.452, 1.457, 1.462, 1.468, 1.472, 1.472, 1.471, 1.471, 1.468, 1.466, 1.462, 1.459, 1.456, 1.455
]
}
],
"luminance_lut":
[
1.548, 1.499, 1.387, 1.289, 1.223, 1.183, 1.164, 1.154, 1.153, 1.169, 1.211, 1.265, 1.345, 1.448, 1.581, 1.619,
1.513, 1.412, 1.307, 1.228, 1.169, 1.129, 1.105, 1.098, 1.103, 1.127, 1.157, 1.209, 1.272, 1.361, 1.481, 1.583,
1.449, 1.365, 1.257, 1.175, 1.124, 1.085, 1.062, 1.054, 1.059, 1.079, 1.113, 1.151, 1.211, 1.293, 1.407, 1.488,
1.424, 1.324, 1.222, 1.139, 1.089, 1.056, 1.034, 1.031, 1.034, 1.049, 1.075, 1.115, 1.164, 1.241, 1.351, 1.446,
1.412, 1.297, 1.203, 1.119, 1.069, 1.039, 1.021, 1.016, 1.022, 1.032, 1.052, 1.086, 1.135, 1.212, 1.321, 1.439,
1.406, 1.287, 1.195, 1.115, 1.059, 1.028, 1.014, 1.012, 1.015, 1.026, 1.041, 1.074, 1.125, 1.201, 1.302, 1.425,
1.406, 1.294, 1.205, 1.126, 1.062, 1.031, 1.013, 1.009, 1.011, 1.019, 1.042, 1.079, 1.129, 1.203, 1.302, 1.435,
1.415, 1.318, 1.229, 1.146, 1.076, 1.039, 1.019, 1.014, 1.017, 1.031, 1.053, 1.093, 1.144, 1.219, 1.314, 1.436,
1.435, 1.348, 1.246, 1.164, 1.094, 1.059, 1.036, 1.032, 1.037, 1.049, 1.072, 1.114, 1.167, 1.257, 1.343, 1.462,
1.471, 1.385, 1.278, 1.189, 1.124, 1.084, 1.064, 1.061, 1.069, 1.078, 1.101, 1.146, 1.207, 1.298, 1.415, 1.496,
1.522, 1.436, 1.323, 1.228, 1.169, 1.118, 1.101, 1.094, 1.099, 1.113, 1.146, 1.194, 1.265, 1.353, 1.474, 1.571,
1.578, 1.506, 1.378, 1.281, 1.211, 1.156, 1.135, 1.134, 1.139, 1.158, 1.194, 1.251, 1.327, 1.427, 1.559, 1.611
],
"sigma": 0.00121,
"sigma_Cb": 0.00115
}
},
{
"rpi.contrast":
{
"ce_enable": 1,
"gamma_curve":
[
0, 0,
1024, 5040,
2048, 9338,
3072, 12356,
4096, 15312,
5120, 18051,
6144, 20790,
7168, 23193,
8192, 25744,
9216, 27942,
10240, 30035,
11264, 32005,
12288, 33975,
13312, 35815,
14336, 37600,
15360, 39168,
16384, 40642,
18432, 43379,
20480, 45749,
22528, 47753,
24576, 49621,
26624, 51253,
28672, 52698,
30720, 53796,
32768, 54876,
36864, 57012,
40960, 58656,
45056, 59954,
49152, 61183,
53248, 62355,
57344, 63419,
61440, 64476,
65535, 65535
]
}
},
{
"rpi.ccm":
{
"ccms": [
{
"ct": 2360,
"ccm":
[
1.66078, -0.23588, -0.42491,
-0.47456, 1.82763, -0.35307,
-0.00545, -1.44729, 2.45273
]
},
{
"ct": 2870,
"ccm":
[
1.78373, -0.55344, -0.23029,
-0.39951, 1.69701, -0.29751,
0.01986, -1.06525, 2.04539
]
},
{
"ct": 2970,
"ccm":
[
1.73511, -0.56973, -0.16537,
-0.36338, 1.69878, -0.33539,
-0.02354, -0.76813, 1.79168
]
},
{
"ct": 3000,
"ccm":
[
2.06374, -0.92218, -0.14156,
-0.41721, 1.69289, -0.27568,
-0.00554, -0.92741, 1.93295
]
},
{
"ct": 3700,
"ccm":
[
2.13792, -1.08136, -0.05655,
-0.34739, 1.58989, -0.24249,
-0.00349, -0.76789, 1.77138
]
},
{
"ct": 3870,
"ccm":
[
1.83834, -0.70528, -0.13307,
-0.30499, 1.60523, -0.30024,
-0.05701, -0.58313, 1.64014
]
},
{
"ct": 4000,
"ccm":
[
2.15741, -1.10295, -0.05447,
-0.34631, 1.61158, -0.26528,
-0.02723, -0.70288, 1.73011
]
},
{
"ct": 4400,
"ccm":
[
2.05729, -0.95007, -0.10723,
-0.41712, 1.78606, -0.36894,
-0.11899, -0.55727, 1.67626
]
},
{
"ct": 4715,
"ccm":
[
1.90255, -0.77478, -0.12777,
-0.31338, 1.88197, -0.56858,
-0.06001, -0.61785, 1.67786
]
},
{
"ct": 5920,
"ccm":
[
1.98691, -0.84671, -0.14019,
-0.26581, 1.70615, -0.44035,
-0.09532, -0.47332, 1.56864
]
},
{
"ct": 9050,
"ccm":
[
2.09255, -0.76541, -0.32714,
-0.28973, 2.27462, -0.98489,
-0.17299, -0.61275, 1.78574
]
}
]
}
},
{
"rpi.sharpen": { }
},
{
"rpi.hdr":
{
"MultiExposureUnmerged":
{
"cadence": [ 1, 2 ],
"channel_map":
{
"short": 1,
"long": 2
}
}
}
}
]
}
|
0 | repos/libcamera/src/ipa/rpi/vc4 | repos/libcamera/src/ipa/rpi/vc4/data/imx708.json | {
"version": 2.0,
"target": "bcm2835",
"algorithms": [
{
"rpi.black_level":
{
"black_level": 4096
}
},
{
"rpi.dpc": { }
},
{
"rpi.lux":
{
"reference_shutter_speed": 10672,
"reference_gain": 1.12,
"reference_aperture": 1.0,
"reference_lux": 977,
"reference_Y": 8627
}
},
{
"rpi.noise":
{
"reference_constant": 16.0,
"reference_slope": 4.0
}
},
{
"rpi.geq":
{
"offset": 215,
"slope": 0.00287
}
},
{
"rpi.sdn": { }
},
{
"rpi.awb":
{
"priors": [
{
"lux": 0,
"prior":
[
2000, 1.0,
3000, 0.0,
13000, 0.0
]
},
{
"lux": 800,
"prior":
[
2000, 0.0,
6000, 2.0,
13000, 2.0
]
},
{
"lux": 1500,
"prior":
[
2000, 0.0,
4000, 1.0,
6000, 6.0,
6500, 7.0,
7000, 1.0,
13000, 1.0
]
}
],
"modes":
{
"auto":
{
"lo": 2500,
"hi": 8000
},
"incandescent":
{
"lo": 2500,
"hi": 3000
},
"tungsten":
{
"lo": 3000,
"hi": 3500
},
"fluorescent":
{
"lo": 4000,
"hi": 4700
},
"indoor":
{
"lo": 3000,
"hi": 5000
},
"daylight":
{
"lo": 5500,
"hi": 6500
},
"cloudy":
{
"lo": 7000,
"hi": 8600
}
},
"bayes": 1,
"ct_curve":
[
2498.0, 0.8733, 0.2606,
2821.0, 0.7707, 0.3245,
2925.0, 0.7338, 0.3499,
2926.0, 0.7193, 0.3603,
2951.0, 0.7144, 0.3639,
2954.0, 0.7111, 0.3663,
3578.0, 0.6038, 0.4516,
3717.0, 0.5861, 0.4669,
3784.0, 0.5786, 0.4737,
4485.0, 0.5113, 0.5368,
4615.0, 0.4994, 0.5486,
4671.0, 0.4927, 0.5554,
5753.0, 0.4274, 0.6246,
5773.0, 0.4265, 0.6256,
7433.0, 0.3723, 0.6881
],
"sensitivity_r": 1.05,
"sensitivity_b": 1.05,
"transverse_pos": 0.03148,
"transverse_neg": 0.03061
}
},
{
"rpi.agc":
{
"channels": [
{
"metering_modes":
{
"centre-weighted":
{
"weights":
[
3, 3, 3, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0
]
},
"spot":
{
"weights":
[
2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
},
"matrix":
{
"weights":
[
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
]
}
},
"exposure_modes":
{
"normal":
{
"shutter": [ 100, 15000, 30000, 60000, 120000 ],
"gain": [ 1.0, 1.0, 2.0, 4.0, 6.0 ]
},
"short":
{
"shutter": [ 100, 5000, 10000, 20000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 6.0 ]
},
"long":
{
"shutter": [ 1000, 30000, 60000, 90000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 12.0 ]
}
},
"constraint_modes":
{
"normal": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.2,
1000, 0.2
]
}
],
"highlight": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.2,
1000, 0.2
]
},
{
"bound": "UPPER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.8,
1000, 0.8
]
}
]
},
"y_target":
[
0, 0.16,
1000, 0.165,
10000, 0.17
],
"startup_frames": 5,
"convergence_frames": 6,
"speed": 0.15
},
{
"base_ev": 0.125,
"metering_modes":
{
"centre-weighted":
{
"weights":
[
3, 3, 3, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0
]
},
"spot":
{
"weights":
[
2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
},
"matrix":
{
"weights":
[
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
]
}
},
"exposure_modes":
{
"normal":
{
"shutter": [ 100, 15000, 30000, 60000, 120000 ],
"gain": [ 1.0, 1.0, 2.0, 4.0, 6.0 ]
},
"short":
{
"shutter": [ 100, 5000, 10000, 20000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 6.0 ]
},
"long":
{
"shutter": [ 1000, 30000, 60000, 90000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 12.0 ]
}
},
"constraint_modes":
{
"normal": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.2,
1000, 0.2
]
}
],
"highlight": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.2,
1000, 0.2
]
},
{
"bound": "UPPER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.8,
1000, 0.8
]
}
]
},
"y_target":
[
0, 0.16,
1000, 0.165,
10000, 0.17
],
"startup_frames": 5,
"convergence_frames": 6,
"speed": 0.15
},
{
"base_ev": 1.5,
"metering_modes":
{
"centre-weighted":
{
"weights":
[
3, 3, 3, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0
]
},
"spot":
{
"weights":
[
2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
},
"matrix":
{
"weights":
[
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
]
}
},
"exposure_modes":
{
"normal":
{
"shutter": [ 100, 15000, 30000, 60000, 120000 ],
"gain": [ 1.0, 1.0, 2.0, 4.0, 6.0 ]
},
"short":
{
"shutter": [ 100, 5000, 10000, 20000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 6.0 ]
},
"long":
{
"shutter": [ 1000, 30000, 60000, 90000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 12.0 ]
}
},
"constraint_modes":
{
"normal": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.2,
1000, 0.2
]
}
],
"highlight": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.2,
1000, 0.2
]
},
{
"bound": "UPPER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.8,
1000, 0.8
]
}
]
},
"y_target":
[
0, 0.16,
1000, 0.165,
10000, 0.17
],
"startup_frames": 5,
"convergence_frames": 6,
"speed": 0.15
}
]
}
},
{
"rpi.alsc":
{
"omega": 1.3,
"n_iter": 100,
"luminance_strength": 0.5,
"calibrations_Cr": [
{
"ct": 3000,
"table":
[
1.562, 1.566, 1.566, 1.556, 1.533, 1.506, 1.475, 1.475, 1.475, 1.475, 1.506, 1.533, 1.555, 1.563, 1.562, 1.555,
1.563, 1.564, 1.561, 1.538, 1.508, 1.482, 1.449, 1.436, 1.436, 1.449, 1.481, 1.508, 1.537, 1.557, 1.558, 1.557,
1.564, 1.563, 1.554, 1.522, 1.482, 1.449, 1.421, 1.403, 1.403, 1.419, 1.449, 1.481, 1.519, 1.549, 1.557, 1.559,
1.564, 1.563, 1.545, 1.506, 1.462, 1.421, 1.403, 1.378, 1.378, 1.402, 1.419, 1.459, 1.503, 1.541, 1.557, 1.559,
1.564, 1.562, 1.537, 1.494, 1.447, 1.404, 1.378, 1.364, 1.364, 1.377, 1.402, 1.444, 1.491, 1.532, 1.556, 1.559,
1.564, 1.559, 1.532, 1.487, 1.438, 1.395, 1.365, 1.359, 1.359, 1.364, 1.393, 1.436, 1.484, 1.527, 1.555, 1.558,
1.564, 1.559, 1.532, 1.487, 1.438, 1.395, 1.365, 1.356, 1.356, 1.364, 1.393, 1.436, 1.484, 1.527, 1.554, 1.557,
1.564, 1.561, 1.536, 1.492, 1.444, 1.402, 1.374, 1.364, 1.363, 1.373, 1.401, 1.442, 1.489, 1.531, 1.554, 1.557,
1.564, 1.563, 1.544, 1.504, 1.458, 1.418, 1.397, 1.374, 1.374, 1.395, 1.416, 1.456, 1.501, 1.538, 1.556, 1.557,
1.564, 1.562, 1.551, 1.518, 1.477, 1.441, 1.418, 1.397, 1.397, 1.416, 1.438, 1.474, 1.514, 1.546, 1.556, 1.556,
1.562, 1.562, 1.558, 1.534, 1.499, 1.476, 1.441, 1.426, 1.426, 1.438, 1.473, 1.496, 1.531, 1.552, 1.556, 1.555,
1.561, 1.564, 1.564, 1.552, 1.525, 1.497, 1.466, 1.461, 1.461, 1.464, 1.495, 1.523, 1.548, 1.556, 1.556, 1.552
]
},
{
"ct": 5000,
"table":
[
2.609, 2.616, 2.617, 2.607, 2.573, 2.527, 2.483, 2.481, 2.481, 2.483, 2.529, 2.573, 2.604, 2.613, 2.613, 2.604,
2.609, 2.615, 2.608, 2.576, 2.533, 2.489, 2.439, 2.418, 2.418, 2.439, 2.491, 2.532, 2.577, 2.605, 2.609, 2.607,
2.611, 2.611, 2.597, 2.551, 2.489, 2.439, 2.391, 2.364, 2.364, 2.391, 2.439, 2.491, 2.551, 2.592, 2.607, 2.609,
2.612, 2.608, 2.583, 2.526, 2.457, 2.391, 2.362, 2.318, 2.318, 2.362, 2.391, 2.458, 2.526, 2.581, 2.607, 2.611,
2.612, 2.604, 2.571, 2.507, 2.435, 2.362, 2.317, 2.293, 2.294, 2.318, 2.363, 2.434, 2.508, 2.568, 2.604, 2.612,
2.611, 2.602, 2.564, 2.496, 2.419, 2.349, 2.293, 2.284, 2.284, 2.294, 2.347, 2.421, 2.497, 2.562, 2.603, 2.611,
2.609, 2.601, 2.564, 2.496, 2.419, 2.349, 2.293, 2.278, 2.278, 2.294, 2.347, 2.421, 2.497, 2.562, 2.602, 2.609,
2.609, 2.602, 2.568, 2.503, 2.429, 2.361, 2.311, 2.292, 2.292, 2.309, 2.357, 2.429, 2.504, 2.567, 2.602, 2.609,
2.606, 2.604, 2.579, 2.519, 2.449, 2.384, 2.348, 2.311, 2.311, 2.346, 2.383, 2.449, 2.521, 2.577, 2.604, 2.608,
2.604, 2.603, 2.586, 2.537, 2.474, 2.418, 2.384, 2.348, 2.348, 2.383, 2.417, 2.476, 2.538, 2.586, 2.601, 2.603,
2.603, 2.605, 2.596, 2.561, 2.508, 2.474, 2.418, 2.396, 2.396, 2.417, 2.474, 2.511, 2.562, 2.596, 2.603, 2.602,
2.601, 2.607, 2.606, 2.589, 2.549, 2.507, 2.456, 2.454, 2.454, 2.458, 2.508, 2.554, 2.594, 2.605, 2.605, 2.602
]
}
],
"calibrations_Cb": [
{
"ct": 3000,
"table":
[
3.221, 3.226, 3.231, 3.236, 3.239, 3.243, 3.245, 3.247, 3.249, 3.253, 3.255, 3.254, 3.253, 3.242, 3.235, 3.226,
3.225, 3.231, 3.235, 3.238, 3.241, 3.244, 3.246, 3.247, 3.249, 3.254, 3.256, 3.255, 3.252, 3.248, 3.241, 3.232,
3.226, 3.234, 3.239, 3.243, 3.243, 3.245, 3.247, 3.248, 3.251, 3.255, 3.256, 3.256, 3.254, 3.249, 3.244, 3.236,
3.232, 3.238, 3.245, 3.245, 3.246, 3.247, 3.248, 3.251, 3.251, 3.256, 3.257, 3.257, 3.256, 3.254, 3.249, 3.239,
3.232, 3.243, 3.246, 3.246, 3.246, 3.247, 3.248, 3.251, 3.253, 3.257, 3.258, 3.258, 3.257, 3.256, 3.254, 3.239,
3.232, 3.242, 3.246, 3.247, 3.246, 3.246, 3.248, 3.251, 3.252, 3.253, 3.256, 3.255, 3.255, 3.254, 3.251, 3.239,
3.233, 3.241, 3.244, 3.245, 3.244, 3.245, 3.246, 3.249, 3.251, 3.252, 3.253, 3.252, 3.252, 3.252, 3.249, 3.238,
3.238, 3.241, 3.246, 3.246, 3.245, 3.245, 3.247, 3.249, 3.251, 3.252, 3.253, 3.253, 3.252, 3.252, 3.249, 3.239,
3.235, 3.241, 3.245, 3.245, 3.245, 3.245, 3.246, 3.247, 3.251, 3.254, 3.253, 3.255, 3.256, 3.255, 3.251, 3.241,
3.226, 3.235, 3.241, 3.241, 3.241, 3.241, 3.243, 3.245, 3.246, 3.252, 3.253, 3.254, 3.256, 3.254, 3.241, 3.237,
3.205, 3.213, 3.213, 3.214, 3.214, 3.214, 3.214, 3.213, 3.213, 3.216, 3.218, 3.216, 3.214, 3.213, 3.211, 3.208,
3.205, 3.205, 3.212, 3.212, 3.212, 3.213, 3.211, 3.211, 3.211, 3.213, 3.216, 3.214, 3.213, 3.211, 3.208, 3.196
]
},
{
"ct": 5000,
"table":
[
1.645, 1.646, 1.649, 1.653, 1.654, 1.657, 1.659, 1.661, 1.663, 1.662, 1.661, 1.659, 1.656, 1.651, 1.645, 1.642,
1.646, 1.649, 1.652, 1.654, 1.656, 1.659, 1.662, 1.663, 1.664, 1.664, 1.662, 1.661, 1.657, 1.653, 1.649, 1.644,
1.648, 1.652, 1.654, 1.656, 1.658, 1.662, 1.665, 1.668, 1.668, 1.668, 1.665, 1.662, 1.658, 1.655, 1.652, 1.646,
1.649, 1.653, 1.656, 1.658, 1.661, 1.665, 1.667, 1.671, 1.673, 1.671, 1.668, 1.663, 1.659, 1.656, 1.654, 1.647,
1.649, 1.655, 1.657, 1.659, 1.661, 1.666, 1.671, 1.674, 1.675, 1.673, 1.671, 1.664, 1.659, 1.656, 1.654, 1.648,
1.649, 1.654, 1.656, 1.659, 1.661, 1.666, 1.673, 1.676, 1.676, 1.675, 1.671, 1.664, 1.659, 1.656, 1.654, 1.648,
1.649, 1.654, 1.656, 1.658, 1.659, 1.665, 1.672, 1.675, 1.675, 1.674, 1.668, 1.662, 1.658, 1.655, 1.654, 1.646,
1.652, 1.655, 1.657, 1.659, 1.661, 1.665, 1.671, 1.673, 1.673, 1.672, 1.668, 1.662, 1.658, 1.655, 1.654, 1.647,
1.652, 1.655, 1.657, 1.659, 1.661, 1.664, 1.667, 1.671, 1.672, 1.668, 1.666, 1.662, 1.659, 1.656, 1.654, 1.647,
1.647, 1.652, 1.655, 1.656, 1.657, 1.661, 1.664, 1.665, 1.665, 1.665, 1.663, 1.661, 1.657, 1.655, 1.647, 1.647,
1.639, 1.642, 1.644, 1.645, 1.646, 1.648, 1.648, 1.648, 1.649, 1.649, 1.649, 1.646, 1.645, 1.642, 1.639, 1.636,
1.639, 1.641, 1.642, 1.644, 1.645, 1.646, 1.647, 1.647, 1.648, 1.648, 1.647, 1.645, 1.642, 1.639, 1.636, 1.633
]
}
],
"luminance_lut":
[
2.644, 2.396, 2.077, 1.863, 1.682, 1.535, 1.392, 1.382, 1.382, 1.382, 1.515, 1.657, 1.826, 2.035, 2.351, 2.604,
2.497, 2.229, 1.947, 1.733, 1.539, 1.424, 1.296, 1.249, 1.249, 1.285, 1.401, 1.519, 1.699, 1.908, 2.183, 2.456,
2.389, 2.109, 1.848, 1.622, 1.424, 1.296, 1.201, 1.146, 1.146, 1.188, 1.285, 1.401, 1.591, 1.811, 2.065, 2.347,
2.317, 2.026, 1.771, 1.535, 1.339, 1.201, 1.145, 1.069, 1.069, 1.134, 1.188, 1.318, 1.505, 1.734, 1.983, 2.273,
2.276, 1.972, 1.715, 1.474, 1.281, 1.148, 1.069, 1.033, 1.024, 1.065, 1.134, 1.262, 1.446, 1.679, 1.929, 2.233,
2.268, 1.941, 1.682, 1.441, 1.251, 1.119, 1.033, 1.013, 1.013, 1.024, 1.105, 1.231, 1.415, 1.649, 1.898, 2.227,
2.268, 1.941, 1.682, 1.441, 1.251, 1.119, 1.033, 1.001, 1.001, 1.024, 1.105, 1.231, 1.415, 1.649, 1.898, 2.227,
2.268, 1.951, 1.694, 1.456, 1.265, 1.131, 1.044, 1.026, 1.019, 1.039, 1.118, 1.246, 1.429, 1.663, 1.912, 2.227,
2.291, 1.992, 1.738, 1.505, 1.311, 1.175, 1.108, 1.044, 1.041, 1.106, 1.161, 1.292, 1.478, 1.707, 1.955, 2.252,
2.347, 2.058, 1.803, 1.581, 1.384, 1.245, 1.175, 1.108, 1.108, 1.161, 1.239, 1.364, 1.551, 1.773, 2.023, 2.311,
2.438, 2.156, 1.884, 1.674, 1.484, 1.373, 1.245, 1.199, 1.199, 1.239, 1.363, 1.463, 1.647, 1.858, 2.123, 2.406,
2.563, 2.305, 1.998, 1.792, 1.615, 1.472, 1.339, 1.322, 1.322, 1.326, 1.456, 1.593, 1.767, 1.973, 2.273, 2.532
],
"sigma": 0.00178,
"sigma_Cb": 0.00217
}
},
{
"rpi.contrast":
{
"ce_enable": 1,
"gamma_curve":
[
0, 0,
1024, 5040,
2048, 9338,
3072, 12356,
4096, 15312,
5120, 18051,
6144, 20790,
7168, 23193,
8192, 25744,
9216, 27942,
10240, 30035,
11264, 32005,
12288, 33975,
13312, 35815,
14336, 37600,
15360, 39168,
16384, 40642,
18432, 43379,
20480, 45749,
22528, 47753,
24576, 49621,
26624, 51253,
28672, 52698,
30720, 53796,
32768, 54876,
36864, 57012,
40960, 58656,
45056, 59954,
49152, 61183,
53248, 62355,
57344, 63419,
61440, 64476,
65535, 65535
]
}
},
{
"rpi.ccm":
{
"ccms": [
{
"ct": 2964,
"ccm":
[
1.72129, -0.45961, -0.26169,
-0.30042, 1.56924, -0.26882,
0.15133, -1.13293, 1.98161
]
},
{
"ct": 3610,
"ccm":
[
1.54474, -0.35082, -0.19391,
-0.36989, 1.67926, -0.30936,
-0.00524, -0.55197, 1.55722
]
},
{
"ct": 4640,
"ccm":
[
1.52972, -0.35168, -0.17804,
-0.28309, 1.67098, -0.38788,
0.01695, -0.57209, 1.55515
]
},
{
"ct": 5910,
"ccm":
[
1.56879, -0.42159, -0.14719,
-0.27275, 1.59354, -0.32079,
-0.02862, -0.40662, 1.43525
]
},
{
"ct": 7590,
"ccm":
[
1.41424, -0.21092, -0.20332,
-0.17646, 1.71734, -0.54087,
0.01297, -0.63111, 1.61814
]
}
]
}
},
{
"rpi.sharpen": { }
},
{
"rpi.af":
{
"ranges":
{
"normal":
{
"min": 0.0,
"max": 12.0,
"default": 1.0
},
"macro":
{
"min": 3.0,
"max": 15.0,
"default": 4.0
}
},
"speeds":
{
"normal":
{
"step_coarse": 1.0,
"step_fine": 0.25,
"contrast_ratio": 0.75,
"pdaf_gain": -0.02,
"pdaf_squelch": 0.125,
"max_slew": 2.0,
"pdaf_frames": 20,
"dropout_frames": 6,
"step_frames": 4
}
},
"conf_epsilon": 8,
"conf_thresh": 16,
"conf_clip": 512,
"skip_frames": 5,
"map": [ 0.0, 445, 15.0, 925 ]
}
},
{
"rpi.hdr":
{
"MultiExposureUnmerged":
{
"cadence": [ 1, 2 ],
"channel_map":
{
"short": 1,
"long": 2
}
}
}
}
]
} |
0 | repos/libcamera/src/ipa/rpi/vc4 | repos/libcamera/src/ipa/rpi/vc4/data/imx296_mono.json | {
"version": 2.0,
"target": "bcm2835",
"algorithms": [
{
"rpi.black_level":
{
"black_level": 3840
}
},
{
"rpi.dpc": { }
},
{
"rpi.lux":
{
"reference_shutter_speed": 9998,
"reference_gain": 1.0,
"reference_aperture": 1.0,
"reference_lux": 823,
"reference_Y": 12396
}
},
{
"rpi.noise":
{
"reference_constant": 0,
"reference_slope": 2.753
}
},
{
"rpi.sdn": { }
},
{
"rpi.agc":
{
"metering_modes":
{
"centre-weighted":
{
"weights":
[
3, 3, 3, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0
]
},
"spot":
{
"weights":
[
2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
},
"matrix":
{
"weights":
[
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
]
}
},
"exposure_modes":
{
"normal":
{
"shutter": [ 100, 10000, 30000, 60000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 6.0 ]
},
"short":
{
"shutter": [ 100, 5000, 10000, 20000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 6.0 ]
},
"long":
{
"shutter": [ 1000, 30000, 60000, 90000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 12.0 ]
}
},
"constraint_modes":
{
"normal": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.5,
1000, 0.5
]
}
],
"highlight": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.5,
1000, 0.5
]
},
{
"bound": "UPPER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.8,
1000, 0.8
]
}
]
},
"y_target":
[
0, 0.16,
1000, 0.165,
10000, 0.17
]
}
},
{
"rpi.alsc":
{
"omega": 1.3,
"n_iter": 0,
"luminance_strength": 0.5,
"calibrations_Cr": [
{
"ct": 4000,
"table":
[
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
]
}
],
"calibrations_Cb": [
{
"ct": 4000,
"table":
[
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
]
}
],
"luminance_lut":
[
1.308, 1.293, 1.228, 1.175, 1.139, 1.108, 1.092, 1.082, 1.082, 1.086, 1.097, 1.114, 1.149, 1.199, 1.279, 1.303,
1.293, 1.249, 1.199, 1.162, 1.136, 1.109, 1.087, 1.077, 1.072, 1.081, 1.095, 1.103, 1.133, 1.172, 1.225, 1.282,
1.251, 1.212, 1.186, 1.159, 1.129, 1.114, 1.102, 1.088, 1.088, 1.088, 1.095, 1.117, 1.123, 1.158, 1.198, 1.249,
1.223, 1.192, 1.177, 1.163, 1.147, 1.139, 1.132, 1.112, 1.111, 1.107, 1.113, 1.118, 1.139, 1.155, 1.186, 1.232,
1.207, 1.186, 1.171, 1.162, 1.168, 1.163, 1.153, 1.138, 1.129, 1.128, 1.132, 1.136, 1.149, 1.167, 1.189, 1.216,
1.198, 1.186, 1.176, 1.176, 1.177, 1.185, 1.171, 1.157, 1.146, 1.144, 1.146, 1.149, 1.161, 1.181, 1.201, 1.221,
1.203, 1.181, 1.176, 1.178, 1.191, 1.189, 1.188, 1.174, 1.159, 1.153, 1.158, 1.161, 1.169, 1.185, 1.211, 1.227,
1.211, 1.179, 1.177, 1.187, 1.194, 1.196, 1.194, 1.187, 1.176, 1.169, 1.171, 1.171, 1.175, 1.189, 1.214, 1.226,
1.219, 1.182, 1.184, 1.191, 1.195, 1.199, 1.197, 1.194, 1.188, 1.185, 1.179, 1.179, 1.182, 1.194, 1.212, 1.227,
1.237, 1.192, 1.194, 1.194, 1.198, 1.199, 1.198, 1.197, 1.196, 1.193, 1.189, 1.189, 1.192, 1.203, 1.214, 1.231,
1.282, 1.199, 1.199, 1.197, 1.199, 1.199, 1.192, 1.193, 1.193, 1.194, 1.196, 1.197, 1.206, 1.216, 1.228, 1.244,
1.309, 1.236, 1.204, 1.203, 1.202, 1.194, 1.194, 1.188, 1.192, 1.192, 1.199, 1.201, 1.212, 1.221, 1.235, 1.247
],
"sigma": 0.005,
"sigma_Cb": 0.005
}
},
{
"rpi.contrast":
{
"ce_enable": 1,
"gamma_curve":
[
0, 0,
1024, 5040,
2048, 9338,
3072, 12356,
4096, 15312,
5120, 18051,
6144, 20790,
7168, 23193,
8192, 25744,
9216, 27942,
10240, 30035,
11264, 32005,
12288, 33975,
13312, 35815,
14336, 37600,
15360, 39168,
16384, 40642,
18432, 43379,
20480, 45749,
22528, 47753,
24576, 49621,
26624, 51253,
28672, 52698,
30720, 53796,
32768, 54876,
36864, 57012,
40960, 58656,
45056, 59954,
49152, 61183,
53248, 62355,
57344, 63419,
61440, 64476,
65535, 65535
]
}
},
{
"rpi.sharpen":
{
"threshold": 0.1,
"strength": 1.0,
"limit": 0.18
}
}
]
} |
0 | repos/libcamera/src/ipa/rpi/vc4 | repos/libcamera/src/ipa/rpi/vc4/data/imx708_noir.json | {
"version": 2.0,
"target": "bcm2835",
"algorithms": [
{
"rpi.black_level":
{
"black_level": 4096
}
},
{
"rpi.dpc": { }
},
{
"rpi.lux":
{
"reference_shutter_speed": 10672,
"reference_gain": 1.12,
"reference_aperture": 1.0,
"reference_lux": 977,
"reference_Y": 8627
}
},
{
"rpi.noise":
{
"reference_constant": 16.0,
"reference_slope": 4.0
}
},
{
"rpi.geq":
{
"offset": 215,
"slope": 0.00287
}
},
{
"rpi.sdn": { }
},
{
"rpi.awb":
{
"priors": [
{
"lux": 0,
"prior":
[
2000, 1.0,
3000, 0.0,
13000, 0.0
]
},
{
"lux": 800,
"prior":
[
2000, 0.0,
6000, 2.0,
13000, 2.0
]
},
{
"lux": 1500,
"prior":
[
2000, 0.0,
4000, 1.0,
6000, 6.0,
6500, 7.0,
7000, 1.0,
13000, 1.0
]
}
],
"modes":
{
"auto":
{
"lo": 2500,
"hi": 8000
},
"incandescent":
{
"lo": 2500,
"hi": 3000
},
"tungsten":
{
"lo": 3000,
"hi": 3500
},
"fluorescent":
{
"lo": 4000,
"hi": 4700
},
"indoor":
{
"lo": 3000,
"hi": 5000
},
"daylight":
{
"lo": 5500,
"hi": 6500
},
"cloudy":
{
"lo": 7000,
"hi": 8600
}
},
"bayes": 0,
"ct_curve":
[
2498.0, 0.8733, 0.2606,
2821.0, 0.7707, 0.3245,
2925.0, 0.7338, 0.3499,
2926.0, 0.7193, 0.3603,
2951.0, 0.7144, 0.3639,
2954.0, 0.7111, 0.3663,
3578.0, 0.6038, 0.4516,
3717.0, 0.5861, 0.4669,
3784.0, 0.5786, 0.4737,
4485.0, 0.5113, 0.5368,
4615.0, 0.4994, 0.5486,
4671.0, 0.4927, 0.5554,
5753.0, 0.4274, 0.6246,
5773.0, 0.4265, 0.6256,
7433.0, 0.3723, 0.6881
],
"sensitivity_r": 1.05,
"sensitivity_b": 1.05,
"transverse_pos": 0.03148,
"transverse_neg": 0.03061
}
},
{
"rpi.agc":
{
"channels": [
{
"metering_modes":
{
"centre-weighted":
{
"weights":
[
3, 3, 3, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0
]
},
"spot":
{
"weights":
[
2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
},
"matrix":
{
"weights":
[
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
]
}
},
"exposure_modes":
{
"normal":
{
"shutter": [ 100, 15000, 30000, 60000, 120000 ],
"gain": [ 1.0, 1.0, 2.0, 4.0, 6.0 ]
},
"short":
{
"shutter": [ 100, 5000, 10000, 20000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 6.0 ]
},
"long":
{
"shutter": [ 1000, 30000, 60000, 90000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 12.0 ]
}
},
"constraint_modes":
{
"normal": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.2,
1000, 0.2
]
}
],
"highlight": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.2,
1000, 0.2
]
},
{
"bound": "UPPER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.8,
1000, 0.8
]
}
]
},
"y_target":
[
0, 0.16,
1000, 0.165,
10000, 0.17
],
"startup_frames": 5,
"convergence_frames": 6,
"speed": 0.15
},
{
"base_ev": 0.125,
"metering_modes":
{
"centre-weighted":
{
"weights":
[
3, 3, 3, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0
]
},
"spot":
{
"weights":
[
2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
},
"matrix":
{
"weights":
[
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
]
}
},
"exposure_modes":
{
"normal":
{
"shutter": [ 100, 15000, 30000, 60000, 120000 ],
"gain": [ 1.0, 1.0, 2.0, 4.0, 6.0 ]
},
"short":
{
"shutter": [ 100, 5000, 10000, 20000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 6.0 ]
},
"long":
{
"shutter": [ 1000, 30000, 60000, 90000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 12.0 ]
}
},
"constraint_modes":
{
"normal": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.2,
1000, 0.2
]
}
],
"highlight": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.2,
1000, 0.2
]
},
{
"bound": "UPPER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.8,
1000, 0.8
]
}
]
},
"y_target":
[
0, 0.16,
1000, 0.165,
10000, 0.17
],
"startup_frames": 5,
"convergence_frames": 6,
"speed": 0.15
},
{
"base_ev": 1.5,
"metering_modes":
{
"centre-weighted":
{
"weights":
[
3, 3, 3, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0
]
},
"spot":
{
"weights":
[
2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
},
"matrix":
{
"weights":
[
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
]
}
},
"exposure_modes":
{
"normal":
{
"shutter": [ 100, 15000, 30000, 60000, 120000 ],
"gain": [ 1.0, 1.0, 2.0, 4.0, 6.0 ]
},
"short":
{
"shutter": [ 100, 5000, 10000, 20000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 6.0 ]
},
"long":
{
"shutter": [ 1000, 30000, 60000, 90000, 120000 ],
"gain": [ 1.0, 2.0, 4.0, 6.0, 12.0 ]
}
},
"constraint_modes":
{
"normal": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.2,
1000, 0.2
]
}
],
"highlight": [
{
"bound": "LOWER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.2,
1000, 0.2
]
},
{
"bound": "UPPER",
"q_lo": 0.98,
"q_hi": 1.0,
"y_target":
[
0, 0.8,
1000, 0.8
]
}
]
},
"y_target":
[
0, 0.16,
1000, 0.165,
10000, 0.17
],
"startup_frames": 5,
"convergence_frames": 6,
"speed": 0.15
}
]
}
},
{
"rpi.alsc":
{
"omega": 1.3,
"n_iter": 100,
"luminance_strength": 0.5,
"calibrations_Cr": [
{
"ct": 3000,
"table":
[
1.562, 1.566, 1.566, 1.556, 1.533, 1.506, 1.475, 1.475, 1.475, 1.475, 1.506, 1.533, 1.555, 1.563, 1.562, 1.555,
1.563, 1.564, 1.561, 1.538, 1.508, 1.482, 1.449, 1.436, 1.436, 1.449, 1.481, 1.508, 1.537, 1.557, 1.558, 1.557,
1.564, 1.563, 1.554, 1.522, 1.482, 1.449, 1.421, 1.403, 1.403, 1.419, 1.449, 1.481, 1.519, 1.549, 1.557, 1.559,
1.564, 1.563, 1.545, 1.506, 1.462, 1.421, 1.403, 1.378, 1.378, 1.402, 1.419, 1.459, 1.503, 1.541, 1.557, 1.559,
1.564, 1.562, 1.537, 1.494, 1.447, 1.404, 1.378, 1.364, 1.364, 1.377, 1.402, 1.444, 1.491, 1.532, 1.556, 1.559,
1.564, 1.559, 1.532, 1.487, 1.438, 1.395, 1.365, 1.359, 1.359, 1.364, 1.393, 1.436, 1.484, 1.527, 1.555, 1.558,
1.564, 1.559, 1.532, 1.487, 1.438, 1.395, 1.365, 1.356, 1.356, 1.364, 1.393, 1.436, 1.484, 1.527, 1.554, 1.557,
1.564, 1.561, 1.536, 1.492, 1.444, 1.402, 1.374, 1.364, 1.363, 1.373, 1.401, 1.442, 1.489, 1.531, 1.554, 1.557,
1.564, 1.563, 1.544, 1.504, 1.458, 1.418, 1.397, 1.374, 1.374, 1.395, 1.416, 1.456, 1.501, 1.538, 1.556, 1.557,
1.564, 1.562, 1.551, 1.518, 1.477, 1.441, 1.418, 1.397, 1.397, 1.416, 1.438, 1.474, 1.514, 1.546, 1.556, 1.556,
1.562, 1.562, 1.558, 1.534, 1.499, 1.476, 1.441, 1.426, 1.426, 1.438, 1.473, 1.496, 1.531, 1.552, 1.556, 1.555,
1.561, 1.564, 1.564, 1.552, 1.525, 1.497, 1.466, 1.461, 1.461, 1.464, 1.495, 1.523, 1.548, 1.556, 1.556, 1.552
]
},
{
"ct": 5000,
"table":
[
2.609, 2.616, 2.617, 2.607, 2.573, 2.527, 2.483, 2.481, 2.481, 2.483, 2.529, 2.573, 2.604, 2.613, 2.613, 2.604,
2.609, 2.615, 2.608, 2.576, 2.533, 2.489, 2.439, 2.418, 2.418, 2.439, 2.491, 2.532, 2.577, 2.605, 2.609, 2.607,
2.611, 2.611, 2.597, 2.551, 2.489, 2.439, 2.391, 2.364, 2.364, 2.391, 2.439, 2.491, 2.551, 2.592, 2.607, 2.609,
2.612, 2.608, 2.583, 2.526, 2.457, 2.391, 2.362, 2.318, 2.318, 2.362, 2.391, 2.458, 2.526, 2.581, 2.607, 2.611,
2.612, 2.604, 2.571, 2.507, 2.435, 2.362, 2.317, 2.293, 2.294, 2.318, 2.363, 2.434, 2.508, 2.568, 2.604, 2.612,
2.611, 2.602, 2.564, 2.496, 2.419, 2.349, 2.293, 2.284, 2.284, 2.294, 2.347, 2.421, 2.497, 2.562, 2.603, 2.611,
2.609, 2.601, 2.564, 2.496, 2.419, 2.349, 2.293, 2.278, 2.278, 2.294, 2.347, 2.421, 2.497, 2.562, 2.602, 2.609,
2.609, 2.602, 2.568, 2.503, 2.429, 2.361, 2.311, 2.292, 2.292, 2.309, 2.357, 2.429, 2.504, 2.567, 2.602, 2.609,
2.606, 2.604, 2.579, 2.519, 2.449, 2.384, 2.348, 2.311, 2.311, 2.346, 2.383, 2.449, 2.521, 2.577, 2.604, 2.608,
2.604, 2.603, 2.586, 2.537, 2.474, 2.418, 2.384, 2.348, 2.348, 2.383, 2.417, 2.476, 2.538, 2.586, 2.601, 2.603,
2.603, 2.605, 2.596, 2.561, 2.508, 2.474, 2.418, 2.396, 2.396, 2.417, 2.474, 2.511, 2.562, 2.596, 2.603, 2.602,
2.601, 2.607, 2.606, 2.589, 2.549, 2.507, 2.456, 2.454, 2.454, 2.458, 2.508, 2.554, 2.594, 2.605, 2.605, 2.602
]
}
],
"calibrations_Cb": [
{
"ct": 3000,
"table":
[
3.221, 3.226, 3.231, 3.236, 3.239, 3.243, 3.245, 3.247, 3.249, 3.253, 3.255, 3.254, 3.253, 3.242, 3.235, 3.226,
3.225, 3.231, 3.235, 3.238, 3.241, 3.244, 3.246, 3.247, 3.249, 3.254, 3.256, 3.255, 3.252, 3.248, 3.241, 3.232,
3.226, 3.234, 3.239, 3.243, 3.243, 3.245, 3.247, 3.248, 3.251, 3.255, 3.256, 3.256, 3.254, 3.249, 3.244, 3.236,
3.232, 3.238, 3.245, 3.245, 3.246, 3.247, 3.248, 3.251, 3.251, 3.256, 3.257, 3.257, 3.256, 3.254, 3.249, 3.239,
3.232, 3.243, 3.246, 3.246, 3.246, 3.247, 3.248, 3.251, 3.253, 3.257, 3.258, 3.258, 3.257, 3.256, 3.254, 3.239,
3.232, 3.242, 3.246, 3.247, 3.246, 3.246, 3.248, 3.251, 3.252, 3.253, 3.256, 3.255, 3.255, 3.254, 3.251, 3.239,
3.233, 3.241, 3.244, 3.245, 3.244, 3.245, 3.246, 3.249, 3.251, 3.252, 3.253, 3.252, 3.252, 3.252, 3.249, 3.238,
3.238, 3.241, 3.246, 3.246, 3.245, 3.245, 3.247, 3.249, 3.251, 3.252, 3.253, 3.253, 3.252, 3.252, 3.249, 3.239,
3.235, 3.241, 3.245, 3.245, 3.245, 3.245, 3.246, 3.247, 3.251, 3.254, 3.253, 3.255, 3.256, 3.255, 3.251, 3.241,
3.226, 3.235, 3.241, 3.241, 3.241, 3.241, 3.243, 3.245, 3.246, 3.252, 3.253, 3.254, 3.256, 3.254, 3.241, 3.237,
3.205, 3.213, 3.213, 3.214, 3.214, 3.214, 3.214, 3.213, 3.213, 3.216, 3.218, 3.216, 3.214, 3.213, 3.211, 3.208,
3.205, 3.205, 3.212, 3.212, 3.212, 3.213, 3.211, 3.211, 3.211, 3.213, 3.216, 3.214, 3.213, 3.211, 3.208, 3.196
]
},
{
"ct": 5000,
"table":
[
1.645, 1.646, 1.649, 1.653, 1.654, 1.657, 1.659, 1.661, 1.663, 1.662, 1.661, 1.659, 1.656, 1.651, 1.645, 1.642,
1.646, 1.649, 1.652, 1.654, 1.656, 1.659, 1.662, 1.663, 1.664, 1.664, 1.662, 1.661, 1.657, 1.653, 1.649, 1.644,
1.648, 1.652, 1.654, 1.656, 1.658, 1.662, 1.665, 1.668, 1.668, 1.668, 1.665, 1.662, 1.658, 1.655, 1.652, 1.646,
1.649, 1.653, 1.656, 1.658, 1.661, 1.665, 1.667, 1.671, 1.673, 1.671, 1.668, 1.663, 1.659, 1.656, 1.654, 1.647,
1.649, 1.655, 1.657, 1.659, 1.661, 1.666, 1.671, 1.674, 1.675, 1.673, 1.671, 1.664, 1.659, 1.656, 1.654, 1.648,
1.649, 1.654, 1.656, 1.659, 1.661, 1.666, 1.673, 1.676, 1.676, 1.675, 1.671, 1.664, 1.659, 1.656, 1.654, 1.648,
1.649, 1.654, 1.656, 1.658, 1.659, 1.665, 1.672, 1.675, 1.675, 1.674, 1.668, 1.662, 1.658, 1.655, 1.654, 1.646,
1.652, 1.655, 1.657, 1.659, 1.661, 1.665, 1.671, 1.673, 1.673, 1.672, 1.668, 1.662, 1.658, 1.655, 1.654, 1.647,
1.652, 1.655, 1.657, 1.659, 1.661, 1.664, 1.667, 1.671, 1.672, 1.668, 1.666, 1.662, 1.659, 1.656, 1.654, 1.647,
1.647, 1.652, 1.655, 1.656, 1.657, 1.661, 1.664, 1.665, 1.665, 1.665, 1.663, 1.661, 1.657, 1.655, 1.647, 1.647,
1.639, 1.642, 1.644, 1.645, 1.646, 1.648, 1.648, 1.648, 1.649, 1.649, 1.649, 1.646, 1.645, 1.642, 1.639, 1.636,
1.639, 1.641, 1.642, 1.644, 1.645, 1.646, 1.647, 1.647, 1.648, 1.648, 1.647, 1.645, 1.642, 1.639, 1.636, 1.633
]
}
],
"luminance_lut":
[
2.644, 2.396, 2.077, 1.863, 1.682, 1.535, 1.392, 1.382, 1.382, 1.382, 1.515, 1.657, 1.826, 2.035, 2.351, 2.604,
2.497, 2.229, 1.947, 1.733, 1.539, 1.424, 1.296, 1.249, 1.249, 1.285, 1.401, 1.519, 1.699, 1.908, 2.183, 2.456,
2.389, 2.109, 1.848, 1.622, 1.424, 1.296, 1.201, 1.146, 1.146, 1.188, 1.285, 1.401, 1.591, 1.811, 2.065, 2.347,
2.317, 2.026, 1.771, 1.535, 1.339, 1.201, 1.145, 1.069, 1.069, 1.134, 1.188, 1.318, 1.505, 1.734, 1.983, 2.273,
2.276, 1.972, 1.715, 1.474, 1.281, 1.148, 1.069, 1.033, 1.024, 1.065, 1.134, 1.262, 1.446, 1.679, 1.929, 2.233,
2.268, 1.941, 1.682, 1.441, 1.251, 1.119, 1.033, 1.013, 1.013, 1.024, 1.105, 1.231, 1.415, 1.649, 1.898, 2.227,
2.268, 1.941, 1.682, 1.441, 1.251, 1.119, 1.033, 1.001, 1.001, 1.024, 1.105, 1.231, 1.415, 1.649, 1.898, 2.227,
2.268, 1.951, 1.694, 1.456, 1.265, 1.131, 1.044, 1.026, 1.019, 1.039, 1.118, 1.246, 1.429, 1.663, 1.912, 2.227,
2.291, 1.992, 1.738, 1.505, 1.311, 1.175, 1.108, 1.044, 1.041, 1.106, 1.161, 1.292, 1.478, 1.707, 1.955, 2.252,
2.347, 2.058, 1.803, 1.581, 1.384, 1.245, 1.175, 1.108, 1.108, 1.161, 1.239, 1.364, 1.551, 1.773, 2.023, 2.311,
2.438, 2.156, 1.884, 1.674, 1.484, 1.373, 1.245, 1.199, 1.199, 1.239, 1.363, 1.463, 1.647, 1.858, 2.123, 2.406,
2.563, 2.305, 1.998, 1.792, 1.615, 1.472, 1.339, 1.322, 1.322, 1.326, 1.456, 1.593, 1.767, 1.973, 2.273, 2.532
],
"sigma": 0.00178,
"sigma_Cb": 0.00217
}
},
{
"rpi.contrast":
{
"ce_enable": 1,
"gamma_curve":
[
0, 0,
1024, 5040,
2048, 9338,
3072, 12356,
4096, 15312,
5120, 18051,
6144, 20790,
7168, 23193,
8192, 25744,
9216, 27942,
10240, 30035,
11264, 32005,
12288, 33975,
13312, 35815,
14336, 37600,
15360, 39168,
16384, 40642,
18432, 43379,
20480, 45749,
22528, 47753,
24576, 49621,
26624, 51253,
28672, 52698,
30720, 53796,
32768, 54876,
36864, 57012,
40960, 58656,
45056, 59954,
49152, 61183,
53248, 62355,
57344, 63419,
61440, 64476,
65535, 65535
]
}
},
{
"rpi.ccm":
{
"ccms": [
{
"ct": 2498,
"ccm":
[
1.14912, 0.28638, -0.43551,
-0.49691, 1.60391, -0.10701,
-0.10513, -1.09534, 2.20047
]
},
{
"ct": 2821,
"ccm":
[
1.18251, 0.15501, -0.33752,
-0.44304, 1.58495, -0.14191,
-0.05077, -0.96422, 2.01498
]
},
{
"ct": 2925,
"ccm":
[
1.18668, 0.00195, -0.18864,
-0.41617, 1.50514, -0.08897,
-0.02675, -0.91143, 1.93818
]
},
{
"ct": 2926,
"ccm":
[
1.50948, -0.44421, -0.06527,
-0.37241, 1.41726, -0.04486,
0.07098, -0.84694, 1.77596
]
},
{
"ct": 2951,
"ccm":
[
1.52743, -0.47333, -0.05411,
-0.36485, 1.40764, -0.04279,
0.08672, -0.90479, 1.81807
]
},
{
"ct": 2954,
"ccm":
[
1.51683, -0.46841, -0.04841,
-0.36288, 1.39914, -0.03625,
0.06421, -0.82034, 1.75613
]
},
{
"ct": 3578,
"ccm":
[
1.59888, -0.59105, -0.00784,
-0.29366, 1.32037, -0.02671,
0.06627, -0.76465, 1.69838
]
},
{
"ct": 3717,
"ccm":
[
1.59063, -0.58059, -0.01003,
-0.29583, 1.32715, -0.03132,
0.03613, -0.67431, 1.63817
]
},
{
"ct": 3784,
"ccm":
[
1.59379, -0.58861, -0.00517,
-0.29178, 1.33292, -0.04115,
0.03541, -0.66162, 1.62622
]
},
{
"ct": 4485,
"ccm":
[
1.40761, -0.34561, -0.06201,
-0.32388, 1.57221, -0.24832,
-0.01014, -0.63427, 1.64441
]
},
{
"ct": 4615,
"ccm":
[
1.41537, -0.35832, -0.05705,
-0.31429, 1.56019, -0.24591,
-0.01761, -0.61859, 1.63621
]
},
{
"ct": 4671,
"ccm":
[
1.42941, -0.38178, -0.04764,
-0.31421, 1.55925, -0.24504,
-0.01141, -0.62987, 1.64129
]
},
{
"ct": 5753,
"ccm":
[
1.64549, -0.63329, -0.01221,
-0.22431, 1.36423, -0.13992,
-0.00831, -0.55373, 1.56204
]
},
{
"ct": 5773,
"ccm":
[
1.63668, -0.63557, -0.00111,
-0.21919, 1.36234, -0.14315,
-0.00399, -0.57428, 1.57827
]
},
{
"ct": 7433,
"ccm":
[
1.36007, -0.09277, -0.26729,
-0.36886, 2.09249, -0.72363,
-0.12573, -0.76761, 1.89334
]
},
{
"ct": 55792,
"ccm":
[
1.65091, -0.63689, -0.01401,
-0.22277, 1.35752, -0.13475,
-0.00943, -0.55091, 1.56033
]
}
]
}
},
{
"rpi.sharpen": { }
},
{
"rpi.af":
{
"ranges":
{
"normal":
{
"min": 0.0,
"max": 12.0,
"default": 1.0
},
"macro":
{
"min": 3.0,
"max": 15.0,
"default": 4.0
}
},
"speeds":
{
"normal":
{
"step_coarse": 1.0,
"step_fine": 0.25,
"contrast_ratio": 0.75,
"pdaf_gain": -0.02,
"pdaf_squelch": 0.125,
"max_slew": 2.0,
"pdaf_frames": 20,
"dropout_frames": 6,
"step_frames": 4
}
},
"conf_epsilon": 8,
"conf_thresh": 16,
"conf_clip": 512,
"skip_frames": 5,
"map": [ 0.0, 445, 15.0, 925 ]
}
},
{
"rpi.hdr":
{
"MultiExposureUnmerged":
{
"cadence": [ 1, 2 ],
"channel_map":
{
"short": 1,
"long": 2
}
}
}
}
]
} |
0 | repos/libcamera/src/py | repos/libcamera/src/py/examples/simple-capture.py | #!/usr/bin/env python3
# SPDX-License-Identifier: BSD-3-Clause
# Copyright (C) 2022, Tomi Valkeinen <[email protected]>
# A simple capture example showing:
# - How to setup the camera
# - Capture certain number of frames in a blocking manner
# - How to stop the camera
#
# This simple example is, in many ways, too simple. The purpose of the example
# is to introduce the concepts. A more realistic example is given in
# simple-continuous-capture.py.
import argparse
import libcamera as libcam
import selectors
import sys
# Number of frames to capture
TOTAL_FRAMES = 30
def main():
    """Capture TOTAL_FRAMES frames from the selected camera and print
    per-frame metadata.

    Returns 0 on success, -1 if the requested camera cannot be found.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-c', '--camera', type=str, default='1',
                        help='Camera index number (starting from 1) or part of the name')
    parser.add_argument('-f', '--format', type=str, help='Pixel format')
    parser.add_argument('-s', '--size', type=str, help='Size ("WxH")')
    args = parser.parse_args()

    cm = libcam.CameraManager.singleton()

    # Look the camera up either by 1-based index or by a substring of its id.
    # Using next(..., None) instead of a broad try/except Exception avoids
    # accidentally swallowing unrelated errors raised while iterating.
    if args.camera.isnumeric():
        cam_idx = int(args.camera)
        cam = next((cam for i, cam in enumerate(cm.cameras) if i + 1 == cam_idx), None)
    else:
        cam = next((cam for cam in cm.cameras if args.camera in cam.id), None)

    if cam is None:
        print(f'Failed to find camera "{args.camera}"')
        return -1

    # Acquire the camera for our use
    cam.acquire()

    # Configure the camera
    cam_config = cam.generate_configuration([libcam.StreamRole.Viewfinder])

    stream_config = cam_config.at(0)

    if args.format:
        fmt = libcam.PixelFormat(args.format)
        stream_config.pixel_format = fmt

    if args.size:
        w, h = [int(v) for v in args.size.split('x')]
        stream_config.size = libcam.Size(w, h)

    cam.configure(cam_config)

    print(f'Capturing {TOTAL_FRAMES} frames with {stream_config}')

    stream = stream_config.stream

    # Allocate the buffers for capture
    allocator = libcam.FrameBufferAllocator(cam)
    ret = allocator.allocate(stream)
    assert ret > 0

    num_bufs = len(allocator.buffers(stream))

    # Create the requests and assign a buffer for each request
    reqs = []
    for i in range(num_bufs):
        # Use the buffer index as the cookie
        req = cam.create_request(i)

        buffer = allocator.buffers(stream)[i]
        req.add_buffer(stream, buffer)

        reqs.append(req)

    # Start the camera
    cam.start()

    # frames_queued and frames_done track the number of frames queued and done
    frames_queued = 0
    frames_done = 0

    # Queue the requests to the camera
    for req in reqs:
        cam.queue_request(req)
        frames_queued += 1

    # The main loop. Wait for the queued Requests to complete, process them,
    # and re-queue them again.
    sel = selectors.DefaultSelector()
    sel.register(cm.event_fd, selectors.EVENT_READ)
    while frames_done < TOTAL_FRAMES:
        # cm.get_ready_requests() does not block, so we use a Selector to wait
        # for a camera event. Here we should almost always get a single
        # Request, but in some cases there could be multiple or none.
        events = sel.select()
        if not events:
            continue

        reqs = cm.get_ready_requests()

        for req in reqs:
            frames_done += 1

            buffers = req.buffers

            # A ready Request could contain multiple buffers if multiple streams
            # were being used. Here we know we only have a single stream,
            # and we use next(iter()) to get the first and only buffer.
            assert len(buffers) == 1

            stream, fb = next(iter(buffers.items()))

            # Here we could process the received buffer. In this example we only
            # print a few details below.
            meta = fb.metadata

            print("seq {:3}, bytes {}, frames queued/done {:3}/{:<3}"
                  .format(meta.sequence,
                          '/'.join([str(p.bytes_used) for p in meta.planes]),
                          frames_queued, frames_done))

            # If we want to capture more frames we need to queue more Requests.
            # We could create a totally new Request, but it is more efficient
            # to reuse the existing one that we just received.
            if frames_queued < TOTAL_FRAMES:
                req.reuse()
                cam.queue_request(req)
                frames_queued += 1

    # Stop the camera
    cam.stop()

    # Release the camera
    cam.release()

    return 0
# Run the example and propagate main()'s return value as the process exit code.
if __name__ == '__main__':
    sys.exit(main())
|
0 | repos/libcamera/src/py | repos/libcamera/src/py/examples/simple-continuous-capture.py | #!/usr/bin/env python3
# SPDX-License-Identifier: BSD-3-Clause
# Copyright (C) 2022, Tomi Valkeinen <[email protected]>
# A simple capture example extending the simple-capture.py example:
# - Capture frames using events from multiple cameras
# - Listening events from stdin to exit the application
# - Memory mapping the frames and calculating CRC
import binascii
import libcamera as libcam
import libcamera.utils
import selectors
import sys
# A container class for our state per camera
class CameraCaptureContext:
    """Per-camera capture state: the acquired camera, its queued Requests,
    and a memory-mapped view of every allocated FrameBuffer (used later to
    compute per-plane CRCs)."""

    idx: int
    cam: libcam.Camera
    reqs: list[libcam.Request]
    mfbs: dict[libcam.FrameBuffer, libcamera.utils.MappedFrameBuffer]

    def __init__(self, cam, idx):
        """Acquire and configure the camera, allocate buffers and create one
        Request per buffer."""
        self.idx = idx
        self.cam = cam

        # Acquire the camera for our use
        cam.acquire()

        # Configure the camera
        cam_config = cam.generate_configuration([libcam.StreamRole.Viewfinder])
        stream_config = cam_config.at(0)
        cam.configure(cam_config)

        stream = stream_config.stream

        # Allocate the buffers for capture
        allocator = libcam.FrameBufferAllocator(cam)
        ret = allocator.allocate(stream)
        assert ret > 0

        num_bufs = len(allocator.buffers(stream))

        print(f'cam{idx} ({cam.id}): capturing {num_bufs} buffers with {stream_config}')

        # Create the requests and assign a buffer for each request
        self.reqs = []
        self.mfbs = {}
        for i in range(num_bufs):
            # Use the camera index as the cookie: CaptureContext.handle_request()
            # uses req.cookie to look up this context in camera_contexts.
            req = cam.create_request(idx)

            buffer = allocator.buffers(stream)[i]
            req.add_buffer(stream, buffer)

            self.reqs.append(req)

            # Save a mmapped buffer so we can calculate the CRC later
            self.mfbs[buffer] = libcamera.utils.MappedFrameBuffer(buffer).mmap()

    def uninit_camera(self):
        """Stop streaming and release the camera back to the system."""
        # Stop the camera
        self.cam.stop()

        # Release the camera
        self.cam.release()
# A container class for our state
class CaptureContext:
    """Application-wide capture state: the CameraManager and one
    CameraCaptureContext per camera, plus the event loop that services
    camera and keyboard events."""

    cm: libcam.CameraManager
    camera_contexts: list[CameraCaptureContext]

    def __init__(self):
        # Initialize the list per instance. A class-level "= []" default
        # would be a single list shared by every CaptureContext instance.
        self.camera_contexts = []

    def handle_camera_event(self):
        """Drain and process completed Requests. Returns True to keep running."""
        # cm.get_ready_requests() returns the ready requests, which in our case
        # should almost always return a single Request, but in some cases there
        # could be multiple or none.
        reqs = self.cm.get_ready_requests()

        # Process the captured frames
        for req in reqs:
            self.handle_request(req)

        return True

    def handle_request(self, req: libcam.Request):
        """Print frame details and per-plane CRCs for a completed request,
        then re-queue it to its camera."""
        cam_ctx = self.camera_contexts[req.cookie]

        buffers = req.buffers

        # A ready Request could contain multiple buffers if multiple streams
        # were being used. Here we know we only have a single stream,
        # and we use next(iter()) to get the first and only buffer.
        assert len(buffers) == 1

        stream, fb = next(iter(buffers.items()))

        # Use the MappedFrameBuffer to access the pixel data with CPU. We
        # calculate the crc for each plane.
        mfb = cam_ctx.mfbs[fb]
        crcs = [binascii.crc32(p) for p in mfb.planes]

        meta = fb.metadata

        print('cam{:<6} seq {:<6} bytes {:10} CRCs {}'
              .format(cam_ctx.idx,
                      meta.sequence,
                      '/'.join([str(p.bytes_used) for p in meta.planes]),
                      crcs))

        # We want to re-queue the buffer we just handled. Instead of creating
        # a new Request, we re-use the old one. We need to call req.reuse()
        # to re-initialize the Request before queuing.
        req.reuse()
        cam_ctx.cam.queue_request(req)

    def handle_key_event(self):
        """Consume the pending stdin line and signal the loop to exit."""
        sys.stdin.readline()
        print('Exiting...')
        return False

    def capture(self):
        """Queue all initial Requests and run the event loop until a key
        press requests exit."""
        # Queue the requests to the camera
        for cam_ctx in self.camera_contexts:
            for req in cam_ctx.reqs:
                cam_ctx.cam.queue_request(req)

        # Use Selector to wait for events from the camera and from the keyboard
        sel = selectors.DefaultSelector()
        sel.register(sys.stdin, selectors.EVENT_READ, self.handle_key_event)
        # Register the bound method directly; a wrapping lambda adds nothing.
        sel.register(self.cm.event_fd, selectors.EVENT_READ, self.handle_camera_event)

        running = True

        while running:
            events = sel.select()
            for key, mask in events:
                # If the handler returns False, we should exit
                if not key.data():
                    running = False
running = False
def main():
    """Set up every detected camera, capture until a key is pressed, then
    tear everything down. Returns 0."""
    manager = libcam.CameraManager.singleton()

    context = CaptureContext()
    context.cm = manager

    # Build one capture context per camera the manager enumerated.
    for index, camera in enumerate(manager.cameras):
        context.camera_contexts.append(CameraCaptureContext(camera, index))

    # Start streaming on every camera before entering the capture loop.
    for camera_context in context.camera_contexts:
        camera_context.cam.start()

    context.capture()

    # Stop streaming and release each camera.
    for camera_context in context.camera_contexts:
        camera_context.uninit_camera()

    return 0
# Run the example and propagate main()'s return value as the process exit code.
if __name__ == '__main__':
    sys.exit(main())
|
0 | repos/libcamera/src/py | repos/libcamera/src/py/examples/simple-cam.py | #!/usr/bin/env python3
# SPDX-License-Identifier: BSD-3-Clause
# Copyright (C) 2022, Tomi Valkeinen <[email protected]>
# A simple libcamera capture example
#
# This is a python version of simple-cam from:
# https://git.libcamera.org/libcamera/simple-cam.git
#
# \todo Move to simple-cam repository when the Python API has stabilized more
import libcamera as libcam
import selectors
import sys
import time
TIMEOUT_SEC = 3
def handle_camera_event(cm):
    """Drain the CameraManager's completed-Request queue and process each one.

    get_ready_requests() usually yields exactly one Request, but it may
    return several or none, so always iterate over the result.
    """
    for completed in cm.get_ready_requests():
        process_request(completed)
def process_request(request):
    """Print the metadata and buffer details of a completed Request, then
    re-queue it to the global camera."""
    global camera

    print()

    print(f'Request completed: {request}')

    # When a request has completed, it is populated with a metadata control
    # list that allows an application to determine various properties of
    # the completed request. This can include the timestamp of the Sensor
    # capture, or its gain and exposure values, or properties from the IPA
    # such as the state of the 3A algorithms.
    #
    # To examine each request, print all the metadata for inspection. A custom
    # application can parse each of these items and process them according to
    # its needs.
    requestMetadata = request.metadata
    # Name the loop variable ctrl_id rather than 'id', which would shadow
    # the id() builtin.
    for ctrl_id, value in requestMetadata.items():
        print(f'\t{ctrl_id.name} = {value}')

    # Each buffer has its own FrameMetadata to describe its state, or the
    # usage of each buffer. While in our simple capture we only provide one
    # buffer per request, a request can have a buffer for each stream that
    # is established when configuring the camera.
    #
    # This allows a viewfinder and a still image to be processed at the
    # same time, or to allow obtaining the RAW capture buffer from the
    # sensor along with the image as processed by the ISP.
    buffers = request.buffers
    for _, buffer in buffers.items():
        metadata = buffer.metadata

        # Print some information about the buffer which has completed.
        print(f' seq: {metadata.sequence:06} timestamp: {metadata.timestamp} bytesused: ' +
              '/'.join([str(p.bytes_used) for p in metadata.planes]))

        # Image data can be accessed here, but the FrameBuffer
        # must be mapped by the application

    # Re-queue the Request to the camera.
    request.reuse()
    camera.queue_request(request)
# ----------------------------------------------------------------------------
# Camera Naming.
#
# Applications are responsible for deciding how to name cameras, and present
# that information to the users. Every camera has a unique identifier, though
# this string is not designed to be friendly for a human reader.
#
# To support human consumable names, libcamera provides camera properties
# that allow an application to determine a naming scheme based on its needs.
#
# In this example, we focus on the location property, but also detail the
# model string for external cameras, as this is more likely to be visible
# information to the user of an externally connected device.
#
# The unique camera ID is appended for informative purposes.
#
def camera_name(camera):
    """Build a human-readable name for a camera.

    The name is derived from the Location property; external cameras also
    get their Model string appended, and the unique camera id is always
    added for informative purposes.
    """
    props = camera.properties
    location = props.get(libcam.properties.Location, None)

    if location == libcam.properties.LocationEnum.Front:
        base = 'Internal front camera'
    elif location == libcam.properties.LocationEnum.Back:
        base = 'Internal back camera'
    elif location == libcam.properties.LocationEnum.External:
        base = 'External camera'
        # The model string is more meaningful for externally connected
        # devices, so include it when available.
        if libcam.properties.Model in props:
            base += f' "{props[libcam.properties.Model]}"'
    else:
        base = 'Undefined location'

    return base + f' ({camera.id})'
def main():
    """Capture frames from the first available camera for TIMEOUT_SEC
    seconds, printing per-request metadata as frames complete.

    Returns 0 on success, -1 if no camera is present.
    """
    global camera

    # --------------------------------------------------------------------
    # Get the Camera Manager.
    #
    # The Camera Manager is responsible for enumerating all the Camera
    # in the system, by associating Pipeline Handlers with media entities
    # registered in the system.
    #
    # The CameraManager provides a list of available Cameras that
    # applications can operate on.
    #
    # There can only be a single CameraManager within any process space.
    cm = libcam.CameraManager.singleton()

    # Just as a test, generate names of the Cameras registered in the
    # system, and list them.
    # Use a dedicated loop variable here: iterating with 'for camera in ...'
    # would clobber the global 'camera' declared above.
    for cam in cm.cameras:
        print(f' - {camera_name(cam)}')

    # --------------------------------------------------------------------
    # Camera
    #
    # Camera are entities created by pipeline handlers, inspecting the
    # entities registered in the system and reported to applications
    # by the CameraManager.
    #
    # In general terms, a Camera corresponds to a single image source
    # available in the system, such as an image sensor.
    #
    # Application lock usage of Camera by 'acquiring' them.
    # Once done with it, application shall similarly 'release' the Camera.
    #
    # As an example, use the first available camera in the system after
    # making sure that at least one camera is available.
    #
    # Cameras can be obtained by their ID or their index, to demonstrate
    # this, the following code gets the ID of the first camera; then gets
    # the camera associated with that ID (which is of course the same as
    # cm.cameras[0]).
    if not cm.cameras:
        print('No cameras were identified on the system.')
        return -1

    camera_id = cm.cameras[0].id
    camera = cm.get(camera_id)
    camera.acquire()

    # --------------------------------------------------------------------
    # Stream
    #
    # Each Camera supports a variable number of Stream. A Stream is
    # produced by processing data produced by an image source, usually
    # by an ISP.
    #
    #   +-------------------------------------------------------+
    #   | Camera                                                |
    #   |                +-----------+                          |
    #   | +--------+     |           |------> [  Main output  ] |
    #   | | Image  |     |           |                          |
    #   | |        |---->|    ISP    |------> [   Viewfinder  ] |
    #   | | Source |     |           |                          |
    #   | +--------+     |           |------> [ Still Capture ] |
    #   |                +-----------+                          |
    #   +-------------------------------------------------------+
    #
    # The number and capabilities of the Stream in a Camera are
    # a platform dependent property, and it's the pipeline handler
    # implementation that has the responsibility of correctly
    # report them.

    # --------------------------------------------------------------------
    # Camera Configuration.
    #
    # Camera configuration is tricky! It boils down to assign resources
    # of the system (such as DMA engines, scalers, format converters) to
    # the different image streams an application has requested.
    #
    # Depending on the system characteristics, some combinations of
    # sizes, formats and stream usages might or might not be possible.
    #
    # A Camera produces a CameraConfigration based on a set of intended
    # roles for each Stream the application requires.
    config = camera.generate_configuration([libcam.StreamRole.Viewfinder])

    # The CameraConfiguration contains a StreamConfiguration instance
    # for each StreamRole requested by the application, provided
    # the Camera can support all of them.
    #
    # Each StreamConfiguration has default size and format, assigned
    # by the Camera depending on the Role the application has requested.
    stream_config = config.at(0)
    print(f'Default viewfinder configuration is: {stream_config}')

    # Each StreamConfiguration parameter which is part of a
    # CameraConfiguration can be independently modified by the
    # application.
    #
    # In order to validate the modified parameter, the CameraConfiguration
    # should be validated -before- the CameraConfiguration gets applied
    # to the Camera.
    #
    # The CameraConfiguration validation process adjusts each
    # StreamConfiguration to a valid value.

    # Validating a CameraConfiguration -before- applying it will adjust it
    # to a valid configuration which is as close as possible to the one
    # requested.
    config.validate()
    print(f'Validated viewfinder configuration is: {stream_config}')

    # Once we have a validated configuration, we can apply it to the
    # Camera.
    camera.configure(config)

    # --------------------------------------------------------------------
    # Buffer Allocation
    #
    # Now that a camera has been configured, it knows all about its
    # Streams sizes and formats. The captured images need to be stored in
    # framebuffers which can either be provided by the application to the
    # library, or allocated in the Camera and exposed to the application
    # by libcamera.
    #
    # An application may decide to allocate framebuffers from elsewhere,
    # for example in memory allocated by the display driver that will
    # render the captured frames. The application will provide them to
    # libcamera by constructing FrameBuffer instances to capture images
    # directly into.
    #
    # Alternatively libcamera can help the application by exporting
    # buffers allocated in the Camera using a FrameBufferAllocator
    # instance and referencing a configured Camera to determine the
    # appropriate buffer size and types to create.
    allocator = libcam.FrameBufferAllocator(camera)

    for cfg in config:
        allocated = allocator.allocate(cfg.stream)
        print(f'Allocated {allocated} buffers for stream')

    # --------------------------------------------------------------------
    # Frame Capture
    #
    # libcamera frames capture model is based on the 'Request' concept.
    # For each frame a Request has to be queued to the Camera.
    #
    # A Request refers to (at least one) Stream for which a Buffer that
    # will be filled with image data shall be added to the Request.
    #
    # A Request is associated with a list of Controls, which are tunable
    # parameters (similar to v4l2_controls) that have to be applied to
    # the image.
    #
    # Once a request completes, all its buffers will contain image data
    # that applications can access and for each of them a list of metadata
    # properties that reports the capture parameters applied to the image.
    stream = stream_config.stream
    buffers = allocator.buffers(stream)
    requests = []
    for i in range(len(buffers)):
        request = camera.create_request()

        buffer = buffers[i]
        request.add_buffer(stream, buffer)

        # Controls can be added to a request on a per frame basis.
        request.set_control(libcam.controls.Brightness, 0.5)

        requests.append(request)

    # --------------------------------------------------------------------
    # Start Capture
    #
    # In order to capture frames the Camera has to be started and
    # Request queued to it. Enough Request to fill the Camera pipeline
    # depth have to be queued before the Camera start delivering frames.
    #
    # When a Request has been completed, it will be added to a list in the
    # CameraManager and an event will be raised using eventfd.
    #
    # The list of completed Requests can be retrieved with
    # CameraManager.get_ready_requests(), which will also clear the list in the
    # CameraManager.
    #
    # The eventfd can be retrieved from CameraManager.event_fd, and the fd can
    # be waited upon using e.g. Python's selectors.
    camera.start()
    for request in requests:
        camera.queue_request(request)

    sel = selectors.DefaultSelector()
    sel.register(cm.event_fd, selectors.EVENT_READ, lambda fd: handle_camera_event(cm))

    start_time = time.time()

    while time.time() - start_time < TIMEOUT_SEC:
        # Pass a timeout to select() so the loop terminates after
        # TIMEOUT_SEC even if no camera events arrive; a bare select()
        # would block indefinitely in that case.
        events = sel.select(TIMEOUT_SEC)
        for key, mask in events:
            key.data(key.fileobj)

    # --------------------------------------------------------------------
    # Clean Up
    #
    # Stop the Camera, release resources and stop the CameraManager.
    # libcamera has now released all resources it owned.
    camera.stop()
    camera.release()

    return 0
# Run the example and propagate main()'s return value as the process exit code.
if __name__ == '__main__':
    sys.exit(main())
|
0 | repos/libcamera/src/py | repos/libcamera/src/py/cam/gl_helpers.py | # SPDX-License-Identifier: GPL-2.0-or-later
# Copyright (C) 2022, Tomi Valkeinen <[email protected]>
from OpenGL.EGL.VERSION.EGL_1_0 import EGLNativeDisplayType, eglGetProcAddress, eglQueryString, EGL_EXTENSIONS
from OpenGL.raw.GLES2 import _types as _cs
from OpenGL.GLES2.VERSION.GLES2_2_0 import *
from OpenGL.GLES3.VERSION.GLES3_3_0 import *
from OpenGL import GL as gl
from ctypes import c_int, c_char_p, c_void_p, cdll, POINTER, util, \
pointer, CFUNCTYPE, c_bool
def getEGLNativeDisplay():
    """Open the default X11 display via ctypes and return it as an EGL
    native display pointer."""
    xlib = cdll.LoadLibrary(util.find_library('X11'))
    open_display = xlib.XOpenDisplay
    open_display.argtypes = [c_char_p]
    open_display.restype = POINTER(EGLNativeDisplayType)
    return open_display(None)
# Hack. PyOpenGL doesn't seem to manage to find glEGLImageTargetTexture2DOES.
def getglEGLImageTargetTexture2DOES():
    """Resolve glEGLImageTargetTexture2DOES through eglGetProcAddress and
    wrap it in a ctypes callable."""
    prototype = CFUNCTYPE(None, _cs.GLenum, _cs.GLeglImageOES)
    address = eglGetProcAddress('glEGLImageTargetTexture2DOES')
    return prototype(address)


glEGLImageTargetTexture2DOES = getglEGLImageTargetTexture2DOES()
def get_gl_extensions():
    """Return the list of GL extension names exposed by the current context."""
    count = GLint()
    glGetIntegerv(GL_NUM_EXTENSIONS, count)
    return [gl.glGetStringi(GL_EXTENSIONS, idx).decode() for idx in range(count.value)]
def check_gl_extensions(required_extensions):
    """Raise an Exception naming the first required GL extension that is
    not available in the current context."""
    available = get_gl_extensions()

    if False:
        # Debug aid: dump everything the driver exposes.
        print('GL EXTENSIONS: ', ' '.join(available))

    missing = [ext for ext in required_extensions if ext not in available]
    if missing:
        raise Exception(missing[0] + ' missing')
def get_egl_extensions(egl_display):
    """Return the list of EGL extension names for the given display."""
    ext_string = eglQueryString(egl_display, EGL_EXTENSIONS).decode()
    return ext_string.split(' ')
def check_egl_extensions(egl_display, required_extensions):
    """Raise an Exception naming the first required EGL extension that the
    display does not support."""
    available = get_egl_extensions(egl_display)

    if False:
        # Debug aid: dump everything the display exposes.
        print('EGL EXTENSIONS: ', ' '.join(available))

    missing = [ext for ext in required_extensions if ext not in available]
    if missing:
        raise Exception(missing[0] + ' missing')
|
0 | repos/libcamera/src/py | repos/libcamera/src/py/cam/cam.py | #!/usr/bin/env python3
# SPDX-License-Identifier: GPL-2.0-or-later
# Copyright (C) 2022, Tomi Valkeinen <[email protected]>
from typing import Any
import argparse
import binascii
import libcamera as libcam
import libcamera.utils
import sys
import traceback
class CameraContext:
    """Per-camera state for the cam tool.

    Wraps one libcamera Camera together with the command line options
    that apply to it and the objects created while configuring and
    capturing (streams, buffer allocator, requests, statistics).
    """

    camera: libcam.Camera
    id: str
    idx: int

    opt_stream: str
    opt_strict_formats: bool
    opt_crc: bool
    opt_metadata: bool
    opt_save_frames: bool
    opt_capture: int
    opt_orientation: str

    stream_names: dict[libcam.Stream, str]
    streams: list[libcam.Stream]

    allocator: libcam.FrameBufferAllocator
    requests: list[libcam.Request]
    reqs_queued: int
    reqs_completed: int
    last: int = 0
    fps: float

    def __init__(self, camera, idx):
        self.camera = camera
        self.idx = idx
        self.id = 'cam' + str(idx)
        self.reqs_queued = 0
        self.reqs_completed = 0

    def do_cmd_list_props(self):
        """Print the camera's static properties."""
        print('Properties for', self.id)

        for cid, val in self.camera.properties.items():
            print('\t{}: {}'.format(cid, val))

    def do_cmd_list_controls(self):
        """Print the camera's controls with their limits and defaults."""
        print('Controls for', self.id)

        for cid, info in self.camera.controls.items():
            print('\t{}: {}'.format(cid, info))

    def do_cmd_info(self):
        """Print the pixel formats and sizes offered for a viewfinder stream."""
        print('Stream info for', self.id)

        roles = [libcam.StreamRole.Viewfinder]

        camconfig = self.camera.generate_configuration(roles)
        if camconfig is None:
            raise Exception('Generating config failed')

        for i, stream_config in enumerate(camconfig):
            print('\t{}: {}'.format(i, stream_config))

            formats = stream_config.formats
            for fmt in formats.pixel_formats:
                print('\t * Pixelformat:', fmt, formats.range(fmt))

                for size in formats.sizes(fmt):
                    print('\t -', size)

    def acquire(self):
        self.camera.acquire()

    def release(self):
        self.camera.release()

    def __parse_streams(self):
        """Parse the --stream option strings into per-stream option dicts.

        Each description is a comma separated list of key=value items;
        supported keys are width, height, role and pixelformat.
        """
        streams = []

        for stream_desc in self.opt_stream:
            stream_opts: dict[str, Any]
            stream_opts = {'role': libcam.StreamRole.Viewfinder}

            for stream_opt in stream_desc.split(','):
                # Skip empty items, e.g. produced by a trailing comma.
                # The previous check compared against the integer 0,
                # which is never equal to a string, so empty items fell
                # through to the error path below instead.
                if stream_opt == '':
                    continue

                arr = stream_opt.split('=')
                if len(arr) != 2:
                    print('Bad stream option', stream_opt)
                    sys.exit(-1)

                key = arr[0]
                value = arr[1]

                if key in ['width', 'height']:
                    value = int(value)
                elif key == 'role':
                    rolemap = {
                        'still': libcam.StreamRole.StillCapture,
                        'raw': libcam.StreamRole.Raw,
                        'video': libcam.StreamRole.VideoRecording,
                        'viewfinder': libcam.StreamRole.Viewfinder,
                    }

                    role = rolemap.get(value.lower(), None)

                    if role is None:
                        print('Bad stream role', value)
                        sys.exit(-1)

                    value = role
                elif key == 'pixelformat':
                    # Kept as a string; converted to a PixelFormat in
                    # configure().
                    pass
                else:
                    print('Bad stream option key', key)
                    sys.exit(-1)

                stream_opts[key] = value

            streams.append(stream_opts)

        return streams

    def configure(self):
        """Generate, adjust, validate and apply the camera configuration."""
        streams = self.__parse_streams()

        roles = [opts['role'] for opts in streams]

        camconfig = self.camera.generate_configuration(roles)
        if camconfig is None:
            raise Exception('Generating config failed')

        for idx, stream_opts in enumerate(streams):
            stream_config = camconfig.at(idx)

            if 'width' in stream_opts:
                stream_config.size.width = stream_opts['width']

            if 'height' in stream_opts:
                stream_config.size.height = stream_opts['height']

            if 'pixelformat' in stream_opts:
                stream_config.pixel_format = libcam.PixelFormat(stream_opts['pixelformat'])

        if self.opt_orientation is not None:
            orientation_map = {
                'rot0': libcam.Orientation.Rotate0,
                'rot180': libcam.Orientation.Rotate180,
                'mirror': libcam.Orientation.Rotate0Mirror,
                'flip': libcam.Orientation.Rotate180Mirror,
            }

            orient = orientation_map.get(self.opt_orientation, None)
            if orient is None:
                print('Bad orientation: ', self.opt_orientation)
                sys.exit(-1)

            camconfig.orientation = orient

        stat = camconfig.validate()

        if stat == libcam.CameraConfiguration.Status.Invalid:
            print('Camera configuration invalid')
            sys.exit(-1)
        elif stat == libcam.CameraConfiguration.Status.Adjusted:
            # validate() may tweak the configuration to something the
            # camera supports; honour --strict-formats if requested.
            if self.opt_strict_formats:
                print('Adjusting camera configuration disallowed by --strict-formats argument')
                sys.exit(-1)

            print('Camera configuration adjusted')

        self.camera.configure(camconfig)

        self.stream_names = {}
        self.streams = []
        for idx, stream_config in enumerate(camconfig):
            stream = stream_config.stream
            self.streams.append(stream)
            self.stream_names[stream] = 'stream' + str(idx)
            print('{}-{}: stream config {}'.format(
                  self.id, self.stream_names[stream], stream.configuration))

    def alloc_buffers(self):
        """Allocate frame buffers for every configured stream."""
        allocator = libcam.FrameBufferAllocator(self.camera)

        for stream in self.streams:
            allocated = allocator.allocate(stream)
            print('{}-{}: Allocated {} buffers'.format(
                  self.id, self.stream_names[stream], allocated))

        self.allocator = allocator

    def create_requests(self):
        """Create one request per available buffer set.

        Each request carries one buffer per stream and is tagged with this
        context's index as the cookie, so completed requests can be routed
        back to their context.
        """
        self.requests = []

        # Identify the stream with the least number of buffers
        num_bufs = min([len(self.allocator.buffers(stream)) for stream in self.streams])

        requests = []

        for buf_num in range(num_bufs):
            request = self.camera.create_request(self.idx)

            if request is None:
                print('Can not create request')
                sys.exit(-1)

            for stream in self.streams:
                buffers = self.allocator.buffers(stream)
                buffer = buffers[buf_num]

                request.add_buffer(stream, buffer)

            requests.append(request)

        self.requests = requests

    def start(self):
        self.camera.start()

    def stop(self):
        self.camera.stop()

    def queue_requests(self):
        """Queue all prepared requests and drop our references to them."""
        for request in self.requests:
            self.camera.queue_request(request)
            self.reqs_queued += 1

        del self.requests
class CaptureState:
    """Global state of a capture run: the camera manager, the per-camera
    contexts being captured from, and the renderer displaying frames."""

    cm: libcam.CameraManager
    contexts: list[CameraContext]
    renderer: Any

    def __init__(self, cm, contexts):
        self.cm = cm
        self.contexts = contexts

    # Called from renderer when there is a libcamera event
    def event_handler(self):
        # Drain completed requests from the camera manager and dispatch
        # each one to its owning context, matched via the request cookie.
        # Returns True while any context still has frames left to capture,
        # False on completion or on any error (which is printed).
        try:
            reqs = self.cm.get_ready_requests()

            for req in reqs:
                ctx = next(ctx for ctx in self.contexts if ctx.idx == req.cookie)
                self.__request_handler(ctx, req)

            running = any(ctx.reqs_completed < ctx.opt_capture for ctx in self.contexts)
            return running
        except Exception:
            traceback.print_exc()
            return False

    def __request_handler(self, ctx, req):
        # Handle one completed request: update FPS statistics, honour the
        # per-camera --metadata/--crc/--save-frames options, then pass the
        # request on to the renderer.
        if req.status != libcam.Request.Status.Complete:
            raise Exception('{}: Request failed: {}'.format(ctx.id, req.status))

        buffers = req.buffers

        # Compute the frame rate. The timestamp is arbitrarily retrieved from
        # the first buffer, as all buffers should have matching timestamps.
        ts = buffers[next(iter(buffers))].metadata.timestamp
        last = ctx.last
        fps = 1000000000.0 / (ts - last) if (last != 0 and (ts - last) != 0) else 0
        ctx.last = ts
        ctx.fps = fps

        if ctx.opt_metadata:
            reqmeta = req.metadata
            for ctrl, val in reqmeta.items():
                print(f'\t{ctrl} = {val}')

        for stream, fb in buffers.items():
            stream_name = ctx.stream_names[stream]

            crcs = []
            if ctx.opt_crc:
                # Map the buffer only while computing the per-plane CRCs.
                with libcamera.utils.MappedFrameBuffer(fb) as mfb:
                    plane_crcs = [binascii.crc32(p) for p in mfb.planes]
                    crcs.append(plane_crcs)

            meta = fb.metadata

            print('{:.6f} ({:.2f} fps) {}-{}: seq {}, bytes {}, CRCs {}'
                  .format(ts / 1000000000, fps,
                          ctx.id, stream_name,
                          meta.sequence,
                          '/'.join([str(p.bytes_used) for p in meta.planes]),
                          crcs))

            if ctx.opt_save_frames:
                with libcamera.utils.MappedFrameBuffer(fb) as mfb:
                    filename = 'frame-{}-{}-{}.data'.format(ctx.id, stream_name, ctx.reqs_completed)
                    with open(filename, 'wb') as f:
                        for p in mfb.planes:
                            f.write(p)

        self.renderer.request_handler(ctx, req)

        ctx.reqs_completed += 1

    # Called from renderer when it has finished with a request
    def request_processed(self, ctx, req):
        # Recycle and requeue the request until the context has queued its
        # full capture quota.
        if ctx.reqs_queued < ctx.opt_capture:
            req.reuse()
            ctx.camera.queue_request(req)
            ctx.reqs_queued += 1

    def __capture_init(self):
        # Prepare every camera in lockstep phases: acquire, configure,
        # allocate buffers, build requests.
        for ctx in self.contexts:
            ctx.acquire()

        for ctx in self.contexts:
            ctx.configure()

        for ctx in self.contexts:
            ctx.alloc_buffers()

        for ctx in self.contexts:
            ctx.create_requests()

    def __capture_start(self):
        # Start all cameras before queuing any requests.
        for ctx in self.contexts:
            ctx.start()

        for ctx in self.contexts:
            ctx.queue_requests()

    def __capture_deinit(self):
        for ctx in self.contexts:
            ctx.stop()

        for ctx in self.contexts:
            ctx.release()

    def do_cmd_capture(self):
        # Full capture sequence: initialize the cameras, let the renderer
        # run the event loop until done, then tear everything down.
        self.__capture_init()

        self.renderer.setup()

        self.__capture_start()

        self.renderer.run()

        self.__capture_deinit()
class CustomAction(argparse.Action):
    """argparse action for per-camera options.

    Each occurrence of the option is stored under the most recently given
    --camera index, so the destination ends up as a {camera_index: value}
    dict (with lists accumulated for nargs='+' options).
    """

    def __init__(self, option_strings, dest, **kwargs):
        # Use a dict as the default so values can be keyed by camera.
        super().__init__(option_strings, dest, default={}, **kwargs)

    def __call__(self, parser, namespace, values, option_string=None):
        cameras = namespace.camera
        if not cameras:
            print(f'Option {option_string} requires a --camera context')
            sys.exit(-1)

        # Options declared with type=bool behave as simple flags.
        if self.type == bool:
            values = True

        cam_idx = cameras[-1]
        store = getattr(namespace, self.dest)

        if self.nargs == '+':
            # Multi-value options accumulate across repeated uses.
            store.setdefault(cam_idx, [])
            store[cam_idx] += values
        else:
            store[cam_idx] = values
def do_cmd_list(cm):
    """Print a 1-based, indexed list of the cameras known to the manager."""
    print('Available cameras:')

    index = 1
    for cam in cm.cameras:
        print(f'{index}: {cam.id}')
        index += 1
def main():
    # Command line entry point: parse arguments, run the informational
    # commands, then capture from each selected camera with the chosen
    # renderer. Returns 0 on success, -1 on a usage error.
    parser = argparse.ArgumentParser()
    # global options
    parser.add_argument('-l', '--list', action='store_true', help='List all cameras')
    parser.add_argument('-c', '--camera', type=int, action='extend', nargs=1, default=[], help='Specify which camera to operate on, by index')
    parser.add_argument('-p', '--list-properties', action='store_true', help='List cameras properties')
    parser.add_argument('--list-controls', action='store_true', help='List cameras controls')
    parser.add_argument('-I', '--info', action='store_true', help='Display information about stream(s)')
    parser.add_argument('-R', '--renderer', default='null', help='Renderer (null, kms, qt, qtgl)')

    # per camera options
    parser.add_argument('-C', '--capture', nargs='?', type=int, const=1000000, action=CustomAction, help='Capture until interrupted by user or until CAPTURE frames captured')
    parser.add_argument('--crc', nargs=0, type=bool, action=CustomAction, help='Print CRC32 for captured frames')
    parser.add_argument('--save-frames', nargs=0, type=bool, action=CustomAction, help='Save captured frames to files')
    parser.add_argument('--metadata', nargs=0, type=bool, action=CustomAction, help='Print the metadata for completed requests')
    parser.add_argument('--strict-formats', type=bool, nargs=0, action=CustomAction, help='Do not allow requested stream format(s) to be adjusted')
    parser.add_argument('-s', '--stream', nargs='+', action=CustomAction)
    parser.add_argument('-o', '--orientation', help='Desired image orientation (rot0, rot180, mirror, flip)')
    args = parser.parse_args()

    cm = libcam.CameraManager.singleton()

    if args.list:
        do_cmd_list(cm)

    contexts = []

    for cam_idx in args.camera:
        # Camera indices on the command line are 1-based.
        camera = next((c for i, c in enumerate(cm.cameras) if i + 1 == cam_idx), None)

        if camera is None:
            print('Unable to find camera', cam_idx)
            return -1

        ctx = CameraContext(camera, cam_idx)
        # Per-camera options are dicts keyed by camera index (see
        # CustomAction), so look each one up with a fallback default.
        ctx.opt_capture = args.capture.get(cam_idx, 0)
        ctx.opt_crc = args.crc.get(cam_idx, False)
        ctx.opt_save_frames = args.save_frames.get(cam_idx, False)
        ctx.opt_metadata = args.metadata.get(cam_idx, False)
        ctx.opt_strict_formats = args.strict_formats.get(cam_idx, False)
        ctx.opt_stream = args.stream.get(cam_idx, ['role=viewfinder'])
        ctx.opt_orientation = args.orientation
        contexts.append(ctx)

    for ctx in contexts:
        print('Using camera {} as {}'.format(ctx.camera.id, ctx.id))

    for ctx in contexts:
        if args.list_properties:
            ctx.do_cmd_list_props()
        if args.list_controls:
            ctx.do_cmd_list_controls()
        if args.info:
            ctx.do_cmd_info()

    # Filter out capture contexts which are not marked for capture
    contexts = [ctx for ctx in contexts if ctx.opt_capture > 0]

    if contexts:
        state = CaptureState(cm, contexts)

        # Import the renderer module lazily so its optional dependencies
        # (KMS, Qt, OpenGL) are only needed when that renderer is used.
        if args.renderer == 'null':
            import cam_null
            renderer = cam_null.NullRenderer(state)
        elif args.renderer == 'kms':
            import cam_kms
            renderer = cam_kms.KMSRenderer(state)
        elif args.renderer == 'qt':
            import cam_qt
            renderer = cam_qt.QtRenderer(state)
        elif args.renderer == 'qtgl':
            import cam_qtgl
            renderer = cam_qtgl.QtRenderer(state)
        else:
            print('Bad renderer', args.renderer)
            return -1

        state.renderer = renderer

        state.do_cmd_capture()

    return 0
if __name__ == '__main__':
sys.exit(main())
|
0 | repos/libcamera/src/py | repos/libcamera/src/py/cam/cam_qt.py | # SPDX-License-Identifier: GPL-2.0-or-later
# Copyright (C) 2022, Tomi Valkeinen <[email protected]>
from helpers import mfb_to_rgb
from PyQt5 import QtCore, QtGui, QtWidgets
import libcamera as libcam
import libcamera.utils
import sys
# Loading MJPEG to a QPixmap produces corrupt JPEG data warnings. Ignore these.
def qt_message_handler(msg_type, msg_log_context, msg_string):
    """Qt message hook that drops the noisy MJPEG decode warnings and
    chains everything else to the previously installed handler."""
    if msg_string.startswith("Corrupt JPEG data"):
        return

    # For some reason qInstallMessageHandler returns None, so we won't
    # call the old handler
    handler = old_msg_handler
    if handler is None:
        print(msg_string)
    else:
        handler(msg_type, msg_log_context, msg_string)


old_msg_handler = QtCore.qInstallMessageHandler(qt_message_handler)
def rgb_to_pix(rgb):
    """Wrap an (h, w, 3) RGB888 numpy array in a QPixmap via QImage."""
    height, width = rgb.shape[0], rgb.shape[1]
    image = QtGui.QImage(rgb, width, height, QtGui.QImage.Format.Format_RGB888)
    return QtGui.QPixmap.fromImage(image)
class QtRenderer:
    """Renderer that shows each stream in its own Qt window, converting
    frames to QPixmaps on the CPU."""

    def __init__(self, state):
        self.state = state

        self.cm = state.cm
        self.contexts = state.contexts

    def setup(self):
        # Create one window per (context, stream) pair, and mmap every
        # frame buffer up front so frames can be read without per-frame
        # mapping cost.
        self.app = QtWidgets.QApplication([])

        windows = []

        for ctx in self.contexts:
            for stream in ctx.streams:
                window = MainWindow(ctx, stream)
                window.show()
                windows.append(window)

        self.windows = windows

        buf_mmap_map = {}

        for ctx in self.contexts:
            for stream in ctx.streams:
                for buf in ctx.allocator.buffers(stream):
                    mfb = libcamera.utils.MappedFrameBuffer(buf).mmap()
                    buf_mmap_map[buf] = mfb

        self.buf_mmap_map = buf_mmap_map

    def run(self):
        # Drive the Qt event loop, waking on camera events and on stdin.
        camnotif = QtCore.QSocketNotifier(self.cm.event_fd, QtCore.QSocketNotifier.Read)
        camnotif.activated.connect(lambda _: self.readcam())

        keynotif = QtCore.QSocketNotifier(sys.stdin.fileno(), QtCore.QSocketNotifier.Read)
        keynotif.activated.connect(lambda _: self.readkey())

        print('Capturing...')

        self.app.exec()

        print('Exiting...')

    def readcam(self):
        # Quit the event loop once all contexts have finished capturing.
        running = self.state.event_handler()

        if not running:
            self.app.quit()

    def readkey(self):
        # Any line on stdin ends the capture.
        sys.stdin.readline()
        self.app.quit()

    def request_handler(self, ctx, req):
        # Route each completed buffer to the window showing its stream,
        # then hand the request back for requeuing.
        buffers = req.buffers

        for stream, fb in buffers.items():
            wnd = next(wnd for wnd in self.windows if wnd.stream == stream)

            mfb = self.buf_mmap_map[fb]

            wnd.handle_request(stream, mfb)

        self.state.request_processed(ctx, req)

    def cleanup(self):
        for w in self.windows:
            w.close()
class MainWindow(QtWidgets.QWidget):
    """Window displaying a single stream, with side panels listing the
    camera's properties, controls and live capture statistics."""

    def __init__(self, ctx, stream):
        super().__init__()

        self.ctx = ctx
        self.stream = stream

        # Image area on the left, info/controls column on the right.
        self.label = QtWidgets.QLabel()

        windowLayout = QtWidgets.QHBoxLayout()
        self.setLayout(windowLayout)

        windowLayout.addWidget(self.label)

        controlsLayout = QtWidgets.QVBoxLayout()
        windowLayout.addLayout(controlsLayout)

        windowLayout.addStretch()

        group = QtWidgets.QGroupBox('Info')
        groupLayout = QtWidgets.QVBoxLayout()
        group.setLayout(groupLayout)
        controlsLayout.addWidget(group)

        lab = QtWidgets.QLabel(ctx.id)
        groupLayout.addWidget(lab)

        # Updated on every frame with queue/completion/FPS numbers.
        self.frameLabel = QtWidgets.QLabel()
        groupLayout.addWidget(self.frameLabel)

        group = QtWidgets.QGroupBox('Properties')
        groupLayout = QtWidgets.QVBoxLayout()
        group.setLayout(groupLayout)
        controlsLayout.addWidget(group)

        camera = ctx.camera

        for cid, cv in camera.properties.items():
            lab = QtWidgets.QLabel()
            lab.setText('{} = {}'.format(cid, cv))
            groupLayout.addWidget(lab)

        group = QtWidgets.QGroupBox('Controls')
        groupLayout = QtWidgets.QVBoxLayout()
        group.setLayout(groupLayout)
        controlsLayout.addWidget(group)

        for cid, cinfo in camera.controls.items():
            lab = QtWidgets.QLabel()
            lab.setText('{} = {}/{}/{}'
                        .format(cid, cinfo.min, cinfo.max, cinfo.default))
            groupLayout.addWidget(lab)

        controlsLayout.addStretch()

    def buf_to_qpixmap(self, stream, mfb):
        # MJPEG is decoded directly by Qt; all other formats go through
        # the numpy RGB conversion helpers.
        cfg = stream.configuration

        if cfg.pixel_format == libcam.formats.MJPEG:
            pix = QtGui.QPixmap(cfg.size.width, cfg.size.height)
            pix.loadFromData(mfb.planes[0])
        else:
            rgb = mfb_to_rgb(mfb, cfg)
            if rgb is None:
                raise Exception('Format not supported: ' + cfg.pixel_format)

            pix = rgb_to_pix(rgb)

        return pix

    def handle_request(self, stream, mfb):
        # Display the new frame and refresh the statistics panel.
        ctx = self.ctx

        pix = self.buf_to_qpixmap(stream, mfb)
        self.label.setPixmap(pix)

        self.frameLabel.setText('Queued: {}\nDone: {}\nFps: {:.2f}'
                                .format(ctx.reqs_queued, ctx.reqs_completed, ctx.fps))
|
0 | repos/libcamera/src/py | repos/libcamera/src/py/cam/helpers.py | # SPDX-License-Identifier: GPL-2.0-or-later
# Copyright (C) 2022, Tomi Valkeinen <[email protected]>
#
# Debayering code from PiCamera documentation
from numpy.lib.stride_tricks import as_strided
import libcamera as libcam
import libcamera.utils
import numpy as np
def demosaic(data, r0, g0, g1, b0):
    """Demosaic a raw Bayer image with a naive 3x3 weighted average.

    data is a 2-D array of sensor samples; r0, g0, g1 and b0 are the
    (x, y) offsets of the red, the two green, and the blue samples inside
    the 2x2 Bayer tile. Returns an array of shape data.shape + (3,) with
    the same dtype as data.
    """
    # Separate the components from the Bayer data to RGB planes
    rgb = np.zeros(data.shape + (3,), dtype=data.dtype)
    rgb[r0[1]::2, r0[0]::2, 0] = data[r0[1]::2, r0[0]::2]  # Red
    rgb[g0[1]::2, g0[0]::2, 1] = data[g0[1]::2, g0[0]::2]  # Green
    rgb[g1[1]::2, g1[0]::2, 1] = data[g1[1]::2, g1[0]::2]  # Green
    rgb[b0[1]::2, b0[0]::2, 2] = data[b0[1]::2, b0[0]::2]  # Blue

    # Below we present a fairly naive de-mosaic method that simply
    # calculates the weighted average of a pixel based on the pixels
    # surrounding it. The weighting is provided by a byte representation of
    # the Bayer filter which we construct first:
    bayer = np.zeros(rgb.shape, dtype=np.uint8)
    bayer[r0[1]::2, r0[0]::2, 0] = 1  # Red
    bayer[g0[1]::2, g0[0]::2, 1] = 1  # Green
    bayer[g1[1]::2, g1[0]::2, 1] = 1  # Green
    bayer[b0[1]::2, b0[0]::2, 2] = 1  # Blue

    # Allocate an array to hold our output with the same shape as the input
    # data. After this we define the size of window that will be used to
    # calculate each weighted average (3x3). Then we pad out the rgb and
    # bayer arrays, adding blank pixels at their edges to compensate for the
    # size of the window when calculating averages for edge pixels.
    output = np.empty(rgb.shape, dtype=rgb.dtype)
    window = (3, 3)
    borders = (window[0] - 1, window[1] - 1)
    border = (borders[0] // 2, borders[1] // 2)

    rgb = np.pad(rgb, [
        (border[0], border[0]),
        (border[1], border[1]),
        (0, 0),
    ], 'constant')
    bayer = np.pad(bayer, [
        (border[0], border[0]),
        (border[1], border[1]),
        (0, 0),
    ], 'constant')

    # For each plane in the RGB data, we use a nifty numpy trick
    # (as_strided) to construct a view over the plane of 3x3 matrices. We do
    # the same for the bayer array, then use Einstein summation on each
    # (np.sum is simpler, but copies the data so it's slower), and divide
    # the results to get our weighted average:
    for plane in range(3):
        p = rgb[..., plane]
        b = bayer[..., plane]
        pview = as_strided(p, shape=(
            p.shape[0] - borders[0],
            p.shape[1] - borders[1]) + window, strides=p.strides * 2)
        bview = as_strided(b, shape=(
            b.shape[0] - borders[0],
            b.shape[1] - borders[1]) + window, strides=b.strides * 2)
        # psum holds the sum of samples in each 3x3 window, bsum the count
        # of contributing samples; their integer ratio is the average.
        psum = np.einsum('ijkl->ij', pview)
        bsum = np.einsum('ijkl->ij', bview)
        output[..., plane] = psum // bsum

    return output
def to_rgb(fmt, size, data):
    """Convert a raw frame (flat uint8 array) to an (h, w, 3) RGB image.

    fmt is a libcamera PixelFormat and size a libcamera Size. Returns a
    uint8 numpy array, or None when the format is not supported.
    """
    w = size.width
    h = size.height

    if fmt == libcam.formats.YUYV:
        # YUV422
        yuyv = data.reshape((h, w // 2 * 4))

        # YUV444
        yuv = np.empty((h, w, 3), dtype=np.uint8)
        yuv[:, :, 0] = yuyv[:, 0::2]                    # Y
        yuv[:, :, 1] = yuyv[:, 1::4].repeat(2, axis=1)  # U
        yuv[:, :, 2] = yuyv[:, 3::4].repeat(2, axis=1)  # V

        # YUV-to-RGB conversion matrix plus per-channel offsets applied
        # below.
        m = np.array([
            [1.0, 1.0, 1.0],
            [-0.000007154783816076815, -0.3441331386566162, 1.7720025777816772],
            [1.4019975662231445, -0.7141380310058594, 0.00001542569043522235]
        ])

        rgb = np.dot(yuv, m)
        rgb[:, :, 0] -= 179.45477266423404
        rgb[:, :, 1] += 135.45870971679688
        rgb[:, :, 2] -= 226.8183044444304
        rgb = rgb.astype(np.uint8)
    elif fmt == libcam.formats.RGB888:
        # Reorder the channels to R, G, B (in-place swap of the first and
        # third components).
        rgb = data.reshape((h, w, 3))
        rgb[:, :, [0, 1, 2]] = rgb[:, :, [2, 1, 0]]
    elif fmt == libcam.formats.BGR888:
        rgb = data.reshape((h, w, 3))
    elif fmt in [libcam.formats.ARGB8888, libcam.formats.XRGB8888]:
        rgb = data.reshape((h, w, 4))
        rgb = np.flip(rgb, axis=2)
        # drop alpha component
        rgb = np.delete(rgb, np.s_[0::4], axis=2)
    elif str(fmt).startswith('S'):
        # Raw Bayer format: the format name encodes the 4-character tile
        # order (e.g. 'RGGB') followed by the bits per pixel.
        fmt = str(fmt)
        bayer_pattern = fmt[1:5]
        bitspp = int(fmt[5:])

        if bitspp == 8:
            data = data.reshape((h, w))
            data = data.astype(np.uint16)
        elif bitspp in [10, 12]:
            # NOTE(review): assumes 10/12-bit data is unpacked to 16-bit
            # little-endian samples — confirm against the capture format.
            data = data.view(np.uint16)
            data = data.reshape((h, w))
        else:
            raise Exception('Bad bitspp:' + str(bitspp))

        # Locate each colour sample's (x, y) offset within the 2x2 tile.
        idx = bayer_pattern.find('R')
        assert(idx != -1)
        r0 = (idx % 2, idx // 2)
        idx = bayer_pattern.find('G')
        assert(idx != -1)
        g0 = (idx % 2, idx // 2)
        idx = bayer_pattern.find('G', idx + 1)
        assert(idx != -1)
        g1 = (idx % 2, idx // 2)
        idx = bayer_pattern.find('B')
        assert(idx != -1)
        b0 = (idx % 2, idx // 2)

        rgb = demosaic(data, r0, g0, g1, b0)
        # Scale down to 8 bits per channel.
        rgb = (rgb >> (bitspp - 8)).astype(np.uint8)
    else:
        rgb = None

    return rgb
# A naive format conversion to 24-bit RGB
def mfb_to_rgb(mfb: libcamera.utils.MappedFrameBuffer, cfg: libcam.StreamConfiguration):
    """Convert the first plane of a mapped frame buffer to RGB888, or
    return None if the stream's pixel format is unsupported."""
    raw = np.array(mfb.planes[0], dtype=np.uint8)
    return to_rgb(cfg.pixel_format, cfg.size, raw)
|
0 | repos/libcamera/src/py | repos/libcamera/src/py/cam/cam_qtgl.py | # SPDX-License-Identifier: GPL-2.0-or-later
# Copyright (C) 2022, Tomi Valkeinen <[email protected]>
from PyQt5 import QtCore, QtWidgets
from PyQt5.QtCore import Qt
import math
import os
import sys
os.environ['PYOPENGL_PLATFORM'] = 'egl'
from OpenGL.EGL.EXT.image_dma_buf_import import *
from OpenGL.EGL.KHR.image import *
from OpenGL.EGL.VERSION.EGL_1_0 import *
from OpenGL.EGL.VERSION.EGL_1_2 import *
from OpenGL.EGL.VERSION.EGL_1_3 import *
from OpenGL.GLES2.OES.EGL_image import *
from OpenGL.GLES2.OES.EGL_image_external import *
from OpenGL.GLES2.VERSION.GLES2_2_0 import *
from OpenGL.GLES3.VERSION.GLES3_3_0 import *
from OpenGL.GL import shaders
from gl_helpers import *
class EglState:
    """Owns the EGL display, config and GLES2 context used for rendering."""

    def __init__(self):
        self.create_display()
        self.choose_config()
        self.create_context()
        self.check_extensions()

    def create_display(self):
        # Open the X11 display and wrap it in an EGL display handle.
        xdpy = getEGLNativeDisplay()
        dpy = eglGetDisplay(xdpy)
        self.display = dpy

    def choose_config(self):
        # Initialize EGL and pick an RGB888, window-capable, GLES2 config.
        dpy = self.display

        major, minor = EGLint(), EGLint()

        b = eglInitialize(dpy, major, minor)
        assert(b)

        print('EGL {} {}'.format(
            eglQueryString(dpy, EGL_VENDOR).decode(),
            eglQueryString(dpy, EGL_VERSION).decode()))

        # dmabuf import is required to texture directly from frame buffers.
        check_egl_extensions(dpy, ['EGL_EXT_image_dma_buf_import'])

        b = eglBindAPI(EGL_OPENGL_ES_API)
        assert(b)

        def print_config(dpy, cfg):
            # Dump the attributes of a single EGL config.
            def getconf(a):
                value = ctypes.c_long()
                eglGetConfigAttrib(dpy, cfg, a, value)
                return value.value

            print('EGL Config {}: color buf {}/{}/{}/{} = {}, depth {}, stencil {}, native visualid {}, native visualtype {}'.format(
                getconf(EGL_CONFIG_ID),
                getconf(EGL_ALPHA_SIZE),
                getconf(EGL_RED_SIZE),
                getconf(EGL_GREEN_SIZE),
                getconf(EGL_BLUE_SIZE),
                getconf(EGL_BUFFER_SIZE),
                getconf(EGL_DEPTH_SIZE),
                getconf(EGL_STENCIL_SIZE),
                getconf(EGL_NATIVE_VISUAL_ID),
                getconf(EGL_NATIVE_VISUAL_TYPE)))

        if False:
            # Debug aid: dump every config the display offers.
            num_configs = ctypes.c_long()
            eglGetConfigs(dpy, None, 0, num_configs)
            print('{} configs'.format(num_configs.value))

            configs = (EGLConfig * num_configs.value)()
            eglGetConfigs(dpy, configs, num_configs.value, num_configs)
            for config_id in configs:
                print_config(dpy, config_id)

        config_attribs = [
            EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
            EGL_RED_SIZE, 8,
            EGL_GREEN_SIZE, 8,
            EGL_BLUE_SIZE, 8,
            EGL_ALPHA_SIZE, 0,
            EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
            EGL_NONE,
        ]

        n = EGLint()
        configs = (EGLConfig * 1)()
        b = eglChooseConfig(dpy, config_attribs, configs, 1, n)
        assert(b and n.value == 1)
        config = configs[0]

        print('Chosen Config:')
        print_config(dpy, config)

        self.config = config

    def create_context(self):
        # Create a GLES2 context and make it current with no surface yet;
        # the window surface is bound later by the widget.
        dpy = self.display

        context_attribs = [
            EGL_CONTEXT_CLIENT_VERSION, 2,
            EGL_NONE,
        ]

        context = eglCreateContext(dpy, self.config, EGL_NO_CONTEXT, context_attribs)
        assert(context)

        b = eglMakeCurrent(dpy, EGL_NO_SURFACE, EGL_NO_SURFACE, context)
        assert(b)

        self.context = context

    def check_extensions(self):
        # Verify the GL side supports EGLImage texturing and that the
        # EGLImage entry points were resolved.
        check_gl_extensions(['GL_OES_EGL_image'])

        assert(eglCreateImageKHR)
        assert(eglDestroyImageKHR)
        assert(glEGLImageTargetTexture2DOES)
class QtRenderer:
    """Qt-based renderer driving a single GLES MainWindow."""

    def __init__(self, state):
        self.state = state

    def setup(self):
        self.app = QtWidgets.QApplication([])
        self.window = MainWindow(self.state)
        self.window.show()

    def run(self):
        # Wake the Qt event loop on libcamera events and keyboard input.
        cam_notifier = QtCore.QSocketNotifier(self.state.cm.event_fd, QtCore.QSocketNotifier.Read)
        cam_notifier.activated.connect(lambda _: self.readcam())

        key_notifier = QtCore.QSocketNotifier(sys.stdin.fileno(), QtCore.QSocketNotifier.Read)
        key_notifier.activated.connect(lambda _: self.readkey())

        print('Capturing...')
        self.app.exec()
        print('Exiting...')

    def readcam(self):
        # Quit once every context has captured its requested frame count.
        if not self.state.event_handler():
            self.app.quit()

    def readkey(self):
        # Any line on stdin ends the capture.
        sys.stdin.readline()
        self.app.quit()

    def request_handler(self, ctx, req):
        self.window.handle_request(ctx, req)

    def cleanup(self):
        self.window.close()
class MainWindow(QtWidgets.QWidget):
    """Window that tiles all captured streams in a grid and renders them
    with GLES2, importing each frame buffer zero-copy as a dmabuf-backed
    EGLImage external texture."""

    def __init__(self, state):
        super().__init__()

        # We paint the native window ourselves with EGL, bypassing Qt's
        # backing store.
        self.setAttribute(Qt.WA_PaintOnScreen)
        self.setAttribute(Qt.WA_NativeWindow)

        self.state = state

        # Per-stream texture, per-context queue of pending requests, and
        # per-context request currently on screen.
        self.textures = {}
        self.reqqueue = {}
        self.current = {}

        for ctx in self.state.contexts:
            self.reqqueue[ctx.idx] = []
            self.current[ctx.idx] = []

            for stream in ctx.streams:
                self.textures[stream] = None

        # Lay the streams out in a near-square grid of tiles.
        num_tiles = len(self.textures)
        self.num_columns = math.ceil(math.sqrt(num_tiles))
        self.num_rows = math.ceil(num_tiles / self.num_columns)

        self.egl = EglState()

        self.surface = None

    def paintEngine(self):
        # Returning None tells Qt not to paint this widget itself.
        return None

    def create_surface(self):
        """Create the EGL window surface for this widget and make it
        current on the context."""
        native_surface = c_void_p(self.winId().__int__())
        surface = eglCreateWindowSurface(self.egl.display, self.egl.config,
                                         native_surface, None)

        # Store the surface before calling eglMakeCurrent(). The previous
        # code passed self.surface here while it was still None
        # (EGL_NO_SURFACE), so the newly created window surface was never
        # actually bound by this function.
        self.surface = surface

        b = eglMakeCurrent(self.egl.display, self.surface, self.surface, self.egl.context)
        assert(b)

    def init_gl(self):
        """One-time GL setup: window surface, shader program and the
        full-viewport quad used to draw each tile."""
        self.create_surface()

        vertShaderSrc = '''
            attribute vec2 aPosition;
            varying vec2 texcoord;

            void main()
            {
                gl_Position = vec4(aPosition * 2.0 - 1.0, 0.0, 1.0);
                texcoord.x = aPosition.x;
                texcoord.y = 1.0 - aPosition.y;
            }
        '''
        fragShaderSrc = '''
            #extension GL_OES_EGL_image_external : enable
            precision mediump float;
            varying vec2 texcoord;
            uniform samplerExternalOES texture;

            void main()
            {
                gl_FragColor = texture2D(texture, texcoord);
            }
        '''

        program = shaders.compileProgram(
            shaders.compileShader(vertShaderSrc, GL_VERTEX_SHADER),
            shaders.compileShader(fragShaderSrc, GL_FRAGMENT_SHADER)
        )

        glUseProgram(program)

        glClearColor(0.5, 0.8, 0.7, 1.0)

        # Unit quad; the vertex shader maps [0, 1] to clip space.
        vertPositions = [
            0.0, 0.0,
            1.0, 0.0,
            1.0, 1.0,
            0.0, 1.0
        ]

        inputAttrib = glGetAttribLocation(program, 'aPosition')
        glVertexAttribPointer(inputAttrib, 2, GL_FLOAT, GL_FALSE, 0, vertPositions)
        glEnableVertexAttribArray(inputAttrib)

    def create_texture(self, stream, fb):
        """Import fb's first plane as a dmabuf EGLImage and bind it to a
        new external texture; returns the texture id."""
        cfg = stream.configuration
        fmt = cfg.pixel_format.fourcc
        w = cfg.size.width
        h = cfg.size.height

        attribs = [
            EGL_WIDTH, w,
            EGL_HEIGHT, h,
            EGL_LINUX_DRM_FOURCC_EXT, fmt,
            EGL_DMA_BUF_PLANE0_FD_EXT, fb.planes[0].fd,
            EGL_DMA_BUF_PLANE0_OFFSET_EXT, 0,
            EGL_DMA_BUF_PLANE0_PITCH_EXT, cfg.stride,
            EGL_NONE,
        ]

        image = eglCreateImageKHR(self.egl.display,
                                  EGL_NO_CONTEXT,
                                  EGL_LINUX_DMA_BUF_EXT,
                                  None,
                                  attribs)
        assert(image)

        textures = glGenTextures(1)
        glBindTexture(GL_TEXTURE_EXTERNAL_OES, textures)
        glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_LINEAR)
        glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_LINEAR)
        glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE)
        glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE)
        glEGLImageTargetTexture2DOES(GL_TEXTURE_EXTERNAL_OES, image)

        return textures

    def resizeEvent(self, event):
        size = event.size()

        print('Resize', size)

        super().resizeEvent(event)

        if self.surface is None:
            return

        # NOTE(review): the halved width looks suspicious, but paint_gl()
        # re-sets the viewport per tile on every frame, so this value is
        # never used for drawing.
        glViewport(0, 0, size.width() // 2, size.height())

    def paintEvent(self, event):
        # Lazily initialize GL on the first paint, then promote each
        # context's next queued request to "current" (returning the
        # previous one for requeuing) and draw.
        if self.surface is None:
            self.init_gl()

        for ctx_idx, queue in self.reqqueue.items():
            if len(queue) == 0:
                continue

            ctx = next(ctx for ctx in self.state.contexts if ctx.idx == ctx_idx)

            if self.current[ctx_idx]:
                old = self.current[ctx_idx]
                self.current[ctx_idx] = None
                self.state.request_processed(ctx, old)

            next_req = queue.pop(0)
            self.current[ctx_idx] = next_req

            stream, fb = next(iter(next_req.buffers.items()))

            self.textures[stream] = self.create_texture(stream, fb)

        self.paint_gl()

    def paint_gl(self):
        """Draw every stream's current texture into its grid tile and
        swap buffers."""
        b = eglMakeCurrent(self.egl.display, self.surface, self.surface, self.egl.context)
        assert(b)

        glClear(GL_COLOR_BUFFER_BIT)

        size = self.size()

        for idx, ctx in enumerate(self.state.contexts):
            for stream in ctx.streams:
                if self.textures[stream] is None:
                    continue

                w = size.width() // self.num_columns
                h = size.height() // self.num_rows

                x = idx % self.num_columns
                y = idx // self.num_columns

                x *= w
                y *= h

                glViewport(x, y, w, h)

                glBindTexture(GL_TEXTURE_EXTERNAL_OES, self.textures[stream])
                glDrawArrays(GL_TRIANGLE_FAN, 0, 4)

        b = eglSwapBuffers(self.egl.display, self.surface)
        assert(b)

    def handle_request(self, ctx, req):
        # Queue the completed request and schedule a repaint; the request
        # is consumed in paintEvent().
        self.reqqueue[ctx.idx].append(req)
        self.update()
|
0 | repos/libcamera/src/py | repos/libcamera/src/py/cam/cam_null.py | # SPDX-License-Identifier: GPL-2.0-or-later
# Copyright (C) 2022, Tomi Valkeinen <[email protected]>
import selectors
import sys
class NullRenderer:
    """Renderer that displays nothing: it only drives the capture event
    loop and hands every request straight back for requeuing."""

    def __init__(self, state):
        self.state = state
        self.cm = state.cm
        self.contexts = state.contexts
        self.running = False

    def setup(self):
        # Nothing to prepare for a headless run.
        pass

    def run(self):
        print('Capturing...')

        self.running = True

        # Wake on libcamera events or on a line from stdin.
        sel = selectors.DefaultSelector()
        sel.register(self.cm.event_fd, selectors.EVENT_READ, self.readcam)
        sel.register(sys.stdin, selectors.EVENT_READ, self.readkey)

        print('Press enter to exit')

        while self.running:
            for key, _ in sel.select():
                key.data(key.fileobj)

        print('Exiting...')

    def readcam(self, fd):
        # Keep looping for as long as captures are outstanding.
        self.running = self.state.event_handler()

    def readkey(self, fileobj):
        # Consume the pending line, then stop the loop.
        sys.stdin.readline()
        self.running = False

    def request_handler(self, ctx, req):
        # No rendering to do; return the request immediately.
        self.state.request_processed(ctx, req)
|
0 | repos/libcamera/src/py | repos/libcamera/src/py/cam/cam_kms.py | # SPDX-License-Identifier: GPL-2.0-or-later
# Copyright (C) 2022, Tomi Valkeinen <[email protected]>
import pykms
import selectors
import sys
class KMSRenderer:
    """Render captured frames to a display using KMS atomic commits.

    Each camera stream gets its own DRM plane, and captured dmabuf
    frame buffers are wrapped as DRM framebuffers so they can be
    scanned out with zero copies.
    """

    def __init__(self, state):
        self.state = state

        self.cm = state.cm
        self.contexts = state.contexts
        self.running = False

        card = pykms.Card()

        # Reserve a connector/CRTC pair and apply the connector's
        # default mode with an atomic modeset commit.
        res = pykms.ResourceManager(card)
        conn = res.reserve_connector()
        crtc = res.reserve_crtc(conn)
        mode = conn.get_default_mode()
        modeb = mode.to_blob(card)

        req = pykms.AtomicReq(card)
        req.add_connector(conn, crtc)
        req.add_crtc(crtc, modeb)
        r = req.commit_sync(allow_modeset=True)
        assert(r == 0)

        self.card = card
        self.resman = res
        self.crtc = crtc
        self.mode = mode

        # Triple-buffering style queue of pending requests:
        # current = on screen, next = committed / awaiting flip,
        # bufqueue = overflow waiting for the next page flip.
        self.bufqueue = []
        self.current = None
        self.next = None
        # Maps libcamera FrameBuffer -> pykms.DmabufFramebuffer.
        self.cam_2_drm = {}

    # KMS

    def close(self):
        # Detach every plane (null fb, empty destination) to stop scanout.
        req = pykms.AtomicReq(self.card)
        for s in self.streams:
            req.add_plane(s['plane'], None, None, dst=(0, 0, 0, 0))
        req.commit()

    def add_plane(self, req, stream, fb):
        # Place up to four streams in the four corners of the display:
        # even indices on the left, idx < 2 on the top row.
        s = next(s for s in self.streams if s['stream'] == stream)
        idx = s['idx']
        plane = s['plane']

        if idx % 2 == 0:
            x = 0
        else:
            x = self.mode.hdisplay - fb.width

        if idx // 2 == 0:
            y = 0
        else:
            y = self.mode.vdisplay - fb.height

        req.add_plane(plane, fb, self.crtc, dst=(x, y, fb.width, fb.height))

    def apply_request(self, drmreq):
        # Commit all of the request's buffers to their planes in a
        # single (asynchronous) atomic commit; completion arrives as a
        # FLIP_COMPLETE event handled in readdrm().
        buffers = drmreq['camreq'].buffers

        req = pykms.AtomicReq(self.card)

        for stream, fb in buffers.items():
            drmfb = self.cam_2_drm.get(fb, None)
            self.add_plane(req, stream, drmfb)

        req.commit()

    def handle_page_flip(self, frame, time):
        # frame/time from the DRM event are unused; the flip itself is
        # what advances the current/next buffer chain.
        old = self.current
        self.current = self.next

        if len(self.bufqueue) > 0:
            self.next = self.bufqueue.pop(0)
        else:
            self.next = None

        if self.next:
            drmreq = self.next
            self.apply_request(drmreq)

        # The previously displayed request is no longer referenced by
        # the display engine and can be returned to libcamera.
        if old:
            req = old['camreq']
            ctx = old['camctx']
            self.state.request_processed(ctx, req)

    def queue(self, drmreq):
        # Commit immediately if no flip is outstanding, otherwise park
        # the request until the next page flip.
        if not self.next:
            self.next = drmreq
            self.apply_request(drmreq)
        else:
            self.bufqueue.append(drmreq)

    # libcamera

    def setup(self):
        # Reserve one DRM plane per configured stream and wrap every
        # allocated frame buffer as a dmabuf-backed DRM framebuffer.
        self.streams = []

        idx = 0
        for ctx in self.contexts:
            for stream in ctx.streams:
                cfg = stream.configuration
                fmt = cfg.pixel_format
                fmt = pykms.PixelFormat(fmt.fourcc)

                plane = self.resman.reserve_generic_plane(self.crtc, fmt)
                assert(plane is not None)

                self.streams.append({
                    'idx': idx,
                    'stream': stream,
                    'plane': plane,
                    'fmt': fmt,
                    'size': cfg.size,
                })

                for fb in ctx.allocator.buffers(stream):
                    w = cfg.size.width
                    h = cfg.size.height
                    fds = []
                    strides = []
                    offsets = []
                    # NOTE(review): 'plane' here shadows the DRM plane
                    # reserved above; harmless as the outer value is not
                    # used again in this iteration, but fragile.
                    for plane in fb.planes:
                        fds.append(plane.fd)
                        strides.append(cfg.stride)
                        offsets.append(plane.offset)

                    drmfb = pykms.DmabufFramebuffer(self.card, w, h, fmt,
                                                    fds, strides, offsets)
                    self.cam_2_drm[fb] = drmfb

                idx += 1

    def readdrm(self, fileobj):
        # Drain DRM events; only page-flip completions matter here.
        for ev in self.card.read_events():
            if ev.type == pykms.DrmEventType.FLIP_COMPLETE:
                self.handle_page_flip(ev.seq, ev.time)

    def readcam(self, fd):
        # The handler returns False when capture is done, stopping run().
        self.running = self.state.event_handler()

    def readkey(self, fileobj):
        # Consume the pending input line and request loop termination.
        sys.stdin.readline()
        self.running = False

    def run(self):
        # Event loop multiplexing DRM events, camera events and stdin.
        print('Capturing...')

        self.running = True

        sel = selectors.DefaultSelector()
        sel.register(self.card.fd, selectors.EVENT_READ, self.readdrm)
        sel.register(self.cm.event_fd, selectors.EVENT_READ, self.readcam)
        sel.register(sys.stdin, selectors.EVENT_READ, self.readkey)

        print('Press enter to exit')

        while self.running:
            events = sel.select()
            for key, mask in events:
                callback = key.data
                callback(key.fileobj)

        print('Exiting...')

    def request_handler(self, ctx, req):
        # Hand the completed request to the display queue; it is marked
        # processed only after it has been displaced from the screen.
        drmreq = {
            'camctx': ctx,
            'camreq': req,
        }

        self.queue(drmreq)
|
0 | repos/libcamera/src/py | repos/libcamera/src/py/libcamera/py_main.h | /* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2022, Tomi Valkeinen <[email protected]>
*/
#pragma once
#include <libcamera/base/log.h>
namespace libcamera {
LOG_DECLARE_CATEGORY(Python)
}
|
0 | repos/libcamera/src/py | repos/libcamera/src/py/libcamera/py_formats_generated.cpp.in | /* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2022, Tomi Valkeinen <[email protected]>
*
* Python bindings - Auto-generated formats
*
* This file is auto-generated. Do not edit.
*/
#include <libcamera/formats.h>
#include <pybind11/pybind11.h>
namespace py = pybind11;
/*
 * Empty anchor type: the generated pixel-format constants are attached to
 * this class so they appear in Python as attributes of "libcamera.formats".
 */
class PyFormats
{
};

void init_py_formats_generated(py::module& m)
{
	/* ${formats} is expanded by the build system with one
	 * def_readonly_static() entry per pixel format. */
	py::class_<PyFormats>(m, "formats")
${formats}
	;
}
|
0 | repos/libcamera/src/py | repos/libcamera/src/py/libcamera/py_geometry.cpp | /* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2022, Tomi Valkeinen <[email protected]>
*
* Python bindings - Geometry classes
*/
#include <array>
#include <libcamera/geometry.h>
#include <libcamera/libcamera.h>
#include <pybind11/operators.h>
#include <pybind11/pybind11.h>
#include <pybind11/stl.h>
namespace py = pybind11;
using namespace libcamera;
/*
 * Bind the libcamera geometry helpers (Point, Size, SizeRange, Rectangle)
 * into the Python module. Method names follow the Python snake_case
 * convention, mapped onto the camelCase C++ members.
 */
void init_py_geometry(py::module &m)
{
	auto pyPoint = py::class_<Point>(m, "Point");
	auto pySize = py::class_<Size>(m, "Size");
	auto pySizeRange = py::class_<SizeRange>(m, "SizeRange");
	auto pyRectangle = py::class_<Rectangle>(m, "Rectangle");

	pyPoint
		.def(py::init<>())
		.def(py::init<int, int>())
		.def_readwrite("x", &Point::x)
		.def_readwrite("y", &Point::y)
		.def(py::self == py::self)
		.def(-py::self)
		.def("__str__", &Point::toString)
		.def("__repr__", [](const Point &self) {
			return py::str("libcamera.Point({}, {})")
				.format(self.x, self.y);
		});

	pySize
		.def(py::init<>())
		.def(py::init<unsigned int, unsigned int>())
		.def_readwrite("width", &Size::width)
		.def_readwrite("height", &Size::height)
		.def_property_readonly("is_null", &Size::isNull)
		/* In-place mutators... */
		.def("align_down_to", &Size::alignDownTo)
		.def("align_up_to", &Size::alignUpTo)
		.def("bound_to", &Size::boundTo)
		.def("expand_to", &Size::expandTo)
		.def("grow_by", &Size::growBy)
		.def("shrink_by", &Size::shrinkBy)
		/* ...and their copying counterparts. */
		.def("aligned_up_to", &Size::alignedUpTo)
		/*
		 * Bug fix: this entry was a duplicate of aligned_up_to,
		 * leaving Size::alignedDownTo unexposed to Python.
		 */
		.def("aligned_down_to", &Size::alignedDownTo)
		.def("bounded_to", &Size::boundedTo)
		.def("expanded_to", &Size::expandedTo)
		.def("grown_by", &Size::grownBy)
		.def("shrunk_by", &Size::shrunkBy)
		.def("bounded_to_aspect_ratio", &Size::boundedToAspectRatio)
		.def("expanded_to_aspect_ratio", &Size::expandedToAspectRatio)
		.def("centered_to", &Size::centeredTo)
		.def(py::self == py::self)
		.def(py::self < py::self)
		.def(py::self <= py::self)
		.def(py::self * float())
		.def(py::self / float())
		.def(py::self *= float())
		.def(py::self /= float())
		.def("__str__", &Size::toString)
		.def("__repr__", [](const Size &self) {
			return py::str("libcamera.Size({}, {})")
				.format(self.width, self.height);
		});

	pySizeRange
		.def(py::init<>())
		.def(py::init<Size>())
		.def(py::init<Size, Size>())
		.def(py::init<Size, Size, unsigned int, unsigned int>())
		.def_readwrite("min", &SizeRange::min)
		.def_readwrite("max", &SizeRange::max)
		.def_readwrite("hStep", &SizeRange::hStep)
		.def_readwrite("vStep", &SizeRange::vStep)
		.def("contains", &SizeRange::contains)
		.def(py::self == py::self)
		.def("__str__", &SizeRange::toString)
		.def("__repr__", [](const SizeRange &self) {
			return py::str("libcamera.SizeRange(({}, {}), ({}, {}), {}, {})")
				.format(self.min.width, self.min.height,
					self.max.width, self.max.height,
					self.hStep, self.vStep);
		});

	pyRectangle
		.def(py::init<>())
		.def(py::init<int, int, Size>())
		.def(py::init<int, int, unsigned int, unsigned int>())
		.def(py::init<Size>())
		.def_readwrite("x", &Rectangle::x)
		.def_readwrite("y", &Rectangle::y)
		.def_readwrite("width", &Rectangle::width)
		.def_readwrite("height", &Rectangle::height)
		.def_property_readonly("is_null", &Rectangle::isNull)
		.def_property_readonly("center", &Rectangle::center)
		.def_property_readonly("size", &Rectangle::size)
		.def_property_readonly("topLeft", &Rectangle::topLeft)
		.def("scale_by", &Rectangle::scaleBy)
		.def("translate_by", &Rectangle::translateBy)
		.def("bounded_to", &Rectangle::boundedTo)
		.def("enclosed_in", &Rectangle::enclosedIn)
		.def("scaled_by", &Rectangle::scaledBy)
		.def("translated_by", &Rectangle::translatedBy)
		.def(py::self == py::self)
		.def("__str__", &Rectangle::toString)
		.def("__repr__", [](const Rectangle &self) {
			return py::str("libcamera.Rectangle({}, {}, {}, {})")
				.format(self.x, self.y, self.width, self.height);
		});
}
|
0 | repos/libcamera/src/py | repos/libcamera/src/py/libcamera/py_enums.cpp | /* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2022, Tomi Valkeinen <[email protected]>
*
* Python bindings - Enumerations
*/
#include <libcamera/libcamera.h>
#include <pybind11/pybind11.h>
namespace py = pybind11;
using namespace libcamera;
/* Bind the basic libcamera enumerations used across the Python API. */
void init_py_enums(py::module &m)
{
	py::enum_<StreamRole>(m, "StreamRole")
		.value("StillCapture", StreamRole::StillCapture)
		.value("Raw", StreamRole::Raw)
		.value("VideoRecording", StreamRole::VideoRecording)
		.value("Viewfinder", StreamRole::Viewfinder);

	py::enum_<ControlType>(m, "ControlType")
		/* "None" is a reserved word in Python, hence "Null". */
		.value("Null", ControlType::ControlTypeNone)
		.value("Bool", ControlType::ControlTypeBool)
		.value("Byte", ControlType::ControlTypeByte)
		.value("Integer32", ControlType::ControlTypeInteger32)
		.value("Integer64", ControlType::ControlTypeInteger64)
		.value("Float", ControlType::ControlTypeFloat)
		.value("String", ControlType::ControlTypeString)
		.value("Rectangle", ControlType::ControlTypeRectangle)
		.value("Size", ControlType::ControlTypeSize);

	py::enum_<Orientation>(m, "Orientation")
		.value("Rotate0", Orientation::Rotate0)
		.value("Rotate0Mirror", Orientation::Rotate0Mirror)
		.value("Rotate180", Orientation::Rotate180)
		.value("Rotate180Mirror", Orientation::Rotate180Mirror)
		.value("Rotate90Mirror", Orientation::Rotate90Mirror)
		.value("Rotate270", Orientation::Rotate270)
		.value("Rotate270Mirror", Orientation::Rotate270Mirror)
		.value("Rotate90", Orientation::Rotate90);
}
|
0 | repos/libcamera/src/py | repos/libcamera/src/py/libcamera/py_helpers.h | /* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2022, Tomi Valkeinen <[email protected]>
*/
#pragma once
#include <libcamera/libcamera.h>
#include <pybind11/pybind11.h>
pybind11::object controlValueToPy(const libcamera::ControlValue &cv);
libcamera::ControlValue pyToControlValue(const pybind11::object &ob, libcamera::ControlType type);
|
0 | repos/libcamera/src/py | repos/libcamera/src/py/libcamera/py_main.cpp | /* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2022, Tomi Valkeinen <[email protected]>
*
* Python bindings
*/
#include "py_main.h"
#include <memory>
#include <stdexcept>
#include <string>
#include <vector>
#include <libcamera/base/log.h>
#include <libcamera/libcamera.h>
#include <pybind11/functional.h>
#include <pybind11/pybind11.h>
#include <pybind11/stl.h>
#include <pybind11/stl_bind.h>
#include "py_camera_manager.h"
#include "py_helpers.h"
namespace py = pybind11;
using namespace libcamera;
namespace libcamera {
LOG_DEFINE_CATEGORY(Python)
}
/*
* This is a holder class used only for the Camera class, for the sole purpose
* of avoiding the compilation issue with Camera's private destructor.
*
* pybind11 requires a public destructor for classes held with shared_ptrs, even
* in cases where the public destructor is not strictly needed. The current
* understanding is that there are the following options to solve the problem:
*
* - Use pybind11 'smart_holder' branch. The downside is that 'smart_holder'
* is not the mainline branch, and not available in distributions.
* - https://github.com/pybind/pybind11/pull/2067
* - Make the Camera destructor public
* - Something like the PyCameraSmartPtr here, which adds a layer, hiding the
* issue.
*/
/*
 * Minimal shared_ptr-like holder used by pybind11 for Camera (see the
 * comment block above for the rationale).
 */
template<typename T>
class PyCameraSmartPtr
{
public:
	using element_type = T;

	PyCameraSmartPtr()
	{
	}

	/*
	 * pybind11 holder types must be constructible from a raw pointer,
	 * but a Camera must only ever be held through an existing
	 * shared_ptr, so constructing from a raw pointer is an error.
	 */
	explicit PyCameraSmartPtr(T *)
	{
		throw std::runtime_error("invalid SmartPtr constructor call");
	}

	explicit PyCameraSmartPtr(std::shared_ptr<T> p)
		: ptr_(p)
	{
	}

	T *get() const { return ptr_.get(); }

	operator std::shared_ptr<T>() const { return ptr_; }

private:
	std::shared_ptr<T> ptr_;
};
PYBIND11_DECLARE_HOLDER_TYPE(T, PyCameraSmartPtr<T>)
/*
* Note: global C++ destructors can be ran on this before the py module is
* destructed.
*/
static std::weak_ptr<PyCameraManager> gCameraManager;
void init_py_color_space(py::module &m);
void init_py_controls_generated(py::module &m);
void init_py_enums(py::module &m);
void init_py_formats_generated(py::module &m);
void init_py_geometry(py::module &m);
void init_py_properties_generated(py::module &m);
void init_py_transform(py::module &m);
PYBIND11_MODULE(_libcamera, m)
{
	/* Register the sub-modules defined in the other translation units. */
	init_py_enums(m);
	init_py_controls_generated(m);
	init_py_geometry(m);
	init_py_properties_generated(m);
	init_py_color_space(m);
	init_py_transform(m);

	/* Forward declarations */

	/*
	 * We need to declare all the classes here so that Python docstrings
	 * can be generated correctly.
	 * https://pybind11.readthedocs.io/en/latest/advanced/misc.html#avoiding-c-types-in-docstrings
	 */

	auto pyCameraManager = py::class_<PyCameraManager, std::shared_ptr<PyCameraManager>>(m, "CameraManager");
	auto pyCamera = py::class_<Camera, PyCameraSmartPtr<Camera>>(m, "Camera");
	auto pySensorConfiguration = py::class_<SensorConfiguration>(m, "SensorConfiguration");
	auto pyCameraConfiguration = py::class_<CameraConfiguration>(m, "CameraConfiguration");
	auto pyCameraConfigurationStatus = py::enum_<CameraConfiguration::Status>(pyCameraConfiguration, "Status");
	auto pyStreamConfiguration = py::class_<StreamConfiguration>(m, "StreamConfiguration");
	auto pyStreamFormats = py::class_<StreamFormats>(m, "StreamFormats");
	auto pyFrameBufferAllocator = py::class_<FrameBufferAllocator>(m, "FrameBufferAllocator");
	auto pyFrameBuffer = py::class_<FrameBuffer>(m, "FrameBuffer");
	auto pyFrameBufferPlane = py::class_<FrameBuffer::Plane>(pyFrameBuffer, "Plane");
	auto pyStream = py::class_<Stream>(m, "Stream");
	auto pyControlId = py::class_<ControlId>(m, "ControlId");
	auto pyControlInfo = py::class_<ControlInfo>(m, "ControlInfo");
	auto pyRequest = py::class_<Request>(m, "Request");
	auto pyRequestStatus = py::enum_<Request::Status>(pyRequest, "Status");
	auto pyRequestReuse = py::enum_<Request::ReuseFlag>(pyRequest, "Reuse");
	auto pyFrameMetadata = py::class_<FrameMetadata>(m, "FrameMetadata");
	auto pyFrameMetadataStatus = py::enum_<FrameMetadata::Status>(pyFrameMetadata, "Status");
	auto pyFrameMetadataPlane = py::class_<FrameMetadata::Plane>(pyFrameMetadata, "Plane");
	auto pyPixelFormat = py::class_<PixelFormat>(m, "PixelFormat");

	init_py_formats_generated(m);

	/* Global functions */
	m.def("log_set_level", &logSetLevel);

	/* Classes */
	pyCameraManager
		/*
		 * Process-wide singleton: held through the file-scope
		 * weak_ptr so it is destroyed once the last Python
		 * reference is dropped.
		 */
		.def_static("singleton", []() {
			std::shared_ptr<PyCameraManager> cm = gCameraManager.lock();

			if (!cm) {
				cm = std::make_shared<PyCameraManager>();
				gCameraManager = cm;
			}

			return cm;
		})

		.def_property_readonly_static("version", [](py::object /* self */) { return PyCameraManager::version(); })
		.def("get", &PyCameraManager::get, py::keep_alive<0, 1>())
		.def_property_readonly("cameras", &PyCameraManager::cameras)

		.def_property_readonly("event_fd", &PyCameraManager::eventFd)
		.def("get_ready_requests", &PyCameraManager::getReadyRequests);

	pyCamera
		.def_property_readonly("id", &Camera::id)
		/* Failing libcamera calls are mapped to OSError via system_error. */
		.def("acquire", [](Camera &self) {
			int ret = self.acquire();
			if (ret)
				throw std::system_error(-ret, std::generic_category(),
							"Failed to acquire camera");
		})
		.def("release", [](Camera &self) {
			int ret = self.release();
			if (ret)
				throw std::system_error(-ret, std::generic_category(),
							"Failed to release camera");
		})
		.def("start", [](Camera &self,
				 const std::unordered_map<const ControlId *, py::object> &controls) {
			/* \todo What happens if someone calls start() multiple times? */
			auto cm = gCameraManager.lock();
			ASSERT(cm);

			self.requestCompleted.connect(cm.get(), &PyCameraManager::handleRequestCompleted);

			ControlList controlList(self.controls());

			for (const auto &[id, obj] : controls) {
				auto val = pyToControlValue(obj, id->type());
				controlList.set(id->id(), val);
			}

			int ret = self.start(&controlList);
			if (ret) {
				self.requestCompleted.disconnect();
				throw std::system_error(-ret, std::generic_category(),
							"Failed to start camera");
			}
		}, py::arg("controls") = std::unordered_map<const ControlId *, py::object>())

		.def("stop", [](Camera &self) {
			int ret = self.stop();

			self.requestCompleted.disconnect();

			if (ret)
				throw std::system_error(-ret, std::generic_category(),
							"Failed to stop camera");
		})

		.def("__str__", [](Camera &self) {
			return "<libcamera.Camera '" + self.id() + "'>";
		})

		/* Keep the camera alive, as StreamConfiguration contains a Stream* */
		.def("generate_configuration", [](Camera &self, const std::vector<StreamRole> &roles) {
			return self.generateConfiguration(roles);
		}, py::keep_alive<0, 1>())

		.def("configure", [](Camera &self, CameraConfiguration *config) {
			int ret = self.configure(config);
			if (ret)
				throw std::system_error(-ret, std::generic_category(),
							"Failed to configure camera");
		})

		.def("create_request", [](Camera &self, uint64_t cookie) {
			std::unique_ptr<Request> req = self.createRequest(cookie);
			if (!req)
				throw std::system_error(ENOMEM, std::generic_category(),
							"Failed to create request");
			return req;
		}, py::arg("cookie") = 0)

		.def("queue_request", [](Camera &self, Request *req) {
			py::object py_req = py::cast(req);

			/*
			 * Increase the reference count, will be dropped in
			 * CameraManager.get_ready_requests().
			 */

			py_req.inc_ref();

			int ret = self.queueRequest(req);
			if (ret) {
				py_req.dec_ref();
				throw std::system_error(-ret, std::generic_category(),
							"Failed to queue request");
			}
		})

		.def_property_readonly("streams", [](Camera &self) {
			py::set set;
			for (auto &s : self.streams()) {
				py::object py_self = py::cast(self);
				py::object py_s = py::cast(s);
				/* Each Stream object keeps the Camera alive. */
				py::detail::keep_alive_impl(py_s, py_self);
				set.add(py_s);
			}
			return set;
		})

		.def_property_readonly("controls", [](Camera &self) {
			/* Convert ControlInfoMap to std container */

			std::unordered_map<const ControlId *, ControlInfo> ret;

			for (const auto &[k, cv] : self.controls())
				ret[k] = cv;

			return ret;
		})

		.def_property_readonly("properties", [](Camera &self) {
			/* Convert ControlList to std container */

			std::unordered_map<const ControlId *, py::object> ret;

			for (const auto &[k, cv] : self.properties()) {
				const ControlId *id = properties::properties.at(k);
				py::object ob = controlValueToPy(cv);
				ret[id] = ob;
			}

			return ret;
		});

	pySensorConfiguration
		.def(py::init<>())
		.def_readwrite("bit_depth", &SensorConfiguration::bitDepth)
		.def_readwrite("analog_crop", &SensorConfiguration::analogCrop)
		/* binning/skipping are exposed as tuples of their sub-fields. */
		.def_property(
			"binning",
			[](SensorConfiguration &self) {
				return py::make_tuple(self.binning.binX, self.binning.binY);
			},
			[](SensorConfiguration &self, py::object value) {
				auto vec = value.cast<std::vector<unsigned int>>();
				if (vec.size() != 2)
					throw std::runtime_error("binning requires iterable of 2 values");
				self.binning.binX = vec[0];
				self.binning.binY = vec[1];
			})
		.def_property(
			"skipping",
			[](SensorConfiguration &self) {
				return py::make_tuple(self.skipping.xOddInc, self.skipping.xEvenInc,
						      self.skipping.yOddInc, self.skipping.yEvenInc);
			},
			[](SensorConfiguration &self, py::object value) {
				auto vec = value.cast<std::vector<unsigned int>>();
				if (vec.size() != 4)
					throw std::runtime_error("skipping requires iterable of 4 values");
				self.skipping.xOddInc = vec[0];
				self.skipping.xEvenInc = vec[1];
				self.skipping.yOddInc = vec[2];
				self.skipping.yEvenInc = vec[3];
			})
		.def_readwrite("output_size", &SensorConfiguration::outputSize)
		.def("is_valid", &SensorConfiguration::isValid);

	pyCameraConfiguration
		.def("__iter__", [](CameraConfiguration &self) {
			return py::make_iterator<py::return_value_policy::reference_internal>(self);
		}, py::keep_alive<0, 1>())
		.def("__len__", [](CameraConfiguration &self) {
			return self.size();
		})
		.def("validate", &CameraConfiguration::validate)
		.def("at", py::overload_cast<unsigned int>(&CameraConfiguration::at),
		     py::return_value_policy::reference_internal)
		.def_property_readonly("size", &CameraConfiguration::size)
		.def_property_readonly("empty", &CameraConfiguration::empty)
		.def_readwrite("sensor_config", &CameraConfiguration::sensorConfig)
		.def_readwrite("orientation", &CameraConfiguration::orientation);

	pyCameraConfigurationStatus
		.value("Valid", CameraConfiguration::Valid)
		.value("Adjusted", CameraConfiguration::Adjusted)
		.value("Invalid", CameraConfiguration::Invalid);

	pyStreamConfiguration
		.def("__str__", &StreamConfiguration::toString)
		.def_property_readonly("stream", &StreamConfiguration::stream,
				       py::return_value_policy::reference_internal)
		.def_readwrite("size", &StreamConfiguration::size)
		.def_readwrite("pixel_format", &StreamConfiguration::pixelFormat)
		.def_readwrite("stride", &StreamConfiguration::stride)
		.def_readwrite("frame_size", &StreamConfiguration::frameSize)
		.def_readwrite("buffer_count", &StreamConfiguration::bufferCount)
		.def_property_readonly("formats", &StreamConfiguration::formats,
				       py::return_value_policy::reference_internal)
		.def_readwrite("color_space", &StreamConfiguration::colorSpace);

	pyStreamFormats
		.def_property_readonly("pixel_formats", &StreamFormats::pixelformats)
		.def("sizes", &StreamFormats::sizes)
		.def("range", &StreamFormats::range);

	pyFrameBufferAllocator
		.def(py::init<PyCameraSmartPtr<Camera>>(), py::keep_alive<1, 2>())
		.def("allocate", [](FrameBufferAllocator &self, Stream *stream) {
			int ret = self.allocate(stream);
			if (ret < 0)
				throw std::system_error(-ret, std::generic_category(),
							"Failed to allocate buffers");
			return ret;
		})
		.def_property_readonly("allocated", &FrameBufferAllocator::allocated)
		/* Create a list of FrameBuffers, where each FrameBuffer has a keep-alive to FrameBufferAllocator */
		.def("buffers", [](FrameBufferAllocator &self, Stream *stream) {
			py::object py_self = py::cast(self);
			py::list l;
			for (auto &ub : self.buffers(stream)) {
				py::object py_buf = py::cast(ub.get(), py::return_value_policy::reference_internal, py_self);
				l.append(py_buf);
			}
			return l;
		});

	pyFrameBuffer
		.def(py::init<std::vector<FrameBuffer::Plane>, unsigned int>(),
		     py::arg("planes"), py::arg("cookie") = 0)
		.def_property_readonly("metadata", &FrameBuffer::metadata, py::return_value_policy::reference_internal)
		.def_property_readonly("planes", &FrameBuffer::planes)
		.def_property("cookie", &FrameBuffer::cookie, &FrameBuffer::setCookie);

	pyFrameBufferPlane
		.def(py::init())
		.def(py::init([](int fd, unsigned int offset, unsigned int length) {
			auto p = FrameBuffer::Plane();
			p.fd = SharedFD(fd);
			p.offset = offset;
			p.length = length;
			return p;
		}), py::arg("fd"), py::arg("offset"), py::arg("length"))
		/* Expose the SharedFD as a plain integer file descriptor. */
		.def_property("fd",
			      [](const FrameBuffer::Plane &self) {
				      return self.fd.get();
			      },
			      [](FrameBuffer::Plane &self, int fd) {
				      self.fd = SharedFD(fd);
			      })
		.def_readwrite("offset", &FrameBuffer::Plane::offset)
		.def_readwrite("length", &FrameBuffer::Plane::length);

	pyStream
		.def_property_readonly("configuration", &Stream::configuration);

	pyControlId
		.def_property_readonly("id", &ControlId::id)
		.def_property_readonly("name", &ControlId::name)
		.def_property_readonly("type", &ControlId::type)
		.def("__str__", [](const ControlId &self) { return self.name(); })
		.def("__repr__", [](const ControlId &self) {
			return py::str("libcamera.ControlId({}, {}, {})")
				.format(self.id(), self.name(), self.type());
		});

	pyControlInfo
		.def_property_readonly("min", [](const ControlInfo &self) {
			return controlValueToPy(self.min());
		})
		.def_property_readonly("max", [](const ControlInfo &self) {
			return controlValueToPy(self.max());
		})
		.def_property_readonly("default", [](const ControlInfo &self) {
			return controlValueToPy(self.def());
		})
		.def_property_readonly("values", [](const ControlInfo &self) {
			py::list l;
			for (const auto &v : self.values())
				l.append(controlValueToPy(v));
			return l;
		})
		.def("__str__", &ControlInfo::toString)
		.def("__repr__", [](const ControlInfo &self) {
			return py::str("libcamera.ControlInfo({})")
				.format(self.toString());
		});

	pyRequest
		/* \todo Fence is not supported, so we cannot expose addBuffer() directly */
		.def("add_buffer", [](Request &self, const Stream *stream, FrameBuffer *buffer) {
			int ret = self.addBuffer(stream, buffer);
			if (ret)
				throw std::system_error(-ret, std::generic_category(),
							"Failed to add buffer");
		}, py::keep_alive<1, 3>()) /* Request keeps Framebuffer alive */
		.def_property_readonly("status", &Request::status)
		.def_property_readonly("buffers", &Request::buffers)
		.def_property_readonly("cookie", &Request::cookie)
		.def_property_readonly("sequence", &Request::sequence)
		.def_property_readonly("has_pending_buffers", &Request::hasPendingBuffers)
		.def("set_control", [](Request &self, const ControlId &id, py::object value) {
			self.controls().set(id.id(), pyToControlValue(value, id.type()));
		})
		.def_property_readonly("metadata", [](Request &self) {
			/* Convert ControlList to std container */

			std::unordered_map<const ControlId *, py::object> ret;

			for (const auto &[key, cv] : self.metadata()) {
				const ControlId *id = controls::controls.at(key);
				py::object ob = controlValueToPy(cv);
				ret[id] = ob;
			}

			return ret;
		})
		/*
		 * \todo As we add a keep_alive to the fb in addBuffers(), we
		 * can only allow reuse with ReuseBuffers.
		 */
		.def("reuse", [](Request &self) { self.reuse(Request::ReuseFlag::ReuseBuffers); })
		.def("__str__", &Request::toString);

	pyRequestStatus
		.value("Pending", Request::RequestPending)
		.value("Complete", Request::RequestComplete)
		.value("Cancelled", Request::RequestCancelled);

	pyRequestReuse
		.value("Default", Request::ReuseFlag::Default)
		.value("ReuseBuffers", Request::ReuseFlag::ReuseBuffers);

	pyFrameMetadata
		.def_readonly("status", &FrameMetadata::status)
		.def_readonly("sequence", &FrameMetadata::sequence)
		.def_readonly("timestamp", &FrameMetadata::timestamp)
		.def_property_readonly("planes", [](const FrameMetadata &self) {
			/* Convert from Span<> to std::vector<> */
			/* Note: this creates a copy */
			std::vector<FrameMetadata::Plane> v(self.planes().begin(), self.planes().end());
			return v;
		});

	pyFrameMetadataStatus
		.value("Success", FrameMetadata::FrameSuccess)
		.value("Error", FrameMetadata::FrameError)
		.value("Cancelled", FrameMetadata::FrameCancelled);

	pyFrameMetadataPlane
		.def_readwrite("bytes_used", &FrameMetadata::Plane::bytesused);

	pyPixelFormat
		.def(py::init<>())
		.def(py::init<uint32_t, uint64_t>())
		.def(py::init<>([](const std::string &str) {
			return PixelFormat::fromString(str);
		}))
		.def_property_readonly("fourcc", &PixelFormat::fourcc)
		.def_property_readonly("modifier", &PixelFormat::modifier)
		.def(py::self == py::self)
		.def("__str__", &PixelFormat::toString)
		.def("__repr__", [](const PixelFormat &self) {
			return "libcamera.PixelFormat('" + self.toString() + "')";
		});
}
|
0 | repos/libcamera/src/py | repos/libcamera/src/py/libcamera/__init__.py | # SPDX-License-Identifier: LGPL-2.1-or-later
# Copyright (C) 2022, Tomi Valkeinen <[email protected]>
from ._libcamera import *
|
0 | repos/libcamera/src/py | repos/libcamera/src/py/libcamera/py_properties_generated.cpp.in | /* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2022, Tomi Valkeinen <[email protected]>
*
* Python bindings - Auto-generated properties
*
* This file is auto-generated. Do not edit.
*/
#include <libcamera/property_ids.h>
#include <pybind11/pybind11.h>
namespace py = pybind11;
/*
 * Empty anchor type: generated property constants are attached to this
 * class so they appear in Python as attributes of "libcamera.properties".
 */
class PyProperties
{
};

${vendors_class_def}

void init_py_properties_generated(py::module& m)
{
	/*
	 * The local variable is named "controls" because the generated
	 * substitution text (see gen-py-controls.py) emits lines that
	 * reference a container called "controls" for both modes.
	 */
	auto controls = py::class_<PyProperties>(m, "properties");

${vendors_defs}

${controls}
}
|
0 | repos/libcamera/src/py | repos/libcamera/src/py/libcamera/py_transform.cpp | /* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2022, Tomi Valkeinen <[email protected]>
*
* Python bindings - Transform class
*/
#include <libcamera/transform.h>
#include <libcamera/libcamera.h>
#include <pybind11/operators.h>
#include <pybind11/pybind11.h>
#include <pybind11/stl.h>
namespace py = pybind11;
using namespace libcamera;
/* Bind libcamera::Transform as a mutable Python class. */
void init_py_transform(py::module &m)
{
	auto pyTransform = py::class_<Transform>(m, "Transform");

	pyTransform
		.def(py::init([](int rotation, bool hflip, bool vflip, bool transpose) {
			bool ok;

			/* Start from the rotation, then toggle (XOR) the
			 * requested flips/transpose on top of it. */
			Transform t = transformFromRotation(rotation, &ok);
			if (!ok)
				throw std::invalid_argument("Invalid rotation");

			if (hflip)
				t ^= Transform::HFlip;
			if (vflip)
				t ^= Transform::VFlip;
			if (transpose)
				t ^= Transform::Transpose;
			return t;
		}), py::arg("rotation") = 0, py::arg("hflip") = false,
		    py::arg("vflip") = false, py::arg("transpose") = false)
		/* Copy constructor. */
		.def(py::init([](Transform &other) { return other; }))
		.def("__str__", [](Transform &self) {
			return "<libcamera.Transform '" + std::string(transformToString(self)) + "'>";
		})
		/* Each flag is exposed as a read/write boolean property. */
		.def_property("hflip",
			      [](Transform &self) {
				      return !!(self & Transform::HFlip);
			      },
			      [](Transform &self, bool hflip) {
				      if (hflip)
					      self |= Transform::HFlip;
				      else
					      self &= ~Transform::HFlip;
			      })
		.def_property("vflip",
			      [](Transform &self) {
				      return !!(self & Transform::VFlip);
			      },
			      [](Transform &self, bool vflip) {
				      if (vflip)
					      self |= Transform::VFlip;
				      else
					      self &= ~Transform::VFlip;
			      })
		.def_property("transpose",
			      [](Transform &self) {
				      return !!(self & Transform::Transpose);
			      },
			      [](Transform &self, bool transpose) {
				      if (transpose)
					      self |= Transform::Transpose;
				      else
					      self &= ~Transform::Transpose;
			      })
		/* inverse() returns a new Transform; invert() mutates self. */
		.def("inverse", [](Transform &self) { return -self; })
		.def("invert", [](Transform &self) {
			self = -self;
		})
		/* compose() mutates self to self * other. */
		.def("compose", [](Transform &self, Transform &other) {
			self = self * other;
		});
}
|
0 | repos/libcamera/src/py | repos/libcamera/src/py/libcamera/gen-py-controls.py | #!/usr/bin/env python3
# SPDX-License-Identifier: GPL-2.0-or-later
#
# Generate Python bindings controls from YAML
import argparse
import string
import sys
import yaml
def find_common_prefix(strings):
    """Return the longest common prefix of a list of strings.

    Used to strip the shared prefix from enum entry names so the Python
    enum values stay short (e.g. 'AeModeOn' -> 'On').

    Returns '' for an empty input list (the original raised IndexError).
    """
    if not strings:
        return ''

    prefix = strings[0]

    # Note: don't name the loop variable 'string' — that would shadow the
    # file-level 'string' module import used by fill_template().
    for s in strings[1:]:
        while s[:len(prefix)] != prefix and prefix:
            prefix = prefix[:len(prefix) - 1]
        if not prefix:
            break

    return prefix
def generate_py(controls, mode):
    """Generate the C++ binding snippets for controls or properties.

    controls: mapping of vendor name -> list of control entries, where
              each entry is assumed to be a one-key dict {name: ctrl}
              (popitem() below relies on that).
    mode:     'controls' or 'properties'; selects the C++ namespace.

    Returns a dict of template substitution values for the .cpp.in files:
    'controls' (the def_readonly_static/enum lines), 'vendors_class_def'
    and 'vendors_defs' (per-vendor anchor classes and py::class_ lines).
    """
    out = ''

    vendors_class_def = []
    vendor_defs = []
    vendors = []
    for vendor, ctrl_list in controls.items():
        for ctrls in ctrl_list:
            # Each YAML entry is a single-key mapping; popitem() extracts
            # it (and empties the dict as a side effect).
            name, ctrl = ctrls.popitem()

            # First control of a non-libcamera vendor: emit the vendor's
            # anchor class and its py::class_ registration.
            if vendor not in vendors and vendor != 'libcamera':
                vendor_mode_str = f'{vendor.capitalize()}{mode.capitalize()}'
                vendors_class_def.append('class Py{}\n{{\n}};\n'.format(vendor_mode_str))
                vendor_defs.append('\tauto {} = py::class_<Py{}>(controls, \"{}\");'.format(vendor, vendor_mode_str, vendor))
                vendors.append(vendor)

            # Vendor controls live in a sub-namespace and are attached to
            # the vendor's class; core controls go on 'controls' directly.
            if vendor != 'libcamera':
                ns = 'libcamera::{}::{}::'.format(mode, vendor)
                container = vendor
            else:
                ns = 'libcamera::{}::'.format(mode)
                container = 'controls'

            out += f'\t{container}.def_readonly_static("{name}", static_cast<const libcamera::ControlId *>(&{ns}{name}));\n\n'

            enum = ctrl.get('enum')
            if not enum:
                continue

            cpp_enum = name + 'Enum'

            out += '\tpy::enum_<{}{}>({}, \"{}\")\n'.format(ns, cpp_enum, container, cpp_enum)

            if mode == 'controls':
                # Adjustments for controls
                if name == 'LensShadingMapMode':
                    prefix = 'LensShadingMapMode'
                else:
                    prefix = find_common_prefix([e['name'] for e in enum])
            else:
                # Adjustments for properties
                prefix = find_common_prefix([e['name'] for e in enum])

            for entry in enum:
                cpp_enum = entry['name']
                # Strip the shared prefix to get the short Python name.
                py_enum = entry['name'][len(prefix):]

                out += '\t\t.value(\"{}\", {}{})\n'.format(py_enum, ns, cpp_enum)

            out += '\t;\n\n'

    return {'controls': out,
            'vendors_class_def': '\n'.join(vendors_class_def),
            'vendors_defs': '\n'.join(vendor_defs)}
def fill_template(template, data):
    """Load a template file and substitute its ``${...}`` placeholders.

    template: path to the template file (bytes decoded as UTF-8).
    data:     mapping of placeholder names to replacement strings.

    Returns the substituted text. string.Template.substitute() raises
    KeyError if a placeholder is missing from data.
    """
    # Use a context manager so the file object is closed deterministically
    # (the original left the open file to the garbage collector).
    with open(template, 'rb') as f:
        text = f.read().decode('utf-8')
    return string.Template(text).substitute(data)
def main(argv):
    """Entry point: parse arguments, load control YAML files and emit the
    generated pybind11 source via the template.

    Returns 0 on success, -1 on an invalid --mode argument.
    """
    # Parse command line arguments
    parser = argparse.ArgumentParser()
    parser.add_argument('--mode', '-m', type=str, required=True,
                        help='Mode is either "controls" or "properties"')
    parser.add_argument('--output', '-o', metavar='file', type=str,
                        help='Output file name. Defaults to standard output if not specified.')
    parser.add_argument('--template', '-t', type=str, required=True,
                        help='Template file name.')
    parser.add_argument('input', type=str, nargs='+',
                        help='Input file name.')
    args = parser.parse_args(argv[1:])

    if args.mode not in ['controls', 'properties']:
        print(f'Invalid mode option "{args.mode}"', file=sys.stderr)
        return -1

    controls = {}
    for input in args.input:
        # Parse each YAML document once (the original called
        # yaml.safe_load() twice on the same data) and close the file
        # deterministically.
        with open(input, 'rb') as f:
            doc = yaml.safe_load(f)
        controls[doc['vendor']] = doc['controls']

    data = generate_py(controls, args.mode)

    data = fill_template(args.template, data)

    if args.output:
        with open(args.output, 'wb') as output:
            output.write(data.encode('utf-8'))
    else:
        sys.stdout.write(data)

    return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
0 | repos/libcamera/src/py | repos/libcamera/src/py/libcamera/py_color_space.cpp | /* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2022, Tomi Valkeinen <[email protected]>
*
* Python bindings - Color Space classes
*/
#include <libcamera/color_space.h>
#include <libcamera/libcamera.h>
#include <pybind11/operators.h>
#include <pybind11/pybind11.h>
#include <pybind11/stl.h>
namespace py = pybind11;
using namespace libcamera;
/* Bind libcamera::ColorSpace and its component enumerations. */
void init_py_color_space(py::module &m)
{
	auto pyColorSpace = py::class_<ColorSpace>(m, "ColorSpace");
	auto pyColorSpacePrimaries = py::enum_<ColorSpace::Primaries>(pyColorSpace, "Primaries");
	auto pyColorSpaceTransferFunction = py::enum_<ColorSpace::TransferFunction>(pyColorSpace, "TransferFunction");
	auto pyColorSpaceYcbcrEncoding = py::enum_<ColorSpace::YcbcrEncoding>(pyColorSpace, "YcbcrEncoding");
	auto pyColorSpaceRange = py::enum_<ColorSpace::Range>(pyColorSpace, "Range");

	pyColorSpace
		.def(py::init([](ColorSpace::Primaries primaries,
				 ColorSpace::TransferFunction transferFunction,
				 ColorSpace::YcbcrEncoding ycbcrEncoding,
				 ColorSpace::Range range) {
			return ColorSpace(primaries, transferFunction, ycbcrEncoding, range);
		}), py::arg("primaries"), py::arg("transferFunction"),
		    py::arg("ycbcrEncoding"), py::arg("range"))
		/* Copy constructor. */
		.def(py::init([](ColorSpace &other) { return other; }))
		.def("__str__", [](ColorSpace &self) {
			return "<libcamera.ColorSpace '" + self.toString() + "'>";
		})
		.def_readwrite("primaries", &ColorSpace::primaries)
		.def_readwrite("transferFunction", &ColorSpace::transferFunction)
		.def_readwrite("ycbcrEncoding", &ColorSpace::ycbcrEncoding)
		.def_readwrite("range", &ColorSpace::range)
		/* Well-known color spaces, exposed as factory methods. */
		.def_static("Raw", []() { return ColorSpace::Raw; })
		.def_static("Srgb", []() { return ColorSpace::Srgb; })
		.def_static("Sycc", []() { return ColorSpace::Sycc; })
		.def_static("Smpte170m", []() { return ColorSpace::Smpte170m; })
		.def_static("Rec709", []() { return ColorSpace::Rec709; })
		.def_static("Rec2020", []() { return ColorSpace::Rec2020; });

	pyColorSpacePrimaries
		.value("Raw", ColorSpace::Primaries::Raw)
		.value("Smpte170m", ColorSpace::Primaries::Smpte170m)
		.value("Rec709", ColorSpace::Primaries::Rec709)
		.value("Rec2020", ColorSpace::Primaries::Rec2020);

	pyColorSpaceTransferFunction
		.value("Linear", ColorSpace::TransferFunction::Linear)
		.value("Srgb", ColorSpace::TransferFunction::Srgb)
		.value("Rec709", ColorSpace::TransferFunction::Rec709);

	pyColorSpaceYcbcrEncoding
		/* "None" is a reserved word in Python, hence "Null". */
		.value("Null", ColorSpace::YcbcrEncoding::None)
		.value("Rec601", ColorSpace::YcbcrEncoding::Rec601)
		.value("Rec709", ColorSpace::YcbcrEncoding::Rec709)
		.value("Rec2020", ColorSpace::YcbcrEncoding::Rec2020);

	pyColorSpaceRange
		.value("Full", ColorSpace::Range::Full)
		.value("Limited", ColorSpace::Range::Limited);
}
|
0 | repos/libcamera/src/py | repos/libcamera/src/py/libcamera/py_camera_manager.cpp | /* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2022, Tomi Valkeinen <[email protected]>
*/
#include "py_camera_manager.h"
#include <errno.h>
#include <memory>
#include <sys/eventfd.h>
#include <system_error>
#include <unistd.h>
#include <vector>
#include "py_main.h"
namespace py = pybind11;
using namespace libcamera;
/*
 * Construct the camera manager wrapper: create the underlying CameraManager,
 * create a non-blocking eventfd used to signal request completion to Python
 * code, and start the manager.
 *
 * Throws std::system_error if the eventfd cannot be created or the manager
 * fails to start.
 */
PyCameraManager::PyCameraManager()
{
	LOG(Python, Debug) << "PyCameraManager()";

	cameraManager_ = std::make_unique<CameraManager>();

	int fd = eventfd(0, EFD_CLOEXEC | EFD_NONBLOCK);
	if (fd == -1)
		throw std::system_error(errno, std::generic_category(),
					"Failed to create eventfd");

	/* UniqueFD takes ownership and closes the fd on destruction. */
	eventFd_ = UniqueFD(fd);

	int ret = cameraManager_->start();
	if (ret)
		throw std::system_error(-ret, std::generic_category(),
					"Failed to start CameraManager");
}

PyCameraManager::~PyCameraManager()
{
	LOG(Python, Debug) << "~PyCameraManager()";
}
/* Return the list of available cameras as Python objects. */
py::list PyCameraManager::cameras()
{
	/*
	 * Create a list of Cameras, where each camera has a keep-alive to
	 * CameraManager.
	 */
	py::list l;

	for (auto &camera : cameraManager_->cameras()) {
		py::object py_cm = py::cast(this);
		py::object py_cam = py::cast(camera);
		/* Keep the manager alive for as long as any camera object lives. */
		py::detail::keep_alive_impl(py_cam, py_cm);
		l.append(py_cam);
	}

	return l;
}
/*
 * Collect the requests that completed since the previous call and return
 * them as Python objects. Returns an empty vector when the eventfd has not
 * been signalled (-EAGAIN); throws std::system_error on other read errors.
 */
std::vector<py::object> PyCameraManager::getReadyRequests()
{
	/* Drain the eventfd counter before collecting the requests. */
	int ret = readFd();

	if (ret == -EAGAIN)
		return std::vector<py::object>();

	if (ret != 0)
		throw std::system_error(-ret, std::generic_category());

	std::vector<py::object> py_reqs;

	for (Request *request : getCompletedRequests()) {
		py::object o = py::cast(request);
		/* Decrease the ref increased in Camera.queue_request() */
		o.dec_ref();
		py_reqs.push_back(o);
	}

	return py_reqs;
}
/*
 * Note: Called from another thread. Stores the completed request in the
 * mutex-protected queue and signals the eventfd so Python code waiting on
 * eventFd() wakes up.
 */
void PyCameraManager::handleRequestCompleted(Request *req)
{
	pushRequest(req);
	writeFd();
}
/*
 * Signal request completion to the Python side by incrementing the eventfd
 * counter. Called from the camera manager's completion context.
 */
void PyCameraManager::writeFd()
{
	uint64_t v = 1;

	/*
	 * eventfd writes transfer exactly 8 bytes (a uint64_t counter). Use
	 * ssize_t to hold the return value, matching readFd(): write()
	 * returns -1 on error, which must not be stored in an unsigned type.
	 */
	ssize_t s = write(eventFd_.get(), &v, 8);
	/*
	 * We should never fail, and have no simple means to manage the error,
	 * so let's log a fatal error.
	 */
	if (s != 8)
		LOG(Python, Fatal) << "Unable to write to eventfd";
}
/*
 * Drain the eventfd counter. Returns 0 on success, a negative errno
 * (-EAGAIN when no event is pending on the non-blocking fd) on read errors,
 * and -EIO on a short read.
 */
int PyCameraManager::readFd()
{
	uint8_t buf[8];

	/* A successful eventfd read always transfers exactly 8 bytes. */
	ssize_t ret = read(eventFd_.get(), buf, 8);

	if (ret == 8)
		return 0;
	else if (ret < 0)
		return -errno;
	else
		return -EIO;
}
/* Append a completed request to the queue; safe to call from any thread. */
void PyCameraManager::pushRequest(Request *req)
{
	MutexLocker guard(completedRequestsMutex_);
	completedRequests_.push_back(req);
}

/* Atomically take all completed requests, leaving the queue empty. */
std::vector<Request *> PyCameraManager::getCompletedRequests()
{
	std::vector<Request *> v;
	MutexLocker guard(completedRequestsMutex_);
	/* Swap under the lock so the handler never blocks on a long copy. */
	swap(v, completedRequests_);
	return v;
}
|
0 | repos/libcamera/src/py | repos/libcamera/src/py/libcamera/py_camera_manager.h | /* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2022, Tomi Valkeinen <[email protected]>
*/
#pragma once
#include <libcamera/base/mutex.h>
#include <libcamera/libcamera.h>
#include <pybind11/pybind11.h>
using namespace libcamera;
/*
 * Python-facing wrapper around libcamera::CameraManager.
 *
 * Request completion is signalled through an eventfd: Python code can
 * poll/select on eventFd() and then collect the completed requests with
 * getReadyRequests().
 */
class PyCameraManager
{
public:
	PyCameraManager();
	~PyCameraManager();

	pybind11::list cameras();
	std::shared_ptr<Camera> get(const std::string &name) { return cameraManager_->get(name); }

	static const std::string &version() { return CameraManager::version(); }

	/* File descriptor to wait on for request-completed notifications. */
	int eventFd() const { return eventFd_.get(); }

	std::vector<pybind11::object> getReadyRequests();

	/* Completion handler, called from the camera manager's thread. */
	void handleRequestCompleted(Request *req);

private:
	std::unique_ptr<CameraManager> cameraManager_;
	UniqueFD eventFd_;

	libcamera::Mutex completedRequestsMutex_;
	/* Completed requests pending collection by getReadyRequests(). */
	std::vector<Request *> completedRequests_
		LIBCAMERA_TSA_GUARDED_BY(completedRequestsMutex_);

	void writeFd();
	int readFd();
	void pushRequest(Request *req);
	std::vector<Request *> getCompletedRequests();
};
|
0 | repos/libcamera/src/py | repos/libcamera/src/py/libcamera/gen-py-formats.py | #!/usr/bin/env python3
# SPDX-License-Identifier: GPL-2.0-or-later
#
# Generate Python format definitions from YAML
import argparse
import string
import sys
import yaml
def generate(formats):
    """Build the substitution data for the formats template.

    formats: a list of single-key mappings, each mapping a format name to its
    attributes, as parsed from the formats YAML file.

    Returns a dict with a 'formats' key holding one pybind11
    ``.def_readonly_static()`` line per format.
    """
    fmts = []

    for fmt in formats:
        # Read the (last) key without popitem(), which would mutate the
        # caller's parsed YAML data as a side effect.
        name = list(fmt)[-1]
        fmts.append(f'\t\t.def_readonly_static("{name}", &libcamera::formats::{name})')

    return {'formats': '\n'.join(fmts)}
def fill_template(template, data):
    """Read the template file and substitute its placeholders from data."""
    with open(template, encoding='utf-8') as f:
        tmpl = string.Template(f.read())
    return tmpl.substitute(data)
def main(argv):
    """Command-line entry point: render the formats YAML through the template."""
    parser = argparse.ArgumentParser()
    parser.add_argument('-o', dest='output', metavar='file', type=str,
                        help='Output file name. Defaults to standard output if not specified.')
    parser.add_argument('input', type=str,
                        help='Input file name.')
    parser.add_argument('template', type=str,
                        help='Template file name.')
    args = parser.parse_args(argv[1:])

    # Load the format descriptions from the input YAML file.
    with open(args.input, encoding='utf-8') as src:
        fmts = yaml.safe_load(src)['formats']

    # Generate the substitution data and render the template with it.
    rendered = fill_template(args.template, generate(fmts))

    if args.output:
        with open(args.output, 'w', encoding='utf-8') as dst:
            dst.write(rendered)
    else:
        sys.stdout.write(rendered)

    return 0


if __name__ == '__main__':
    sys.exit(main(sys.argv))
|
0 | repos/libcamera/src/py | repos/libcamera/src/py/libcamera/py_helpers.cpp | /* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2022, Tomi Valkeinen <[email protected]>
*/
#include "py_helpers.h"
#include <libcamera/libcamera.h>
#include <pybind11/functional.h>
#include <pybind11/stl.h>
#include <pybind11/stl_bind.h>
namespace py = pybind11;
using namespace libcamera;
/*
 * Convert a ControlValue holding type T to a Python object: a tuple of T
 * for array values, a plain T otherwise.
 */
template<typename T>
static py::object valueOrTuple(const ControlValue &cv)
{
	if (cv.isArray()) {
		const T *v = reinterpret_cast<const T *>(cv.data().data());
		auto t = py::tuple(cv.numElements());

		for (size_t i = 0; i < cv.numElements(); ++i)
			t[i] = v[i];

		return std::move(t);
	}

	return py::cast(cv.get<T>());
}
/*
 * Convert a libcamera ControlValue to the corresponding Python object.
 * Array-capable types are converted through valueOrTuple(). Throws
 * std::runtime_error for unsupported control types.
 */
py::object controlValueToPy(const ControlValue &cv)
{
	switch (cv.type()) {
	case ControlTypeBool:
		return valueOrTuple<bool>(cv);
	case ControlTypeByte:
		return valueOrTuple<uint8_t>(cv);
	case ControlTypeInteger32:
		return valueOrTuple<int32_t>(cv);
	case ControlTypeInteger64:
		return valueOrTuple<int64_t>(cv);
	case ControlTypeFloat:
		return valueOrTuple<float>(cv);
	case ControlTypeString:
		return py::cast(cv.get<std::string>());
	case ControlTypeRectangle:
		return valueOrTuple<Rectangle>(cv);
	case ControlTypeSize: {
		/*
		 * NOTE(review): this casts a pointer into the ControlValue's
		 * internal storage -- confirm pybind11 copies the pointee
		 * here rather than keeping a reference that could dangle if
		 * cv is a temporary.
		 */
		const Size *v = reinterpret_cast<const Size *>(cv.data().data());
		return py::cast(v);
	}
	case ControlTypeNone:
		return py::none();
	default:
		throw std::runtime_error("Unsupported ControlValue type");
	}
}
/*
 * Build a ControlValue of type T from a Python object, accepting either a
 * scalar or a list/tuple (converted to an array control value).
 */
template<typename T>
static ControlValue controlValueMaybeArray(const py::object &ob)
{
	if (py::isinstance<py::list>(ob) || py::isinstance<py::tuple>(ob)) {
		std::vector<T> vec = ob.cast<std::vector<T>>();
		return ControlValue(Span<const T>(vec));
	}

	return ControlValue(ob.cast<T>());
}
/*
 * Convert a Python object to a ControlValue of the given control type.
 * Throws std::runtime_error for types with no implemented conversion.
 */
ControlValue pyToControlValue(const py::object &ob, ControlType type)
{
	switch (type) {
	case ControlTypeBool:
		return ControlValue(ob.cast<bool>());
	case ControlTypeByte:
		return controlValueMaybeArray<uint8_t>(ob);
	case ControlTypeInteger32:
		return controlValueMaybeArray<int32_t>(ob);
	case ControlTypeInteger64:
		return controlValueMaybeArray<int64_t>(ob);
	case ControlTypeFloat:
		return controlValueMaybeArray<float>(ob);
	case ControlTypeString:
		return ControlValue(ob.cast<std::string>());
	case ControlTypeRectangle:
		return controlValueMaybeArray<Rectangle>(ob);
	case ControlTypeSize:
		return ControlValue(ob.cast<Size>());
	case ControlTypeNone:
		return ControlValue();
	default:
		throw std::runtime_error("Control type not implemented");
	}
}
|
0 | repos/libcamera/src/py | repos/libcamera/src/py/libcamera/py_controls_generated.cpp.in | /* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2022, Tomi Valkeinen <[email protected]>
*
* Python bindings - Auto-generated controls
*
* This file is auto-generated. Do not edit.
*/
#include <libcamera/control_ids.h>
#include <pybind11/pybind11.h>
namespace py = pybind11;
class PyControls
{
};
${vendors_class_def}
void init_py_controls_generated(py::module& m)
{
auto controls = py::class_<PyControls>(m, "controls");
${vendors_defs}
${controls}
}
|
0 | repos/libcamera/src/py/libcamera | repos/libcamera/src/py/libcamera/utils/MappedFrameBuffer.py | # SPDX-License-Identifier: LGPL-2.1-or-later
# Copyright (C) 2022, Tomi Valkeinen <[email protected]>
import libcamera
from typing import Tuple
class MappedFrameBuffer:
    """
    Provides memoryviews for the FrameBuffer's planes
    """
    def __init__(self, fb: libcamera.FrameBuffer):
        self.__fb = fb
        # Tuple of per-plane memoryviews, populated by mmap().
        self.__planes = ()
        # Tuple of mmap.mmap objects, one per distinct buffer fd.
        self.__maps = ()

    def __enter__(self):
        # Context-manager support: `with MappedFrameBuffer(fb) as mfb:`.
        return self.mmap()

    def __exit__(self, exc_type, exc_value, exc_traceback):
        self.munmap()

    def mmap(self):
        """Map the FrameBuffer's planes into memory and return self.

        Several planes may share one fd; each fd is mapped only once,
        covering the furthest extent any of its planes reaches. Raises
        RuntimeError if already mapped or if a plane lies outside its
        buffer.
        """
        if self.__planes:
            raise RuntimeError('MappedFrameBuffer already mmapped')

        import os
        import mmap

        fb = self.__fb

        # Collect information about the buffers
        bufinfos = {}

        for plane in fb.planes:
            fd = plane.fd

            if fd not in bufinfos:
                # Determine the buffer size by seeking to its end.
                buflen = os.lseek(fd, 0, os.SEEK_END)
                bufinfos[fd] = {'maplen': 0, 'buflen': buflen}
            else:
                buflen = bufinfos[fd]['buflen']

            if plane.offset > buflen or plane.offset + plane.length > buflen:
                raise RuntimeError(f'plane is out of buffer: buffer length={buflen}, ' +
                                   f'plane offset={plane.offset}, plane length={plane.length}')

            # Extend the mapping of this fd far enough to cover this plane.
            bufinfos[fd]['maplen'] = max(bufinfos[fd]['maplen'], plane.offset + plane.length)

        # mmap the buffers
        maps = []

        for fd, info in bufinfos.items():
            map = mmap.mmap(fd, info['maplen'], mmap.MAP_SHARED, mmap.PROT_READ | mmap.PROT_WRITE)
            info['map'] = map
            maps.append(map)

        self.__maps = tuple(maps)

        # Create memoryviews for the planes
        planes = []

        for plane in fb.planes:
            fd = plane.fd
            info = bufinfos[fd]

            mv = memoryview(info['map'])

            # Narrow the view to this plane's slice of the mapping.
            start = plane.offset
            end = plane.offset + plane.length

            mv = mv[start:end]

            planes.append(mv)

        self.__planes = tuple(planes)

        return self

    def munmap(self):
        """Release the plane memoryviews and unmap the buffers.

        Raises RuntimeError if not currently mapped.
        """
        if not self.__planes:
            raise RuntimeError('MappedFrameBuffer not mmapped')

        # The memoryviews must be released before the maps can be closed.
        for p in self.__planes:
            p.release()

        for mm in self.__maps:
            mm.close()

        self.__planes = ()
        self.__maps = ()

    @property
    def planes(self) -> Tuple[memoryview, ...]:
        """memoryviews for the planes"""
        if not self.__planes:
            raise RuntimeError('MappedFrameBuffer not mmapped')

        return self.__planes

    @property
    def fb(self):
        # The wrapped libcamera.FrameBuffer.
        return self.__fb
|
0 | repos/libcamera/src/py/libcamera | repos/libcamera/src/py/libcamera/utils/__init__.py | # SPDX-License-Identifier: LGPL-2.1-or-later
# Copyright (C) 2022, Tomi Valkeinen <[email protected]>
from .MappedFrameBuffer import MappedFrameBuffer
|
0 | repos/libcamera/src | repos/libcamera/src/gstreamer/gstlibcamerasrc.cpp | /* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2019, Collabora Ltd.
* Author: Nicolas Dufresne <[email protected]>
*
* GStreamer Capture Element
*/
/**
* \todo The following is a list of items that needs implementation in the GStreamer plugin
* - Implement GstElement::send_event
* + Allowing application to use FLUSH/FLUSH_STOP
* + Prevent the main thread from accessing streaming thread
* - Implement GstElement::request-new-pad (multi stream)
* + Evaluate if a single streaming thread is fine
* - Add application driven request (snapshot)
* - Add framerate control
* - Add buffer importation support
*
* Requires new libcamera API:
* - Add framerate negotiation support
* - Add colorimetry support
* - Add timestamp support
* - Use unique names to select the camera devices
* - Add GstVideoMeta support (strides and offsets)
*/
#include "gstlibcamerasrc.h"
#include <atomic>
#include <queue>
#include <vector>
#include <libcamera/camera.h>
#include <libcamera/camera_manager.h>
#include <libcamera/control_ids.h>
#include <gst/base/base.h>
#include "gstlibcameraallocator.h"
#include "gstlibcamerapad.h"
#include "gstlibcamerapool.h"
#include "gstlibcamera-utils.h"
using namespace libcamera;
GST_DEBUG_CATEGORY_STATIC(source_debug);
#define GST_CAT_DEFAULT source_debug
/*
 * Wraps a libcamera Request together with the GstBuffers attached to it,
 * one buffer per stream. Owns the request, and unrefs any buffer that was
 * never detached.
 */
struct RequestWrap {
	RequestWrap(std::unique_ptr<Request> request);
	~RequestWrap();

	void attachBuffer(Stream *stream, GstBuffer *buffer);
	GstBuffer *detachBuffer(Stream *stream);

	std::unique_ptr<Request> request_;	/* The wrapped libcamera request. */
	std::map<Stream *, GstBuffer *> buffers_;	/* Per-stream GstBuffer, owned. */

	GstClockTime latency_;	/* Capture-to-completion delay, set in requestCompleted(). */
	GstClockTime pts_;	/* Presentation timestamp, set in requestCompleted(). */
};
RequestWrap::RequestWrap(std::unique_ptr<Request> request)
	: request_(std::move(request)), latency_(0), pts_(GST_CLOCK_TIME_NONE)
{
}

/* Release any buffer that was attached but never pushed downstream. */
RequestWrap::~RequestWrap()
{
	for (std::pair<Stream *const, GstBuffer *> &item : buffers_) {
		if (item.second)
			gst_buffer_unref(item.second);
	}
}
/*
 * Attach \a buffer to the request for \a stream: the underlying FrameBuffer
 * is added to the libcamera request and the GstBuffer reference is kept. An
 * existing buffer for the same stream is unreffed and replaced.
 */
void RequestWrap::attachBuffer(Stream *stream, GstBuffer *buffer)
{
	FrameBuffer *fb = gst_libcamera_buffer_get_frame_buffer(buffer);

	request_->addBuffer(stream, fb);

	auto item = buffers_.find(stream);
	if (item != buffers_.end()) {
		gst_buffer_unref(item->second);
		item->second = buffer;
	} else {
		buffers_[stream] = buffer;
	}
}
/*
 * Detach and return the buffer attached for \a stream, transferring its
 * reference to the caller. Returns nullptr if no buffer is attached.
 */
GstBuffer *RequestWrap::detachBuffer(Stream *stream)
{
	auto it = buffers_.find(stream);
	if (it == buffers_.end())
		return nullptr;

	GstBuffer *detached = it->second;
	it->second = nullptr;

	return detached;
}
/* Used for C++ object with destructors. */
struct GstLibcameraSrcState {
GstLibcameraSrc *src_;
std::shared_ptr<CameraManager> cm_;
std::shared_ptr<Camera> cam_;
std::unique_ptr<CameraConfiguration> config_;
std::vector<GstPad *> srcpads_; /* Protected by stream_lock */
/*
* Contention on this lock_ must be minimized, as it has to be taken in
* the realtime-sensitive requestCompleted() handler to protect
* queuedRequests_ and completedRequests_.
*
* stream_lock must be taken before lock_ in contexts where both locks
* need to be taken. In particular, this means that the lock_ must not
* be held while calling into other graph elements (e.g. when calling
* gst_pad_query()).
*/
GMutex lock_;
std::queue<std::unique_ptr<RequestWrap>> queuedRequests_;
std::queue<std::unique_ptr<RequestWrap>> completedRequests_;
ControlList initControls_;
guint group_id_;
int queueRequest();
void requestCompleted(Request *request);
int processRequest();
void clearRequests();
};
/*
 * Instance structure of the libcamerasrc element: the streaming task, the
 * C++ state object and the per-element properties.
 */
struct _GstLibcameraSrc {
	GstElement parent;

	GRecMutex stream_lock;	/* Serializes streaming-thread and pad operations. */
	GstTask *task;	/* Streaming task running gst_libcamera_src_task_run(). */

	gchar *camera_name;	/* "camera-name" property, may be nullptr. */
	controls::AfModeEnum auto_focus_mode = controls::AfModeManual;	/* "auto-focus-mode" property. */

	std::atomic<GstEvent *> pending_eos;	/* EOS stored by send_event(), consumed by the task. */

	GstLibcameraSrcState *state;
	GstLibcameraAllocator *allocator;
	GstFlowCombiner *flow_combiner;
};
/* GObject property identifiers. */
enum {
	PROP_0,
	PROP_CAMERA_NAME,
	PROP_AUTO_FOCUS_MODE,
};

G_DEFINE_TYPE_WITH_CODE(GstLibcameraSrc, gst_libcamera_src, GST_TYPE_ELEMENT,
			GST_DEBUG_CATEGORY_INIT(source_debug, "libcamerasrc", 0,
						"libcamera Source"))

#define TEMPLATE_CAPS GST_STATIC_CAPS("video/x-raw; image/jpeg; video/x-bayer")

/* For the simple case, we have a src pad that is always present. */
GstStaticPadTemplate src_template = {
	"src", GST_PAD_SRC, GST_PAD_ALWAYS, TEMPLATE_CAPS
};

/* More pads can be requested in state < PAUSED */
GstStaticPadTemplate request_src_template = {
	"src_%u", GST_PAD_SRC, GST_PAD_REQUEST, TEMPLATE_CAPS
};
/* Must be called with stream_lock held. */
/*
 * Create a request, attach one buffer per source pad to it and queue it to
 * the camera. Returns 0 on success, -ENOMEM if the request cannot be
 * created, -ENOBUFS if a pad's pool has no buffer available.
 */
int GstLibcameraSrcState::queueRequest()
{
	std::unique_ptr<Request> request = cam_->createRequest();
	if (!request)
		return -ENOMEM;

	std::unique_ptr<RequestWrap> wrap =
		std::make_unique<RequestWrap>(std::move(request));

	/* Acquire one buffer per pad and attach it to the request. */
	for (GstPad *srcpad : srcpads_) {
		Stream *stream = gst_libcamera_pad_get_stream(srcpad);
		GstLibcameraPool *pool = gst_libcamera_pad_get_pool(srcpad);
		GstBuffer *buffer;
		GstFlowReturn ret;

		ret = gst_buffer_pool_acquire_buffer(GST_BUFFER_POOL(pool),
						     &buffer, nullptr);
		if (ret != GST_FLOW_OK) {
			/*
			 * RequestWrap has ownership of the request, and we
			 * won't be queueing this one due to lack of buffers.
			 */
			return -ENOBUFS;
		}

		wrap->attachBuffer(stream, buffer);
	}

	GST_TRACE_OBJECT(src_, "Requesting buffers");
	cam_->queueRequest(wrap->request_.get());

	{
		GLibLocker locker(&lock_);
		queuedRequests_.push(std::move(wrap));
	}

	/* The RequestWrap will be deleted in the completion handler. */
	return 0;
}
/*
 * Request completion handler, called by libcamera. Moves the matching wrap
 * from queuedRequests_ to completedRequests_, computes the buffer PTS and
 * latency when a pipeline clock is set, and resumes the streaming task.
 * Cancelled requests are dropped.
 */
void
GstLibcameraSrcState::requestCompleted(Request *request)
{
	GST_DEBUG_OBJECT(src_, "buffers are ready")
	std::unique_ptr<RequestWrap> wrap;

	{
		GLibLocker locker(&lock_);
		wrap = std::move(queuedRequests_.front());
		queuedRequests_.pop();
	}

	/* Completion order is expected to match queueing order. */
	g_return_if_fail(wrap->request_.get() == request);

	if ((request->status() == Request::RequestCancelled)) {
		GST_DEBUG_OBJECT(src_, "Request was cancelled");
		return;
	}

	if (GST_ELEMENT_CLOCK(src_)) {
		int64_t timestamp = request->metadata().get(controls::SensorTimestamp).value_or(0);

		GstClockTime gst_base_time = GST_ELEMENT(src_)->base_time;
		GstClockTime gst_now = gst_clock_get_time(GST_ELEMENT_CLOCK(src_));
		/* \todo Need to expose which reference clock the timestamp relates to. */
		GstClockTime sys_now = g_get_monotonic_time() * 1000;

		/* Deduced from: sys_now - sys_base_time == gst_now - gst_base_time */
		GstClockTime sys_base_time = sys_now - (gst_now - gst_base_time);
		wrap->pts_ = timestamp - sys_base_time;
		wrap->latency_ = sys_now - timestamp;
	}

	{
		GLibLocker locker(&lock_);
		completedRequests_.push(std::move(wrap));
	}

	gst_task_resume(src_->task);
}
/* Must be called with stream_lock held. */
/*
 * Pop one completed request and push its buffers downstream, one per pad,
 * combining the per-pad flow returns. Returns 0 when more completed
 * requests are pending, -ENOBUFS when none are, and -EPIPE on a fatal flow
 * error (EOS and FLUSHING included).
 */
int GstLibcameraSrcState::processRequest()
{
	std::unique_ptr<RequestWrap> wrap;
	int err = 0;

	{
		GLibLocker locker(&lock_);

		if (!completedRequests_.empty()) {
			wrap = std::move(completedRequests_.front());
			completedRequests_.pop();
		}

		/* Report -ENOBUFS when no further request follows this one. */
		if (completedRequests_.empty())
			err = -ENOBUFS;
	}

	if (!wrap)
		return -ENOBUFS;

	GstFlowReturn ret = GST_FLOW_OK;
	gst_flow_combiner_reset(src_->flow_combiner);

	for (GstPad *srcpad : srcpads_) {
		Stream *stream = gst_libcamera_pad_get_stream(srcpad);
		GstBuffer *buffer = wrap->detachBuffer(stream);

		FrameBuffer *fb = gst_libcamera_buffer_get_frame_buffer(buffer);

		/* The PTS is only valid when the pipeline had a clock at completion. */
		if (GST_CLOCK_TIME_IS_VALID(wrap->pts_)) {
			GST_BUFFER_PTS(buffer) = wrap->pts_;
			gst_libcamera_pad_set_latency(srcpad, wrap->latency_);
		} else {
			GST_BUFFER_PTS(buffer) = 0;
		}

		GST_BUFFER_OFFSET(buffer) = fb->metadata().sequence;
		GST_BUFFER_OFFSET_END(buffer) = fb->metadata().sequence;

		ret = gst_pad_push(srcpad, buffer);
		ret = gst_flow_combiner_update_pad_flow(src_->flow_combiner,
							srcpad, ret);
	}

	switch (ret) {
	case GST_FLOW_OK:
		break;

	case GST_FLOW_NOT_NEGOTIATED: {
		bool reconfigure = false;
		for (GstPad *srcpad : srcpads_) {
			if (gst_pad_needs_reconfigure(srcpad)) {
				reconfigure = true;
				break;
			}
		}

		/* If no pads need a reconfiguration something went wrong. */
		if (!reconfigure)
			err = -EPIPE;

		break;
	}

	case GST_FLOW_EOS: {
		/* Forward EOS to every pad with a shared sequence number. */
		g_autoptr(GstEvent) eos = gst_event_new_eos();
		guint32 seqnum = gst_util_seqnum_next();
		gst_event_set_seqnum(eos, seqnum);
		for (GstPad *srcpad : srcpads_)
			gst_pad_push_event(srcpad, gst_event_ref(eos));

		err = -EPIPE;
		break;
	}

	case GST_FLOW_FLUSHING:
		err = -EPIPE;
		break;

	default:
		GST_ELEMENT_FLOW_ERROR(src_, ret);

		err = -EPIPE;
		break;
	}

	return err;
}
/*
 * Drop all completed requests that were not yet pushed downstream. Queued
 * (in-flight) requests are not touched here; presumably they complete as
 * cancelled when the camera is stopped -- NOTE(review): confirm callers
 * always stop the camera before calling this.
 */
void GstLibcameraSrcState::clearRequests()
{
	GLibLocker locker(&lock_);
	completedRequests_ = {};
}
/*
 * Open the camera device: get the camera manager, look up the camera (by
 * the "camera-name" property when set, otherwise the first available one)
 * and acquire it for exclusive use. Posts an element error and returns
 * false on failure.
 */
static bool
gst_libcamera_src_open(GstLibcameraSrc *self)
{
	std::shared_ptr<CameraManager> cm;
	std::shared_ptr<Camera> cam;
	gint ret;

	GST_DEBUG_OBJECT(self, "Opening camera device ...");

	cm = gst_libcamera_get_camera_manager(ret);
	if (ret) {
		GST_ELEMENT_ERROR(self, LIBRARY, INIT,
				  ("Failed listing cameras."),
				  ("libcamera::CameraMananger::start() failed: %s", g_strerror(-ret)));
		return false;
	}

	/* Copy the property under the object lock, it may change at any time. */
	g_autofree gchar *camera_name = nullptr;
	{
		GLibLocker lock(GST_OBJECT(self));
		if (self->camera_name)
			camera_name = g_strdup(self->camera_name);
	}

	if (camera_name) {
		cam = cm->get(camera_name);
		if (!cam) {
			GST_ELEMENT_ERROR(self, RESOURCE, NOT_FOUND,
					  ("Could not find a camera named '%s'.", camera_name),
					  ("libcamera::CameraMananger::get() returned nullptr"));
			return false;
		}
	} else {
		auto cameras = cm->cameras();
		if (cameras.empty()) {
			GST_ELEMENT_ERROR(self, RESOURCE, NOT_FOUND,
					  ("Could not find any supported camera on this system."),
					  ("libcamera::CameraMananger::cameras() is empty"));
			return false;
		}
		cam = cameras[0];
	}

	GST_INFO_OBJECT(self, "Using camera '%s'", cam->id().c_str());

	ret = cam->acquire();
	if (ret) {
		GST_ELEMENT_ERROR(self, RESOURCE, BUSY,
				  ("Camera '%s' is already in use.", cam->id().c_str()),
				  ("libcamera::Camera::acquire() failed: %s", g_strerror(ret)));
		return false;
	}

	cam->requestCompleted.connect(self->state, &GstLibcameraSrcState::requestCompleted);

	/* No need to lock here, we didn't start our threads yet. */
	self->state->cm_ = cm;
	self->state->cam_ = cam;

	return true;
}
/* Must be called with stream_lock held. */
/*
 * Negotiate caps with the downstream peers, configure the camera with the
 * fixated stream configurations, allocate the buffers and install a pool on
 * every source pad. Returns false if any step fails.
 */
static bool
gst_libcamera_src_negotiate(GstLibcameraSrc *self)
{
	GstLibcameraSrcState *state = self->state;

	g_autoptr(GstStructure) element_caps = gst_structure_new_empty("caps");

	/* Fixate the configuration of each stream from the peer caps. */
	for (gsize i = 0; i < state->srcpads_.size(); i++) {
		GstPad *srcpad = state->srcpads_[i];
		StreamConfiguration &stream_cfg = state->config_->at(i);

		/* Retrieve the supported caps. */
		g_autoptr(GstCaps) filter = gst_libcamera_stream_formats_to_caps(stream_cfg.formats());
		g_autoptr(GstCaps) caps = gst_pad_peer_query_caps(srcpad, filter);
		if (gst_caps_is_empty(caps))
			return false;

		/* Fixate caps and configure the stream. */
		caps = gst_caps_make_writable(caps);
		gst_libcamera_configure_stream_from_caps(stream_cfg, caps);
		gst_libcamera_get_framerate_from_caps(caps, element_caps);
	}

	/* Validate the configuration. */
	if (state->config_->validate() == CameraConfiguration::Invalid)
		return false;

	int ret = state->cam_->configure(state->config_.get());
	if (ret) {
		GST_ELEMENT_ERROR(self, RESOURCE, SETTINGS,
				  ("Failed to configure camera: %s", g_strerror(-ret)),
				  ("Camera::configure() failed with error code %i", ret));
		return false;
	}

	/* Check frame duration bounds within controls::FrameDurationLimits */
	gst_libcamera_clamp_and_set_frameduration(state->initControls_,
						  state->cam_->controls(), element_caps);

	/*
	 * Regardless if it has been modified, create clean caps and push the
	 * caps event. Downstream will decide if the caps are acceptable.
	 */
	for (gsize i = 0; i < state->srcpads_.size(); i++) {
		GstPad *srcpad = state->srcpads_[i];
		const StreamConfiguration &stream_cfg = state->config_->at(i);

		g_autoptr(GstCaps) caps = gst_libcamera_stream_configuration_to_caps(stream_cfg);
		gst_libcamera_framerate_to_caps(caps, element_caps);

		if (!gst_pad_push_event(srcpad, gst_event_new_caps(caps)))
			return false;
	}

	/* Replace any previous allocator with one for the new configuration. */
	if (self->allocator)
		g_clear_object(&self->allocator);

	self->allocator = gst_libcamera_allocator_new(state->cam_, state->config_.get());
	if (!self->allocator) {
		GST_ELEMENT_ERROR(self, RESOURCE, NO_SPACE_LEFT,
				  ("Failed to allocate memory"),
				  ("gst_libcamera_allocator_new() failed."));
		return false;
	}

	for (gsize i = 0; i < state->srcpads_.size(); i++) {
		GstPad *srcpad = state->srcpads_[i];
		const StreamConfiguration &stream_cfg = state->config_->at(i);
		GstLibcameraPool *pool = gst_libcamera_pool_new(self->allocator,
								stream_cfg.stream());
		/* Resume the streaming task whenever a buffer returns to the pool. */
		g_signal_connect_swapped(pool, "buffer-notify",
					 G_CALLBACK(gst_task_resume), self->task);

		gst_libcamera_pad_set_pool(srcpad, pool);

		/* Clear all reconfigure flags. */
		gst_pad_check_reconfigure(srcpad);
	}

	return true;
}
/*
 * One iteration of the streaming task: handle a pending EOS, renegotiate if
 * a pad requested it, queue one new request and process one completed
 * request. The task pauses itself and is resumed by buffer-notify or
 * request-completed events.
 */
static void
gst_libcamera_src_task_run(gpointer user_data)
{
	GstLibcameraSrc *self = GST_LIBCAMERA_SRC(user_data);
	GstLibcameraSrcState *state = self->state;

	/*
	 * Start by pausing the task. The task may also get resumed by the
	 * buffer-notify signal when new buffers are queued back to the pool,
	 * or by the request completion handler when a new request has
	 * completed. Both will resume the task after adding the buffers or
	 * request to their respective lists, which are checked below to decide
	 * if the task needs to be resumed for another iteration. This is thus
	 * guaranteed to be race-free, the lock taken by gst_task_pause() and
	 * gst_task_resume() serves as a memory barrier.
	 */
	gst_task_pause(self->task);

	bool doResume = false;

	/* A pending EOS posted by send_event() takes priority over everything. */
	g_autoptr(GstEvent) event = self->pending_eos.exchange(nullptr);
	if (event) {
		for (GstPad *srcpad : state->srcpads_)
			gst_pad_push_event(srcpad, gst_event_ref(event));

		return;
	}

	/* Check if a srcpad requested a renegotiation. */
	bool reconfigure = false;
	for (GstPad *srcpad : state->srcpads_) {
		if (gst_pad_check_reconfigure(srcpad)) {
			/* Check if the caps even need changing. */
			g_autoptr(GstCaps) caps = gst_pad_get_current_caps(srcpad);
			if (!gst_pad_peer_query_accept_caps(srcpad, caps)) {
				reconfigure = true;
				break;
			}
		}
	}

	if (reconfigure) {
		state->cam_->stop();
		state->clearRequests();

		if (!gst_libcamera_src_negotiate(self)) {
			GST_ELEMENT_FLOW_ERROR(self, GST_FLOW_NOT_NEGOTIATED);
			gst_task_stop(self->task);
			/*
			 * Negotiation failed: bail out instead of restarting
			 * a camera that couldn't be (re)configured.
			 */
			return;
		}

		state->cam_->start(&state->initControls_);
	}

	/*
	 * Create and queue one request. If no buffers are available the
	 * function returns -ENOBUFS, which we ignore here as that's not a
	 * fatal error.
	 */
	int ret = state->queueRequest();
	switch (ret) {
	case 0:
		/*
		 * The request was successfully queued, there may be enough
		 * buffers to create a new one. Don't pause the task to give it
		 * another try.
		 */
		doResume = true;
		break;
	case -ENOMEM:
		GST_ELEMENT_ERROR(self, RESOURCE, NO_SPACE_LEFT,
				  ("Failed to allocate request for camera '%s'.",
				   state->cam_->id().c_str()),
				  ("libcamera::Camera::createRequest() failed"));
		gst_task_stop(self->task);
		return;
	case -ENOBUFS:
	default:
		break;
	}

	/*
	 * Process one completed request, if available, and record if further
	 * requests are ready for processing.
	 */
	ret = state->processRequest();
	switch (ret) {
	case 0:
		/* Another completed request is available, resume the task. */
		doResume = true;
		break;
	case -EPIPE:
		gst_task_stop(self->task);
		return;
	case -ENOBUFS:
	default:
		break;
	}

	/* Resume the task for another iteration if needed. */
	if (doResume)
		gst_task_resume(self->task);
}
/*
 * Streaming task entry hook: push stream-start on every pad, generate and
 * negotiate the camera configuration, set up the flow combiner and segment
 * events, apply the auto-focus property and start the camera. Stops the
 * task on any failure.
 */
static void
gst_libcamera_src_task_enter(GstTask *task, [[maybe_unused]] GThread *thread,
			     gpointer user_data)
{
	GstLibcameraSrc *self = GST_LIBCAMERA_SRC(user_data);
	GLibRecLocker lock(&self->stream_lock);
	GstLibcameraSrcState *state = self->state;
	gint ret;

	GST_DEBUG_OBJECT(self, "Streaming thread has started");

	gint stream_id_num = 0;
	std::vector<StreamRole> roles;
	for (GstPad *srcpad : state->srcpads_) {
		/* Create stream-id and push stream-start. */
		g_autofree gchar *stream_id_intermediate = g_strdup_printf("%i%i", state->group_id_, stream_id_num++);
		g_autofree gchar *stream_id = gst_pad_create_stream_id(srcpad, GST_ELEMENT(self), stream_id_intermediate);
		GstEvent *event = gst_event_new_stream_start(stream_id);
		gst_event_set_group_id(event, state->group_id_);
		gst_pad_push_event(srcpad, event);

		/* Collect the streams roles for the next iteration. */
		roles.push_back(gst_libcamera_pad_get_role(srcpad));
	}

	/* Generate the stream configurations, there should be one per pad. */
	state->config_ = state->cam_->generateConfiguration(roles);
	if (state->config_ == nullptr) {
		GST_ELEMENT_ERROR(self, RESOURCE, SETTINGS,
				  ("Failed to generate camera configuration from roles"),
				  ("Camera::generateConfiguration() returned nullptr"));
		gst_task_stop(task);
		return;
	}
	g_assert(state->config_->size() == state->srcpads_.size());

	if (!gst_libcamera_src_negotiate(self)) {
		state->initControls_.clear();
		GST_ELEMENT_FLOW_ERROR(self, GST_FLOW_NOT_NEGOTIATED);
		gst_task_stop(task);
		return;
	}

	self->flow_combiner = gst_flow_combiner_new();
	for (GstPad *srcpad : state->srcpads_) {
		gst_flow_combiner_add_pad(self->flow_combiner, srcpad);

		/* Send an open segment event with time format. */
		GstSegment segment;
		gst_segment_init(&segment, GST_FORMAT_TIME);
		gst_pad_push_event(srcpad, gst_event_new_segment(&segment));
	}

	/* Apply the auto-focus mode only when the camera supports AfMode. */
	if (self->auto_focus_mode != controls::AfModeManual) {
		const ControlInfoMap &infoMap = state->cam_->controls();
		if (infoMap.find(&controls::AfMode) != infoMap.end()) {
			state->initControls_.set(controls::AfMode, self->auto_focus_mode);
		} else {
			GST_ELEMENT_ERROR(self, RESOURCE, SETTINGS,
					  ("Failed to enable auto focus"),
					  ("AfMode not supported by this camera, "
					   "please retry with 'auto-focus-mode=AfModeManual'"));
		}
	}

	ret = state->cam_->start(&state->initControls_);
	if (ret) {
		GST_ELEMENT_ERROR(self, RESOURCE, SETTINGS,
				  ("Failed to start the camera: %s", g_strerror(-ret)),
				  ("Camera.start() failed with error code %i", ret));
		gst_task_stop(task);
		return;
	}
}
/*
 * Streaming task leave hook: stop the camera, drop pending requests and
 * release the per-pad pools, the allocator and the flow combiner.
 */
static void
gst_libcamera_src_task_leave([[maybe_unused]] GstTask *task,
			     [[maybe_unused]] GThread *thread,
			     gpointer user_data)
{
	GstLibcameraSrc *self = GST_LIBCAMERA_SRC(user_data);
	GstLibcameraSrcState *state = self->state;

	GST_DEBUG_OBJECT(self, "Streaming thread is about to stop");
	state->cam_->stop();
	state->clearRequests();

	{
		GLibRecLocker locker(&self->stream_lock);
		for (GstPad *srcpad : state->srcpads_)
			gst_libcamera_pad_set_pool(srcpad, nullptr);
	}

	g_clear_object(&self->allocator);
	g_clear_pointer(&self->flow_combiner,
			(GDestroyNotify)gst_flow_combiner_free);
}
/* Release the camera and drop the references taken in _open(). */
static void
gst_libcamera_src_close(GstLibcameraSrc *self)
{
	GstLibcameraSrcState *state = self->state;
	gint ret;

	GST_DEBUG_OBJECT(self, "Releasing resources");

	state->config_.reset();

	ret = state->cam_->release();
	if (ret) {
		/* Only warn: there is nothing further to do at this point. */
		GST_ELEMENT_WARNING(self, RESOURCE, BUSY,
				    ("Camera '%s' is still in use.", state->cam_->id().c_str()),
				    ("libcamera::Camera.release() failed: %s", g_strerror(-ret)));
	}

	state->cam_.reset();
	state->cm_.reset();
}
/* GObject property setter; mutates the element under the object lock. */
static void
gst_libcamera_src_set_property(GObject *object, guint prop_id,
			       const GValue *value, GParamSpec *pspec)
{
	GLibLocker lock(GST_OBJECT(object));
	GstLibcameraSrc *self = GST_LIBCAMERA_SRC(object);

	switch (prop_id) {
	case PROP_CAMERA_NAME:
		/* Free any previously set name before duplicating the new one. */
		g_free(self->camera_name);
		self->camera_name = g_value_dup_string(value);
		break;
	case PROP_AUTO_FOCUS_MODE:
		self->auto_focus_mode = static_cast<controls::AfModeEnum>(g_value_get_enum(value));
		break;
	default:
		G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
		break;
	}
}
/* GObject property getter; reads the element under the object lock. */
static void
gst_libcamera_src_get_property(GObject *object, guint prop_id, GValue *value,
			       GParamSpec *pspec)
{
	GLibLocker lock(GST_OBJECT(object));
	GstLibcameraSrc *self = GST_LIBCAMERA_SRC(object);

	switch (prop_id) {
	case PROP_CAMERA_NAME:
		g_value_set_string(value, self->camera_name);
		break;
	case PROP_AUTO_FOCUS_MODE:
		g_value_set_enum(value, static_cast<gint>(self->auto_focus_mode));
		break;
	default:
		G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
		break;
	}
}
/*
 * GstElement state change handler. Chains up to the parent class first,
 * then performs the per-transition work: opening the camera on
 * NULL->READY, driving the streaming task around the PAUSED/PLAYING
 * transitions, and closing the camera on READY->NULL. NO_PREROLL is
 * returned for the paused states, as this is a live source.
 */
static GstStateChangeReturn
gst_libcamera_src_change_state(GstElement *element, GstStateChange transition)
{
	GstLibcameraSrc *self = GST_LIBCAMERA_SRC(element);
	GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
	GstElementClass *klass = GST_ELEMENT_CLASS(gst_libcamera_src_parent_class);
	/* Chain up first; bail out if the base class failed. */
	ret = klass->change_state(element, transition);
	if (ret == GST_STATE_CHANGE_FAILURE)
		return ret;
	switch (transition) {
	case GST_STATE_CHANGE_NULL_TO_READY:
		if (!gst_libcamera_src_open(self))
			return GST_STATE_CHANGE_FAILURE;
		break;
	case GST_STATE_CHANGE_READY_TO_PAUSED:
		/* This needs to be called after pads activation.*/
		self->state->group_id_ = gst_util_group_id_next();
		if (!gst_task_pause(self->task))
			return GST_STATE_CHANGE_FAILURE;
		/* Live source: no buffer is prerolled in PAUSED. */
		ret = GST_STATE_CHANGE_NO_PREROLL;
		break;
	case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
		gst_task_start(self->task);
		break;
	case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
		ret = GST_STATE_CHANGE_NO_PREROLL;
		break;
	case GST_STATE_CHANGE_PAUSED_TO_READY:
		/*
		 * \todo this might require some thread unblocking in the future
		 * if the streaming thread starts doing any kind of blocking
		 * operations. If this was the case, we would need to do so
		 * before pad deactivation, so before chaining to the parent
		 * change_state function.
		 */
		gst_task_join(self->task);
		break;
	case GST_STATE_CHANGE_READY_TO_NULL:
		gst_libcamera_src_close(self);
		break;
	default:
		break;
	}
	return ret;
}
/*
 * Handle application events sent to the element. EOS is latched into
 * self->pending_eos with an atomic exchange (dropping any event already
 * pending) for the streaming thread to pick up; all other events are
 * discarded.
 */
static gboolean
gst_libcamera_src_send_event(GstElement *element, GstEvent *event)
{
	GstLibcameraSrc *self = GST_LIBCAMERA_SRC(element);
	gboolean ret = FALSE;
	switch (GST_EVENT_TYPE(event)) {
	case GST_EVENT_EOS: {
		/* Takes ownership of `event`; free the one it replaces. */
		GstEvent *oldEvent = self->pending_eos.exchange(event);
		gst_clear_event(&oldEvent);
		ret = TRUE;
		break;
	}
	default:
		gst_event_unref(event);
		break;
	}
	return ret;
}
/*
 * GObject finalize: release everything allocated in
 * gst_libcamera_src_init(), then chain up to the parent class.
 */
static void
gst_libcamera_src_finalize(GObject *object)
{
	GObjectClass *klass = G_OBJECT_CLASS(gst_libcamera_src_parent_class);
	GstLibcameraSrc *self = GST_LIBCAMERA_SRC(object);
	g_rec_mutex_clear(&self->stream_lock);
	g_clear_object(&self->task);
	g_mutex_clear(&self->state->lock_);
	g_free(self->camera_name);
	delete self->state;
	/* Tail-chain to the parent finalize (both return void). */
	return klass->finalize(object);
}
/*
 * Instance init: create the private state, set up the streaming GstTask
 * with its enter/leave callbacks and lock, and expose the always-present
 * "src" pad from the static template.
 */
static void
gst_libcamera_src_init(GstLibcameraSrc *self)
{
	GstLibcameraSrcState *state = new GstLibcameraSrcState();
	GstPadTemplate *templ = gst_element_get_pad_template(GST_ELEMENT(self), "src");
	g_rec_mutex_init(&self->stream_lock);
	self->task = gst_task_new(gst_libcamera_src_task_run, self, nullptr);
	gst_task_set_enter_callback(self->task, gst_libcamera_src_task_enter, self, nullptr);
	gst_task_set_leave_callback(self->task, gst_libcamera_src_task_leave, self, nullptr);
	gst_task_set_lock(self->task, &self->stream_lock);
	g_mutex_init(&state->lock_);
	/* The default source pad is created up-front and never removed. */
	state->srcpads_.push_back(gst_pad_new_from_template(templ, "src"));
	gst_element_add_pad(GST_ELEMENT(self), state->srcpads_.back());
	GST_OBJECT_FLAG_SET(self, GST_ELEMENT_FLAG_SOURCE);
	/* C-style friend. */
	state->src_ = self;
	self->state = state;
}
/*
 * Create an additional request pad for a secondary stream. The pad is
 * registered on the element and tracked in state->srcpads_ (under the
 * stream lock). Returns the new pad, or NULL on failure.
 */
static GstPad *
gst_libcamera_src_request_new_pad(GstElement *element, GstPadTemplate *templ,
				  const gchar *name, [[maybe_unused]] const GstCaps *caps)
{
	GstLibcameraSrc *self = GST_LIBCAMERA_SRC(element);
	g_autoptr(GstPad) pad = NULL;
	GST_DEBUG_OBJECT(self, "new request pad created");
	pad = gst_pad_new_from_template(templ, name);
	/* Sink the floating reference so ownership is explicit. */
	g_object_ref_sink(pad);
	if (gst_element_add_pad(element, pad)) {
		GLibRecLocker lock(&self->stream_lock);
		/* srcpads_ holds its own reference to the pad. */
		self->state->srcpads_.push_back(reinterpret_cast<GstPad *>(g_object_ref(pad)));
	} else {
		GST_ELEMENT_ERROR(element, STREAM, FAILED,
				  ("Internal data stream error."),
				  ("Could not add pad to element"));
		return NULL;
	}
	/* Transfer our local reference to the caller. */
	return reinterpret_cast<GstPad *>(g_steal_pointer(&pad));
}
/*
 * Release a previously requested pad: drop the reference held in
 * state->srcpads_ (under the stream lock) and remove the pad from the
 * element.
 */
static void
gst_libcamera_src_release_pad(GstElement *element, GstPad *pad)
{
	GstLibcameraSrc *self = GST_LIBCAMERA_SRC(element);
	GST_DEBUG_OBJECT(self, "Pad %" GST_PTR_FORMAT " being released", pad);
	{
		GLibRecLocker lock(&self->stream_lock);
		std::vector<GstPad *> &srcpads = self->state->srcpads_;
		auto match = std::find(srcpads.begin(), srcpads.end(), pad);
		if (match != srcpads.end()) {
			/* Drop the reference taken in request_new_pad(). */
			g_object_unref(*match);
			srcpads.erase(match);
		}
	}
	gst_element_remove_pad(element, pad);
}
/*
 * Class init: wire the GObject/GstElement vfuncs, register the pad
 * templates and install the "camera-name" and "auto-focus-mode"
 * properties.
 */
static void
gst_libcamera_src_class_init(GstLibcameraSrcClass *klass)
{
	GstElementClass *element_class = GST_ELEMENT_CLASS(klass);
	GObjectClass *object_class = G_OBJECT_CLASS(klass);
	object_class->set_property = gst_libcamera_src_set_property;
	object_class->get_property = gst_libcamera_src_get_property;
	object_class->finalize = gst_libcamera_src_finalize;
	element_class->request_new_pad = gst_libcamera_src_request_new_pad;
	element_class->release_pad = gst_libcamera_src_release_pad;
	element_class->change_state = gst_libcamera_src_change_state;
	element_class->send_event = gst_libcamera_src_send_event;
	/* Fix: the author e-mail address was missing its closing '>'. */
	gst_element_class_set_metadata(element_class,
				       "libcamera Source", "Source/Video",
				       "Linux Camera source using libcamera",
				       "Nicolas Dufresne <[email protected]>");
	gst_element_class_add_static_pad_template_with_gtype(element_class,
							     &src_template,
							     GST_TYPE_LIBCAMERA_PAD);
	gst_element_class_add_static_pad_template_with_gtype(element_class,
							     &request_src_template,
							     GST_TYPE_LIBCAMERA_PAD);
	/* "camera-name" selects the camera; mutable only in READY or below. */
	GParamSpec *spec = g_param_spec_string("camera-name", "Camera Name",
					       "Select by name which camera to use.", nullptr,
					       (GParamFlags)(GST_PARAM_MUTABLE_READY
							     | G_PARAM_CONSTRUCT
							     | G_PARAM_READWRITE
							     | G_PARAM_STATIC_STRINGS));
	g_object_class_install_property(object_class, PROP_CAMERA_NAME, spec);
	/* Write-only enum property selecting the AF mode at startup. */
	spec = g_param_spec_enum("auto-focus-mode",
				 "Set auto-focus mode",
				 "Available options: AfModeManual, "
				 "AfModeAuto or AfModeContinuous.",
				 gst_libcamera_auto_focus_get_type(),
				 static_cast<gint>(controls::AfModeManual),
				 G_PARAM_WRITABLE);
	g_object_class_install_property(object_class, PROP_AUTO_FOCUS_MODE, spec);
}
|
0 | repos/libcamera/src | repos/libcamera/src/gstreamer/gstlibcamerasrc.h | /* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2019, Collabora Ltd.
* Author: Nicolas Dufresne <[email protected]>
*
* GStreamer Capture Element
*/
#pragma once
#include <libcamera/control_ids.h>
#include <gst/gst.h>
G_BEGIN_DECLS
#define GST_TYPE_LIBCAMERA_SRC gst_libcamera_src_get_type()
G_DECLARE_FINAL_TYPE(GstLibcameraSrc, gst_libcamera_src,
GST_LIBCAMERA, SRC, GstElement)
G_END_DECLS
/*
 * Register (once) and return the GstLibcameraAutoFocus enum GType used
 * by the "auto-focus-mode" property.
 *
 * Uses the g_once_init_enter()/g_once_init_leave() pattern (as
 * FrameWrap::getQuark() does) so concurrent first calls cannot race and
 * register the enum twice; the previous bare `if (!type)` check was not
 * thread-safe.
 */
inline GType
gst_libcamera_auto_focus_get_type()
{
	static gsize type_id = 0;
	static const GEnumValue values[] = {
		{
			static_cast<gint>(libcamera::controls::AfModeManual),
			"AfModeManual",
			"manual-focus",
		},
		{
			static_cast<gint>(libcamera::controls::AfModeAuto),
			"AfModeAuto",
			"automatic-auto-focus",
		},
		{
			static_cast<gint>(libcamera::controls::AfModeContinuous),
			"AfModeContinuous",
			"continuous-auto-focus",
		},
		{ 0, NULL, NULL }
	};
	if (g_once_init_enter(&type_id)) {
		GType type = g_enum_register_static("GstLibcameraAutoFocus",
						    values);
		g_once_init_leave(&type_id, type);
	}
	return static_cast<GType>(type_id);
}
|
0 | repos/libcamera/src | repos/libcamera/src/gstreamer/gstlibcameraallocator.cpp | /* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2020, Collabora Ltd.
* Author: Nicolas Dufresne <[email protected]>
*
* GStreamer Custom Allocator
*/
#include "gstlibcameraallocator.h"
#include <libcamera/camera.h>
#include <libcamera/framebuffer_allocator.h>
#include <libcamera/stream.h>
#include "gstlibcamera-utils.h"
using namespace libcamera;
static gboolean gst_libcamera_allocator_release(GstMiniObject *mini_object);
/**
* \struct FrameWrap
* \brief An internal wrapper to track the relation between FrameBuffer and
* GstMemory(s)
*
* This wrapper maintains a count of the outstanding GstMemory (there may be
* multiple GstMemory per FrameBuffer), and give back the FrameBuffer to the
* allocator pool when all memory objects have returned.
*/
struct FrameWrap {
	FrameWrap(GstAllocator *allocator, FrameBuffer *buffer,
		  gpointer stream);
	~FrameWrap();
	/* Called when one of planes_ is handed out as part of a GstBuffer. */
	void acquirePlane() { ++outstandingPlanes_; }
	/* Returns true once the last outstanding plane has been returned. */
	bool releasePlane() { return --outstandingPlanes_ == 0; }
	/* Quark used to attach this wrapper to each GstMemory as qdata. */
	static GQuark getQuark();
	/* Opaque Stream pointer, used as key into the allocator's pools. */
	gpointer stream_;
	FrameBuffer *buffer_;
	/* One GstMemory per FrameBuffer plane, kept alive for reuse. */
	std::vector<GstMemory *> planes_;
	/* Number of planes currently held by downstream buffers. */
	gint outstandingPlanes_;
};
/*
 * Wrap every plane of \a buffer into a dmabuf-backed GstMemory. The fds
 * remain owned by libcamera (DONT_CLOSE), each memory is resized to the
 * plane's offset/length window, and its dispose handler is overridden so
 * the memory returns to the pool instead of being freed.
 */
FrameWrap::FrameWrap(GstAllocator *allocator, FrameBuffer *buffer,
		     gpointer stream)
	: stream_(stream),
	  buffer_(buffer),
	  outstandingPlanes_(0)
{
	for (const FrameBuffer::Plane &plane : buffer->planes()) {
		GstMemory *mem = gst_fd_allocator_alloc(allocator, plane.fd.get(),
							plane.offset + plane.length,
							GST_FD_MEMORY_FLAG_DONT_CLOSE);
		gst_memory_resize(mem, plane.offset, plane.length);
		gst_mini_object_set_qdata(GST_MINI_OBJECT(mem), getQuark(), this, nullptr);
		GST_MINI_OBJECT(mem)->dispose = gst_libcamera_allocator_release;
		/*
		 * Drop the allocator reference held by the memory; it is
		 * re-taken whenever the memory is handed out (see
		 * gst_libcamera_allocator_prepare_buffer()).
		 */
		g_object_unref(mem->allocator);
		planes_.push_back(mem);
	}
}
/*
 * Destroy the wrapper: restore the default dispose handler and the
 * allocator reference dropped in the constructor, then release each
 * memory for real.
 */
FrameWrap::~FrameWrap()
{
	for (GstMemory *mem : planes_) {
		GST_MINI_OBJECT(mem)->dispose = nullptr;
		g_object_ref(mem->allocator);
		gst_memory_unref(mem);
	}
}
/* Lazily create (thread-safe, one-time) the qdata quark for FrameWrap. */
GQuark FrameWrap::getQuark()
{
	static gsize frame_quark = 0;
	if (g_once_init_enter(&frame_quark)) {
		GQuark quark = g_quark_from_string("GstLibcameraFrameWrap");
		g_once_init_leave(&frame_quark, quark);
	}
	return frame_quark;
}
/**
* \struct _GstLibcameraAllocator
* \brief A pooling GstDmaBufAllocator for libcamera
*
* This is a pooling GstDmaBufAllocator implementation. This implementation override
* the dispose function of memory object in order to keep them alive when they
* are disposed by downstream elements.
*/
struct _GstLibcameraAllocator {
	GstDmaBufAllocator parent;
	/* libcamera allocator backing the dmabuf frame buffers. */
	FrameBufferAllocator *fb_allocator;
	/*
	 * A hash table using Stream pointer as key and returning a GQueue of
	 * FrameWrap.
	 */
	GHashTable *pools;
};
G_DEFINE_TYPE(GstLibcameraAllocator, gst_libcamera_allocator,
GST_TYPE_DMABUF_ALLOCATOR)
/*
 * Dispose handler installed on every pooled GstMemory. Instead of letting
 * the memory be freed, re-reference it and, once all planes of the frame
 * have been returned, push the FrameWrap back onto its stream's pool.
 * Returning FALSE tells GStreamer not to free the mini object.
 */
static gboolean
gst_libcamera_allocator_release(GstMiniObject *mini_object)
{
	GstMemory *mem = GST_MEMORY_CAST(mini_object);
	GstLibcameraAllocator *self = GST_LIBCAMERA_ALLOCATOR(mem->allocator);
	{
		GLibLocker lock(GST_OBJECT(self));
		auto *frame = reinterpret_cast<FrameWrap *>(gst_mini_object_get_qdata(mini_object, FrameWrap::getQuark()));
		/* Revive the memory for reuse. */
		gst_memory_ref(mem);
		if (frame->releasePlane()) {
			auto *pool = reinterpret_cast<GQueue *>(g_hash_table_lookup(self->pools, frame->stream_));
			g_return_val_if_fail(pool, TRUE);
			g_queue_push_tail(pool, frame);
		}
	}
	/* Keep last in case we are holding on the last allocator ref. */
	g_object_unref(mem->allocator);
	/* Return FALSE so that our mini object isn't freed. */
	return FALSE;
}
/*
 * GDestroyNotify for a stream pool: drain the queue, destroying every
 * FrameWrap it still holds, then free the queue itself. Frames with
 * outstanding planes indicate a bug and are flagged.
 */
static void
gst_libcamera_allocator_free_pool(gpointer data)
{
	GQueue *pool = reinterpret_cast<GQueue *>(data);
	for (gpointer item = g_queue_pop_head(pool); item;
	     item = g_queue_pop_head(pool)) {
		FrameWrap *frame = reinterpret_cast<FrameWrap *>(item);
		g_warn_if_fail(frame->outstandingPlanes_ == 0);
		delete frame;
	}
	g_queue_free(pool);
}
/* Instance init: create the per-stream pool table (pools own their queues). */
static void
gst_libcamera_allocator_init(GstLibcameraAllocator *self)
{
	self->pools = g_hash_table_new_full(nullptr, nullptr, nullptr,
					    gst_libcamera_allocator_free_pool);
	GST_OBJECT_FLAG_SET(self, GST_ALLOCATOR_FLAG_CUSTOM_ALLOC);
}
/*
 * GObject dispose: drop the stream pools (freeing their queued frames via
 * the table's destroy notify). Safe to run more than once, as dispose may
 * be.
 */
static void
gst_libcamera_allocator_dispose(GObject *object)
{
	GstLibcameraAllocator *self = GST_LIBCAMERA_ALLOCATOR(object);
	g_clear_pointer(&self->pools, g_hash_table_unref);
	G_OBJECT_CLASS(gst_libcamera_allocator_parent_class)->dispose(object);
}
/* GObject finalize: destroy the libcamera allocator, then chain up. */
static void
gst_libcamera_allocator_finalize(GObject *object)
{
	GstLibcameraAllocator *self = GST_LIBCAMERA_ALLOCATOR(object);
	delete self->fb_allocator;
	G_OBJECT_CLASS(gst_libcamera_allocator_parent_class)->finalize(object);
}
/*
 * Class init: install dispose/finalize and disable the generic alloc
 * vfunc — memories are only created through gst_libcamera_allocator_new().
 */
static void
gst_libcamera_allocator_class_init(GstLibcameraAllocatorClass *klass)
{
	auto *allocator_class = GST_ALLOCATOR_CLASS(klass);
	auto *object_class = G_OBJECT_CLASS(klass);
	object_class->dispose = gst_libcamera_allocator_dispose;
	object_class->finalize = gst_libcamera_allocator_finalize;
	allocator_class->alloc = nullptr;
}
/*
 * Create an allocator for \a camera, allocating frame buffers for every
 * stream in \a config_ and building one FrameWrap pool per stream.
 * Returns nullptr on allocation failure.
 */
GstLibcameraAllocator *
gst_libcamera_allocator_new(std::shared_ptr<Camera> camera,
			    CameraConfiguration *config_)
{
	auto *self = GST_LIBCAMERA_ALLOCATOR(g_object_new(GST_TYPE_LIBCAMERA_ALLOCATOR,
							  nullptr));
	self->fb_allocator = new FrameBufferAllocator(camera);
	for (StreamConfiguration &streamCfg : *config_) {
		Stream *stream = streamCfg.stream();
		gint ret;
		ret = self->fb_allocator->allocate(stream);
		/*
		 * FrameBufferAllocator::allocate() returns the number of
		 * buffers allocated on success and a negative error code on
		 * failure. The previous `ret == 0` check let negative error
		 * returns through; treat anything but a positive count as
		 * failure.
		 * NOTE(review): `self` leaks on this path — confirm whether
		 * an unref is safe here.
		 */
		if (ret <= 0)
			return nullptr;
		GQueue *pool = g_queue_new();
		for (const std::unique_ptr<FrameBuffer> &buffer :
		     self->fb_allocator->buffers(stream)) {
			auto *fb = new FrameWrap(GST_ALLOCATOR(self),
						 buffer.get(), stream);
			g_queue_push_tail(pool, fb);
		}
		g_hash_table_insert(self->pools, stream, pool);
	}
	return self;
}
/*
 * Pop a pooled frame for \a stream and append all of its plane memories
 * to \a buffer, taking a plane count and an allocator reference for each.
 * Returns false if the pool is empty (or missing).
 */
bool
gst_libcamera_allocator_prepare_buffer(GstLibcameraAllocator *self,
				       Stream *stream, GstBuffer *buffer)
{
	GLibLocker lock(GST_OBJECT(self));
	auto *pool = reinterpret_cast<GQueue *>(g_hash_table_lookup(self->pools, stream));
	g_return_val_if_fail(pool, false);
	auto *frame = reinterpret_cast<FrameWrap *>(g_queue_pop_head(pool));
	if (!frame)
		return false;
	for (GstMemory *mem : frame->planes_) {
		frame->acquirePlane();
		gst_buffer_append_memory(buffer, mem);
		/* Balanced by the unref in gst_libcamera_allocator_release(). */
		g_object_ref(mem->allocator);
	}
	return true;
}
/*
 * Return the number of frames currently pooled for \a stream.
 * If the stream has no pool, the guard returns `false` (i.e. 0 as gsize).
 */
gsize
gst_libcamera_allocator_get_pool_size(GstLibcameraAllocator *self,
				      Stream *stream)
{
	GLibLocker lock(GST_OBJECT(self));
	auto *pool = reinterpret_cast<GQueue *>(g_hash_table_lookup(self->pools, stream));
	g_return_val_if_fail(pool, false);
	return pool->length;
}
/* Retrieve the libcamera FrameBuffer behind a pooled GstMemory via qdata. */
FrameBuffer *
gst_libcamera_memory_get_frame_buffer(GstMemory *mem)
{
	auto *frame = reinterpret_cast<FrameWrap *>(gst_mini_object_get_qdata(GST_MINI_OBJECT_CAST(mem),
									      FrameWrap::getQuark()));
	return frame->buffer_;
}
|
0 | repos/libcamera/src | repos/libcamera/src/gstreamer/gstlibcamera-utils.cpp | /* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2020, Collabora Ltd.
* Author: Nicolas Dufresne <[email protected]>
*
* GStreamer libcamera Utility Function
*/
#include "gstlibcamera-utils.h"
#include <libcamera/control_ids.h>
#include <libcamera/formats.h>
using namespace libcamera;
/*
 * Static mapping between GStreamer video formats and libcamera pixel
 * formats. Formats with no raw GStreamer equivalent (JPEG, Bayer) map to
 * GST_VIDEO_FORMAT_ENCODED, which is why that value cannot be reversed
 * in gst_format_to_pixel_format().
 */
static struct {
	GstVideoFormat gst_format;
	PixelFormat format;
} format_map[] = {
	/* Compressed */
	{ GST_VIDEO_FORMAT_ENCODED, formats::MJPEG },
	/* Bayer formats, gstreamer only supports 8-bit */
	{ GST_VIDEO_FORMAT_ENCODED, formats::SBGGR8 },
	{ GST_VIDEO_FORMAT_ENCODED, formats::SGBRG8 },
	{ GST_VIDEO_FORMAT_ENCODED, formats::SGRBG8 },
	{ GST_VIDEO_FORMAT_ENCODED, formats::SRGGB8 },
	{ GST_VIDEO_FORMAT_ENCODED, formats::SBGGR10 },
	{ GST_VIDEO_FORMAT_ENCODED, formats::SGBRG10 },
	{ GST_VIDEO_FORMAT_ENCODED, formats::SGRBG10 },
	{ GST_VIDEO_FORMAT_ENCODED, formats::SRGGB10 },
	{ GST_VIDEO_FORMAT_ENCODED, formats::SBGGR12 },
	{ GST_VIDEO_FORMAT_ENCODED, formats::SGBRG12 },
	{ GST_VIDEO_FORMAT_ENCODED, formats::SGRBG12 },
	{ GST_VIDEO_FORMAT_ENCODED, formats::SRGGB12 },
	{ GST_VIDEO_FORMAT_ENCODED, formats::SBGGR14 },
	{ GST_VIDEO_FORMAT_ENCODED, formats::SGBRG14 },
	{ GST_VIDEO_FORMAT_ENCODED, formats::SGRBG14 },
	{ GST_VIDEO_FORMAT_ENCODED, formats::SRGGB14 },
	{ GST_VIDEO_FORMAT_ENCODED, formats::SBGGR16 },
	{ GST_VIDEO_FORMAT_ENCODED, formats::SGBRG16 },
	{ GST_VIDEO_FORMAT_ENCODED, formats::SGRBG16 },
	{ GST_VIDEO_FORMAT_ENCODED, formats::SRGGB16 },
	/* Monochrome */
	{ GST_VIDEO_FORMAT_GRAY8, formats::R8 },
	{ GST_VIDEO_FORMAT_GRAY16_LE, formats::R16 },
	/* RGB16 */
	{ GST_VIDEO_FORMAT_RGB16, formats::RGB565 },
	/* RGB24 */
	{ GST_VIDEO_FORMAT_RGB, formats::BGR888 },
	{ GST_VIDEO_FORMAT_BGR, formats::RGB888 },
	/* RGB32 */
	{ GST_VIDEO_FORMAT_BGRx, formats::XRGB8888 },
	{ GST_VIDEO_FORMAT_RGBx, formats::XBGR8888 },
	{ GST_VIDEO_FORMAT_xBGR, formats::RGBX8888 },
	{ GST_VIDEO_FORMAT_xRGB, formats::BGRX8888 },
	{ GST_VIDEO_FORMAT_BGRA, formats::ARGB8888 },
	{ GST_VIDEO_FORMAT_RGBA, formats::ABGR8888 },
	{ GST_VIDEO_FORMAT_ABGR, formats::RGBA8888 },
	{ GST_VIDEO_FORMAT_ARGB, formats::BGRA8888 },
	/* YUV Semiplanar */
	{ GST_VIDEO_FORMAT_NV12, formats::NV12 },
	{ GST_VIDEO_FORMAT_NV21, formats::NV21 },
	{ GST_VIDEO_FORMAT_NV16, formats::NV16 },
	{ GST_VIDEO_FORMAT_NV61, formats::NV61 },
	{ GST_VIDEO_FORMAT_NV24, formats::NV24 },
	/* YUV Planar */
	{ GST_VIDEO_FORMAT_I420, formats::YUV420 },
	{ GST_VIDEO_FORMAT_YV12, formats::YVU420 },
	{ GST_VIDEO_FORMAT_Y42B, formats::YUV422 },
	/* YUV Packed */
	{ GST_VIDEO_FORMAT_UYVY, formats::UYVY },
	{ GST_VIDEO_FORMAT_VYUY, formats::VYUY },
	{ GST_VIDEO_FORMAT_YUY2, formats::YUYV },
	{ GST_VIDEO_FORMAT_YVYU, formats::YVYU },
	/* \todo NV42 is used in libcamera but is not mapped in GStreamer yet. */
};
/*
 * Translate a libcamera ColorSpace into a GstVideoColorimetry, mapping
 * primaries, transfer function, YCbCr matrix and quantization range
 * field by field.
 */
static GstVideoColorimetry
colorimetry_from_colorspace(const ColorSpace &colorSpace)
{
	GstVideoColorimetry colorimetry;
	switch (colorSpace.primaries) {
	case ColorSpace::Primaries::Raw:
		colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_UNKNOWN;
		break;
	case ColorSpace::Primaries::Smpte170m:
		colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_SMPTE170M;
		break;
	case ColorSpace::Primaries::Rec709:
		colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_BT709;
		break;
	case ColorSpace::Primaries::Rec2020:
		colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_BT2020;
		break;
	}
	switch (colorSpace.transferFunction) {
	case ColorSpace::TransferFunction::Linear:
		colorimetry.transfer = GST_VIDEO_TRANSFER_GAMMA10;
		break;
	case ColorSpace::TransferFunction::Srgb:
		colorimetry.transfer = GST_VIDEO_TRANSFER_SRGB;
		break;
	case ColorSpace::TransferFunction::Rec709:
		colorimetry.transfer = GST_VIDEO_TRANSFER_BT709;
		break;
	}
	switch (colorSpace.ycbcrEncoding) {
	case ColorSpace::YcbcrEncoding::None:
		colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_RGB;
		break;
	case ColorSpace::YcbcrEncoding::Rec601:
		colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT601;
		break;
	case ColorSpace::YcbcrEncoding::Rec709:
		colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT709;
		break;
	case ColorSpace::YcbcrEncoding::Rec2020:
		colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT2020;
		break;
	}
	switch (colorSpace.range) {
	case ColorSpace::Range::Full:
		colorimetry.range = GST_VIDEO_COLOR_RANGE_0_255;
		break;
	case ColorSpace::Range::Limited:
		colorimetry.range = GST_VIDEO_COLOR_RANGE_16_235;
		break;
	}
	return colorimetry;
}
/*
 * Translate a GstVideoColorimetry into a libcamera ColorSpace. Returns
 * ColorSpace::Raw for unknown primaries, and std::nullopt for any value
 * that has no libcamera mapping (with a warning).
 */
static std::optional<ColorSpace>
colorspace_from_colorimetry(const GstVideoColorimetry &colorimetry)
{
	std::optional<ColorSpace> colorspace = ColorSpace::Raw;
	switch (colorimetry.primaries) {
	case GST_VIDEO_COLOR_PRIMARIES_UNKNOWN:
		/* Unknown primaries map to raw colorspace in gstreamer */
		return ColorSpace::Raw;
	case GST_VIDEO_COLOR_PRIMARIES_SMPTE170M:
		colorspace->primaries = ColorSpace::Primaries::Smpte170m;
		break;
	case GST_VIDEO_COLOR_PRIMARIES_BT709:
		colorspace->primaries = ColorSpace::Primaries::Rec709;
		break;
	case GST_VIDEO_COLOR_PRIMARIES_BT2020:
		colorspace->primaries = ColorSpace::Primaries::Rec2020;
		break;
	default:
		GST_WARNING("Colorimetry primaries %d not mapped in gstlibcamera",
			    colorimetry.primaries);
		return std::nullopt;
	}
	switch (colorimetry.transfer) {
	/* Transfer function mappings inspired from v4l2src plugin */
	case GST_VIDEO_TRANSFER_GAMMA18:
	case GST_VIDEO_TRANSFER_GAMMA20:
	case GST_VIDEO_TRANSFER_GAMMA22:
	case GST_VIDEO_TRANSFER_GAMMA28:
		GST_WARNING("GAMMA 18, 20, 22, 28 transfer functions not supported");
		[[fallthrough]];
	case GST_VIDEO_TRANSFER_GAMMA10:
		colorspace->transferFunction = ColorSpace::TransferFunction::Linear;
		break;
	case GST_VIDEO_TRANSFER_SRGB:
		colorspace->transferFunction = ColorSpace::TransferFunction::Srgb;
		break;
#if GST_CHECK_VERSION(1, 18, 0)
	case GST_VIDEO_TRANSFER_BT601:
	case GST_VIDEO_TRANSFER_BT2020_10:
#endif
	case GST_VIDEO_TRANSFER_BT2020_12:
	case GST_VIDEO_TRANSFER_BT709:
		colorspace->transferFunction = ColorSpace::TransferFunction::Rec709;
		break;
	default:
		GST_WARNING("Colorimetry transfer function %d not mapped in gstlibcamera",
			    colorimetry.transfer);
		return std::nullopt;
	}
	switch (colorimetry.matrix) {
	case GST_VIDEO_COLOR_MATRIX_RGB:
		colorspace->ycbcrEncoding = ColorSpace::YcbcrEncoding::None;
		break;
	/* FCC is about the same as BT601 with less digit */
	case GST_VIDEO_COLOR_MATRIX_FCC:
	case GST_VIDEO_COLOR_MATRIX_BT601:
		colorspace->ycbcrEncoding = ColorSpace::YcbcrEncoding::Rec601;
		break;
	case GST_VIDEO_COLOR_MATRIX_BT709:
		colorspace->ycbcrEncoding = ColorSpace::YcbcrEncoding::Rec709;
		break;
	case GST_VIDEO_COLOR_MATRIX_BT2020:
		colorspace->ycbcrEncoding = ColorSpace::YcbcrEncoding::Rec2020;
		break;
	default:
		GST_WARNING("Colorimetry matrix %d not mapped in gstlibcamera",
			    colorimetry.matrix);
		return std::nullopt;
	}
	switch (colorimetry.range) {
	case GST_VIDEO_COLOR_RANGE_0_255:
		colorspace->range = ColorSpace::Range::Full;
		break;
	case GST_VIDEO_COLOR_RANGE_16_235:
		colorspace->range = ColorSpace::Range::Limited;
		break;
	default:
		GST_WARNING("Colorimetry range %d not mapped in gstlibcamera",
			    colorimetry.range);
		return std::nullopt;
	}
	return colorspace;
}
/*
 * Look up the GStreamer video format for a libcamera pixel format via the
 * static format_map table; GST_VIDEO_FORMAT_UNKNOWN if unmapped.
 */
static GstVideoFormat
pixel_format_to_gst_format(const PixelFormat &format)
{
	for (const auto &entry : format_map) {
		if (entry.format == format)
			return entry.gst_format;
	}
	return GST_VIDEO_FORMAT_UNKNOWN;
}
/*
 * Reverse lookup in format_map. GST_VIDEO_FORMAT_ENCODED is ambiguous —
 * many libcamera formats map to it — so it is rejected up-front; an
 * invalid (default-constructed) PixelFormat marks failure.
 */
static PixelFormat
gst_format_to_pixel_format(GstVideoFormat gst_format)
{
	if (gst_format == GST_VIDEO_FORMAT_ENCODED)
		return PixelFormat{};
	for (const auto &entry : format_map) {
		if (entry.gst_format == gst_format)
			return entry.format;
	}
	return PixelFormat{};
}
/*
 * Map a libcamera Bayer format to the caps "format" string used by
 * video/x-bayer; NULL for non-Bayer formats.
 */
static const gchar *
bayer_format_to_string(int format)
{
	switch (format) {
	case formats::SBGGR8:
		return "bggr";
	case formats::SGBRG8:
		return "gbrg";
	case formats::SGRBG8:
		return "grbg";
	case formats::SRGGB8:
		return "rggb";
	case formats::SBGGR10:
		return "bggr10le";
	case formats::SGBRG10:
		return "gbrg10le";
	case formats::SGRBG10:
		return "grbg10le";
	case formats::SRGGB10:
		return "rggb10le";
	case formats::SBGGR12:
		return "bggr12le";
	case formats::SGBRG12:
		return "gbrg12le";
	case formats::SGRBG12:
		return "grbg12le";
	case formats::SRGGB12:
		return "rggb12le";
	case formats::SBGGR14:
		return "bggr14le";
	case formats::SGBRG14:
		return "gbrg14le";
	case formats::SGRBG14:
		return "grbg14le";
	case formats::SRGGB14:
		return "rggb14le";
	case formats::SBGGR16:
		return "bggr16le";
	case formats::SGBRG16:
		return "gbrg16le";
	case formats::SGRBG16:
		return "grbg16le";
	case formats::SRGGB16:
		return "rggb16le";
	}
	return NULL;
}
/*
 * Build a caps structure (without size fields) for a pixel format:
 * "video/x-raw" with a format string for raw formats, "image/jpeg" for
 * MJPEG, "video/x-bayer" for 8-bit Bayer. Returns nullptr for formats
 * with no caps mapping.
 */
static GstStructure *
bare_structure_from_format(const PixelFormat &format)
{
	GstVideoFormat gst_format = pixel_format_to_gst_format(format);
	if (gst_format == GST_VIDEO_FORMAT_UNKNOWN)
		return nullptr;
	if (gst_format != GST_VIDEO_FORMAT_ENCODED)
		return gst_structure_new("video/x-raw", "format", G_TYPE_STRING,
					 gst_video_format_to_string(gst_format), nullptr);
	switch (format) {
	case formats::MJPEG:
		return gst_structure_new_empty("image/jpeg");
	case formats::SBGGR8:
	case formats::SGBRG8:
	case formats::SGRBG8:
	case formats::SRGGB8:
		return gst_structure_new("video/x-bayer", "format", G_TYPE_STRING,
					 bayer_format_to_string(format), nullptr);
	default:
		return nullptr;
	}
}
/*
 * Convert the formats supported by a stream into GstCaps: one structure
 * per discrete (format, size) pair, plus a stepped width/height range
 * structure when the stream advertises one. Unmapped formats are skipped
 * with a warning.
 */
GstCaps *
gst_libcamera_stream_formats_to_caps(const StreamFormats &formats)
{
	GstCaps *caps = gst_caps_new_empty();
	for (PixelFormat pixelformat : formats.pixelformats()) {
		g_autoptr(GstStructure) bare_s = bare_structure_from_format(pixelformat);
		if (!bare_s) {
			GST_WARNING("Unsupported DRM format %" GST_FOURCC_FORMAT,
				    GST_FOURCC_ARGS(pixelformat));
			continue;
		}
		for (const Size &size : formats.sizes(pixelformat)) {
			GstStructure *s = gst_structure_copy(bare_s);
			gst_structure_set(s,
					  "width", G_TYPE_INT, size.width,
					  "height", G_TYPE_INT, size.height,
					  nullptr);
			gst_caps_append_structure(caps, s);
		}
		const SizeRange &range = formats.range(pixelformat);
		/* Only express a range when both steps are non-zero. */
		if (range.hStep && range.vStep) {
			GstStructure *s = gst_structure_copy(bare_s);
			GValue val = G_VALUE_INIT;
			g_value_init(&val, GST_TYPE_INT_RANGE);
			gst_value_set_int_range_step(&val, range.min.width, range.max.width, range.hStep);
			gst_structure_set_value(s, "width", &val);
			gst_value_set_int_range_step(&val, range.min.height, range.max.height, range.vStep);
			gst_structure_set_value(s, "height", &val);
			g_value_unset(&val);
			gst_caps_append_structure(caps, s);
		}
	}
	return caps;
}
/*
 * Convert a single StreamConfiguration into fixed GstCaps (format, size
 * and, when present, colorimetry).
 */
GstCaps *
gst_libcamera_stream_configuration_to_caps(const StreamConfiguration &stream_cfg)
{
	GstCaps *caps = gst_caps_new_empty();
	GstStructure *s = bare_structure_from_format(stream_cfg.pixelFormat);
	gst_structure_set(s,
			  "width", G_TYPE_INT, stream_cfg.size.width,
			  "height", G_TYPE_INT, stream_cfg.size.height,
			  nullptr);
	if (stream_cfg.colorSpace) {
		GstVideoColorimetry colorimetry = colorimetry_from_colorspace(stream_cfg.colorSpace.value());
		g_autofree gchar *colorimetry_str = gst_video_colorimetry_to_string(&colorimetry);
		if (colorimetry_str)
			gst_structure_set(s, "colorimetry", G_TYPE_STRING, colorimetry_str, nullptr);
		else
			/* Aborts: an unmappable colorimetry is a programming error. */
			g_error("Got invalid colorimetry from ColorSpace: %s",
				ColorSpace::toString(stream_cfg.colorSpace).c_str());
	}
	gst_caps_append_structure(caps, s);
	return caps;
}
/*
 * Fixate \a caps against the stream's default configuration and write the
 * negotiated format, size and colorimetry back into \a stream_cfg. Among
 * the caps structures, the fixed-size structure closest to the default
 * size wins; range structures are fixated to the nearest value and used
 * only when no fixed structure exists.
 */
void
gst_libcamera_configure_stream_from_caps(StreamConfiguration &stream_cfg,
					 GstCaps *caps)
{
	GstVideoFormat gst_format = pixel_format_to_gst_format(stream_cfg.pixelFormat);
	guint i;
	gint best_fixed = -1, best_in_range = -1;
	GstStructure *s;
	/*
	 * These are delta weight computed from:
	 *   ABS(width - stream_cfg.size.width) * ABS(height - stream_cfg.size.height)
	 */
	guint best_fixed_delta = G_MAXUINT;
	guint best_in_range_delta = G_MAXUINT;
	/* First fixate the caps using default configuration value. */
	g_assert(gst_caps_is_writable(caps));
	/* Lookup the structure for a close match to the stream_cfg.size */
	for (i = 0; i < gst_caps_get_size(caps); i++) {
		s = gst_caps_get_structure(caps, i);
		gint width, height;
		guint delta;
		if (gst_structure_has_field_typed(s, "width", G_TYPE_INT) &&
		    gst_structure_has_field_typed(s, "height", G_TYPE_INT)) {
			gst_structure_get_int(s, "width", &width);
			gst_structure_get_int(s, "height", &height);
			delta = ABS(width - (gint)stream_cfg.size.width) * ABS(height - (gint)stream_cfg.size.height);
			if (delta < best_fixed_delta) {
				best_fixed_delta = delta;
				best_fixed = i;
			}
		} else {
			/* Ranged structure: snap it to the default size. */
			gst_structure_fixate_field_nearest_int(s, "width", stream_cfg.size.width);
			gst_structure_fixate_field_nearest_int(s, "height", stream_cfg.size.height);
			gst_structure_get_int(s, "width", &width);
			gst_structure_get_int(s, "height", &height);
			delta = ABS(width - (gint)stream_cfg.size.width) * ABS(height - (gint)stream_cfg.size.height);
			if (delta < best_in_range_delta) {
				best_in_range_delta = delta;
				best_in_range = i;
			}
		}
	}
	/* Prefer reliable fixed value over ranges */
	if (best_fixed >= 0)
		s = gst_caps_get_structure(caps, best_fixed);
	else
		s = gst_caps_get_structure(caps, best_in_range);
	if (gst_structure_has_name(s, "video/x-raw")) {
		const gchar *format = gst_video_format_to_string(gst_format);
		gst_structure_fixate_field_string(s, "format", format);
	}
	/* Then configure the stream with the result. */
	if (gst_structure_has_name(s, "video/x-raw")) {
		const gchar *format = gst_structure_get_string(s, "format");
		gst_format = gst_video_format_from_string(format);
		stream_cfg.pixelFormat = gst_format_to_pixel_format(gst_format);
	} else if (gst_structure_has_name(s, "image/jpeg")) {
		stream_cfg.pixelFormat = formats::MJPEG;
	} else {
		g_critical("Unsupported media type: %s", gst_structure_get_name(s));
	}
	gint width, height;
	gst_structure_get_int(s, "width", &width);
	gst_structure_get_int(s, "height", &height);
	stream_cfg.size.width = width;
	stream_cfg.size.height = height;
	/* Configure colorimetry */
	if (gst_structure_has_field(s, "colorimetry")) {
		const gchar *colorimetry_str = gst_structure_get_string(s, "colorimetry");
		GstVideoColorimetry colorimetry;
		if (!gst_video_colorimetry_from_string(&colorimetry, colorimetry_str))
			g_critical("Invalid colorimetry %s", colorimetry_str);
		stream_cfg.colorSpace = colorspace_from_colorimetry(colorimetry);
	}
}
/*
 * Copy the "framerate" fraction from the first structure of \a caps into
 * \a element_caps, defaulting to 30/1 when absent or invalid.
 */
void gst_libcamera_get_framerate_from_caps(GstCaps *caps,
					   GstStructure *element_caps)
{
	GstStructure *s = gst_caps_get_structure(caps, 0);
	/*
	 * Default to 30 fps. If the "framerate" fraction is invalid below,
	 * libcamerasrc will set 30fps as the framerate.
	 */
	gint fps_n = 30, fps_d = 1;
	if (gst_structure_has_field_typed(s, "framerate", GST_TYPE_FRACTION)) {
		if (!gst_structure_get_fraction(s, "framerate", &fps_n, &fps_d))
			GST_WARNING("Invalid framerate in the caps");
	}
	gst_structure_set(element_caps, "framerate", GST_TYPE_FRACTION,
			  fps_n, fps_d, nullptr);
}
/*
 * Derive a frame duration (in microseconds) from the negotiated
 * framerate, clamp it to the camera's FrameDurationLimits, update the
 * advertised framerate if clamping changed it, and set the limits as
 * initial controls.
 *
 * NOTE(review): a 0/N "framerate" (variable rate in GStreamer) makes
 * target_duration infinite, which then clamps to the maximum duration —
 * confirm this is the intended behavior for variable-rate caps.
 */
void gst_libcamera_clamp_and_set_frameduration(ControlList &initCtrls,
					       const ControlInfoMap &cam_ctrls,
					       GstStructure *element_caps)
{
	gint fps_caps_n, fps_caps_d;
	if (!gst_structure_has_field_typed(element_caps, "framerate", GST_TYPE_FRACTION))
		return;
	auto iterFrameDuration = cam_ctrls.find(&controls::FrameDurationLimits);
	if (iterFrameDuration == cam_ctrls.end()) {
		GST_WARNING("FrameDurationLimits not found in camera controls.");
		return;
	}
	const GValue *framerate = gst_structure_get_value(element_caps, "framerate");
	fps_caps_n = gst_value_get_fraction_numerator(framerate);
	fps_caps_d = gst_value_get_fraction_denominator(framerate);
	/* Duration in µs: floating-point division, then truncation. */
	int64_t target_duration = (fps_caps_d * 1000000.0) / fps_caps_n;
	int64_t min_frame_duration = iterFrameDuration->second.min().get<int64_t>();
	int64_t max_frame_duration = iterFrameDuration->second.max().get<int64_t>();
	int64_t frame_duration = std::clamp(target_duration,
					    min_frame_duration,
					    max_frame_duration);
	if (frame_duration != target_duration) {
		gint framerate_clamped = 1000000 / frame_duration;
		/*
		 * Update the clamped framerate which then will be exposed in
		 * downstream caps.
		 */
		gst_structure_set(element_caps, "framerate", GST_TYPE_FRACTION,
				  framerate_clamped, 1, nullptr);
	}
	initCtrls.set(controls::FrameDurationLimits,
		      { frame_duration, frame_duration });
}
/*
 * Propagate the "framerate" fraction from \a element_caps into the first
 * structure of \a caps; a no-op when no valid fraction is present.
 */
void gst_libcamera_framerate_to_caps(GstCaps *caps, const GstStructure *element_caps)
{
	const GValue *fraction = gst_structure_get_value(element_caps, "framerate");
	if (!GST_VALUE_HOLDS_FRACTION(fraction))
		return;
	GstStructure *structure = gst_caps_get_structure(caps, 0);
	gst_structure_set(structure, "framerate", GST_TYPE_FRACTION,
			  gst_value_get_fraction_numerator(fraction),
			  gst_value_get_fraction_denominator(fraction),
			  nullptr);
}
#if !GST_CHECK_VERSION(1, 17, 1)
/*
 * Fallback implementation of gst_task_resume() for GStreamer < 1.17.1:
 * move a paused task back to started and wake it up.
 */
gboolean
gst_task_resume(GstTask *task)
{
	/* We only want to resume the task if it's paused. */
	GLibLocker lock(GST_OBJECT(task));
	if (GST_TASK_STATE(task) != GST_TASK_PAUSED)
		return FALSE;
	GST_TASK_STATE(task) = GST_TASK_STARTED;
	GST_TASK_SIGNAL(task);
	return TRUE;
}
#endif
G_LOCK_DEFINE_STATIC(cm_singleton_lock);
static std::weak_ptr<CameraManager> cm_singleton_ptr;
/*
 * Return the process-wide CameraManager singleton, creating and starting
 * it on first use (under cm_singleton_lock). \a ret receives the start()
 * result, or 0 when an existing instance is reused.
 */
std::shared_ptr<CameraManager>
gst_libcamera_get_camera_manager(int &ret)
{
	std::shared_ptr<CameraManager> cm;
	G_LOCK(cm_singleton_lock);
	/* The weak_ptr lets the manager die once all users drop it. */
	cm = cm_singleton_ptr.lock();
	if (!cm) {
		cm = std::make_shared<CameraManager>();
		cm_singleton_ptr = cm;
		ret = cm->start();
	} else {
		ret = 0;
	}
	G_UNLOCK(cm_singleton_lock);
	return cm;
}
|
0 | repos/libcamera/src | repos/libcamera/src/gstreamer/gstlibcamerapad.h | /* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2019, Collabora Ltd.
* Author: Nicolas Dufresne <[email protected]>
*
* GStreamer Capture Element
*/
#pragma once
#include "gstlibcamerapool.h"
#include <gst/gst.h>
#include <libcamera/stream.h>
#define GST_TYPE_LIBCAMERA_PAD gst_libcamera_pad_get_type()
G_DECLARE_FINAL_TYPE(GstLibcameraPad, gst_libcamera_pad, GST_LIBCAMERA, PAD, GstPad)
libcamera::StreamRole gst_libcamera_pad_get_role(GstPad *pad);
GstLibcameraPool *gst_libcamera_pad_get_pool(GstPad *pad);
void gst_libcamera_pad_set_pool(GstPad *pad, GstLibcameraPool *pool);
libcamera::Stream *gst_libcamera_pad_get_stream(GstPad *pad);
void gst_libcamera_pad_set_latency(GstPad *pad, GstClockTime latency);
|
0 | repos/libcamera/src | repos/libcamera/src/gstreamer/gstlibcamera.cpp | /* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2019, Collabora Ltd.
* Author: Nicolas Dufresne <[email protected]>
*
* GStreamer plugin
*/
#include "gstlibcameraprovider.h"
#include "gstlibcamerasrc.h"
/*
 * Plugin entry point: register the source element and the device
 * provider; either registration failure aborts plugin loading.
 */
static gboolean
plugin_init(GstPlugin *plugin)
{
	if (!gst_element_register(plugin, "libcamerasrc", GST_RANK_PRIMARY,
				  GST_TYPE_LIBCAMERA_SRC))
		return FALSE;
	if (!gst_device_provider_register(plugin, "libcameraprovider",
					  GST_RANK_PRIMARY,
					  GST_TYPE_LIBCAMERA_PROVIDER))
		return FALSE;
	return TRUE;
}
GST_PLUGIN_DEFINE(GST_VERSION_MAJOR, GST_VERSION_MINOR,
libcamera, "libcamera capture plugin",
plugin_init, VERSION, "LGPL", PACKAGE, "https://libcamera.org")
|
0 | repos/libcamera/src | repos/libcamera/src/gstreamer/gstlibcameraallocator.h | /* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2020, Collabora Ltd.
* Author: Nicolas Dufresne <[email protected]>
*
* GStreamer Custom Allocator
*/
#pragma once
#include <gst/gst.h>
#include <gst/allocators/allocators.h>
#include <libcamera/camera.h>
#include <libcamera/stream.h>
#define GST_TYPE_LIBCAMERA_ALLOCATOR gst_libcamera_allocator_get_type()
G_DECLARE_FINAL_TYPE(GstLibcameraAllocator, gst_libcamera_allocator,
GST_LIBCAMERA, ALLOCATOR, GstDmaBufAllocator)
GstLibcameraAllocator *gst_libcamera_allocator_new(std::shared_ptr<libcamera::Camera> camera,
libcamera::CameraConfiguration *config_);
bool gst_libcamera_allocator_prepare_buffer(GstLibcameraAllocator *self,
libcamera::Stream *stream,
GstBuffer *buffer);
gsize gst_libcamera_allocator_get_pool_size(GstLibcameraAllocator *allocator,
libcamera::Stream *stream);
libcamera::FrameBuffer *gst_libcamera_memory_get_frame_buffer(GstMemory *mem);
|
0 | repos/libcamera/src | repos/libcamera/src/gstreamer/gstlibcamera-utils.h | /* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2020, Collabora Ltd.
* Author: Nicolas Dufresne <[email protected]>
*
* GStreamer libcamera Utility Functions
*/
#pragma once
#include <libcamera/camera_manager.h>
#include <libcamera/controls.h>
#include <libcamera/stream.h>
#include <gst/gst.h>
#include <gst/video/video.h>
GstCaps *gst_libcamera_stream_formats_to_caps(const libcamera::StreamFormats &formats);
GstCaps *gst_libcamera_stream_configuration_to_caps(const libcamera::StreamConfiguration &stream_cfg);
void gst_libcamera_configure_stream_from_caps(libcamera::StreamConfiguration &stream_cfg,
GstCaps *caps);
void gst_libcamera_get_framerate_from_caps(GstCaps *caps, GstStructure *element_caps);
void gst_libcamera_clamp_and_set_frameduration(libcamera::ControlList &controls,
const libcamera::ControlInfoMap &camera_controls,
GstStructure *element_caps);
void gst_libcamera_framerate_to_caps(GstCaps *caps, const GstStructure *element_caps);
#if !GST_CHECK_VERSION(1, 16, 0)
static inline void gst_clear_event(GstEvent **event_ptr)
{
g_clear_pointer(event_ptr, gst_mini_object_unref);
}
#endif
#if !GST_CHECK_VERSION(1, 17, 1)
gboolean gst_task_resume(GstTask *task);
#endif
std::shared_ptr<libcamera::CameraManager> gst_libcamera_get_camera_manager(int &ret);
/**
* \class GLibLocker
* \brief A simple scoped mutex locker for GMutex
*/
class GLibLocker
{
public:
	/* Lock a bare GMutex for the lifetime of this object. */
	GLibLocker(GMutex *mutex)
		: mutex_(mutex)
	{
		g_mutex_lock(mutex_);
	}
	/* Convenience overload locking a GstObject's built-in object lock. */
	GLibLocker(GstObject *object)
		: mutex_(GST_OBJECT_GET_LOCK(object))
	{
		g_mutex_lock(mutex_);
	}
	/* Unlock on scope exit. */
	~GLibLocker()
	{
		g_mutex_unlock(mutex_);
	}
private:
	GMutex *mutex_;
};
/**
* \class GLibRecLocker
* \brief A simple scoped mutex locker for GRecMutex
*/
class GLibRecLocker
{
public:
	/* Lock a recursive GRecMutex for the lifetime of this object. */
	GLibRecLocker(GRecMutex *mutex)
		: mutex_(mutex)
	{
		g_rec_mutex_lock(mutex_);
	}
	/* Unlock on scope exit. */
	~GLibRecLocker()
	{
		g_rec_mutex_unlock(mutex_);
	}
private:
	GRecMutex *mutex_;
};
|
0 | repos/libcamera/src | repos/libcamera/src/gstreamer/gstlibcameraprovider.h | /* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2020, Collabora Ltd.
* Author: Nicolas Dufresne <[email protected]>
*
* GStreamer Device Provider
*/
#pragma once
#include <gst/gst.h>
G_BEGIN_DECLS
#define GST_TYPE_LIBCAMERA_PROVIDER gst_libcamera_provider_get_type()
G_DECLARE_FINAL_TYPE(GstLibcameraProvider, gst_libcamera_provider,
GST_LIBCAMERA, PROVIDER, GstDeviceProvider)
G_END_DECLS
|
0 | repos/libcamera/src | repos/libcamera/src/gstreamer/gstlibcamerapool.cpp | /* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2020, Collabora Ltd.
* Author: Nicolas Dufresne <[email protected]>
*
* GStreamer Buffer Pool
*/
#include "gstlibcamerapool.h"
#include <libcamera/stream.h>
#include "gstlibcamera-utils.h"
using namespace libcamera;
enum {
SIGNAL_BUFFER_NOTIFY,
N_SIGNALS
};
static guint signals[N_SIGNALS];
struct _GstLibcameraPool {
GstBufferPool parent;
GstAtomicQueue *queue;
GstLibcameraAllocator *allocator;
Stream *stream;
};
G_DEFINE_TYPE(GstLibcameraPool, gst_libcamera_pool, GST_TYPE_BUFFER_POOL)
static GstFlowReturn
gst_libcamera_pool_acquire_buffer(GstBufferPool *pool, GstBuffer **buffer,
				  [[maybe_unused]] GstBufferPoolAcquireParams *params)
{
	GstLibcameraPool *self = GST_LIBCAMERA_POOL(pool);
	/* Pop a pooled GstBuffer shell; an empty queue is an error as the
	 * pool is pre-sized to the number of allocated frame buffers. */
	GstBuffer *buf = GST_BUFFER(gst_atomic_queue_pop(self->queue));
	if (!buf)
		return GST_FLOW_ERROR;
	/* Attach the FrameBuffer memories for this stream; on failure put
	 * the shell back so it isn't lost from the pool. */
	if (!gst_libcamera_allocator_prepare_buffer(self->allocator, self->stream, buf)) {
		gst_atomic_queue_push(self->queue, buf);
		return GST_FLOW_ERROR;
	}
	*buffer = buf;
	return GST_FLOW_OK;
}
static void
gst_libcamera_pool_reset_buffer(GstBufferPool *pool, GstBuffer *buffer)
{
	GstBufferPoolClass *klass = GST_BUFFER_POOL_CLASS(gst_libcamera_pool_parent_class);
	/* Clears all the memories and only pool the GstBuffer objects.
	 * Fresh memories are attached again in acquire_buffer(). */
	gst_buffer_remove_all_memory(buffer);
	klass->reset_buffer(pool, buffer);
	GST_BUFFER_FLAGS(buffer) = 0;
}
static void
gst_libcamera_pool_release_buffer(GstBufferPool *pool, GstBuffer *buffer)
{
	GstLibcameraPool *self = GST_LIBCAMERA_POOL(pool);
	/* Emit "buffer-notify" only on the empty-to-non-empty transition,
	 * so waiters learn a buffer has become available again. */
	bool do_notify = gst_atomic_queue_length(self->queue) == 0;
	gst_atomic_queue_push(self->queue, buffer);
	if (do_notify)
		g_signal_emit(self, signals[SIGNAL_BUFFER_NOTIFY], 0);
}
static void
gst_libcamera_pool_init(GstLibcameraPool *self)
{
	/* 4 is an initial size hint; the atomic queue grows as needed. */
	self->queue = gst_atomic_queue_new(4);
}
static void
gst_libcamera_pool_finalize(GObject *object)
{
	GstLibcameraPool *self = GST_LIBCAMERA_POOL(object);
	GstBuffer *buf;
	/* Drain and unref every pooled buffer shell before the queue and
	 * allocator references are dropped. */
	while ((buf = GST_BUFFER(gst_atomic_queue_pop(self->queue))))
		gst_buffer_unref(buf);
	gst_atomic_queue_unref(self->queue);
	g_object_unref(self->allocator);
	G_OBJECT_CLASS(gst_libcamera_pool_parent_class)->finalize(object);
}
static void
gst_libcamera_pool_class_init(GstLibcameraPoolClass *klass)
{
	auto *object_class = G_OBJECT_CLASS(klass);
	auto *pool_class = GST_BUFFER_POOL_CLASS(klass);
	object_class->finalize = gst_libcamera_pool_finalize;
	/* This pool cannot be configured or activated; disable start(). */
	pool_class->start = nullptr;
	pool_class->acquire_buffer = gst_libcamera_pool_acquire_buffer;
	pool_class->reset_buffer = gst_libcamera_pool_reset_buffer;
	pool_class->release_buffer = gst_libcamera_pool_release_buffer;
	/* Emitted when a buffer is returned to a previously empty pool. */
	signals[SIGNAL_BUFFER_NOTIFY] = g_signal_new("buffer-notify",
						     G_OBJECT_CLASS_TYPE(klass), G_SIGNAL_RUN_LAST,
						     0, nullptr, nullptr, nullptr,
						     G_TYPE_NONE, 0);
}
/*
 * Create a pool bound to \a allocator and \a stream, pre-populated with
 * one empty GstBuffer shell per frame buffer allocated for the stream.
 * The pool takes a reference on the allocator; \a stream is borrowed.
 */
GstLibcameraPool *
gst_libcamera_pool_new(GstLibcameraAllocator *allocator, Stream *stream)
{
	auto *pool = GST_LIBCAMERA_POOL(g_object_new(GST_TYPE_LIBCAMERA_POOL, nullptr));

	pool->allocator = GST_LIBCAMERA_ALLOCATOR(g_object_ref(allocator));
	pool->stream = stream;

	gsize pool_size = gst_libcamera_allocator_get_pool_size(allocator, stream);
	while (pool_size-- > 0)
		gst_atomic_queue_push(pool->queue, gst_buffer_new());

	return pool;
}
Stream *
gst_libcamera_pool_get_stream(GstLibcameraPool *self)
{
	/* Borrowed pointer to the stream this pool was created for. */
	return self->stream;
}
FrameBuffer *
gst_libcamera_buffer_get_frame_buffer(GstBuffer *buffer)
{
	/* The FrameBuffer is recorded on the buffer's first GstMemory. */
	GstMemory *mem = gst_buffer_peek_memory(buffer, 0);
	return gst_libcamera_memory_get_frame_buffer(mem);
}
|
0 | repos/libcamera/src | repos/libcamera/src/gstreamer/gstlibcamerapad.cpp | /* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2019, Collabora Ltd.
* Author: Nicolas Dufresne <[email protected]>
*
* GStreamer Capture Pad
*/
#include "gstlibcamerapad.h"
#include <libcamera/stream.h>
#include "gstlibcamera-utils.h"
using namespace libcamera;
struct _GstLibcameraPad {
GstPad parent;
StreamRole role;
GstLibcameraPool *pool;
GstClockTime latency;
};
enum {
PROP_0,
PROP_STREAM_ROLE
};
G_DEFINE_TYPE(GstLibcameraPad, gst_libcamera_pad, GST_TYPE_PAD)
static void
gst_libcamera_pad_set_property(GObject *object, guint prop_id,
			       const GValue *value, GParamSpec *pspec)
{
	auto *self = GST_LIBCAMERA_PAD(object);
	/* Serialize property access through the pad's object lock. */
	GLibLocker lock(GST_OBJECT(self));
	switch (prop_id) {
	case PROP_STREAM_ROLE:
		self->role = (StreamRole)g_value_get_enum(value);
		break;
	default:
		G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
		break;
	}
}
static void
gst_libcamera_pad_get_property(GObject *object, guint prop_id, GValue *value,
			       GParamSpec *pspec)
{
	auto *self = GST_LIBCAMERA_PAD(object);
	/* Serialize property access through the pad's object lock. */
	GLibLocker lock(GST_OBJECT(self));
	switch (prop_id) {
	case PROP_STREAM_ROLE:
		g_value_set_enum(value, static_cast<gint>(self->role));
		break;
	default:
		G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
		break;
	}
}
static gboolean
gst_libcamera_pad_query(GstPad *pad, GstObject *parent, GstQuery *query)
{
	auto *self = GST_LIBCAMERA_PAD(pad);
	/* Only the LATENCY query is handled here; everything else is
	 * forwarded to the default pad query handler. */
	if (query->type != GST_QUERY_LATENCY)
		return gst_pad_query_default(pad, parent, query);
	/* TRUE here means live. We assume the maximum latency equals the
	 * minimum, as we have no idea of the duration of frames.
	 * NOTE(review): self->latency is read without the object lock that
	 * gst_libcamera_pad_set_latency() takes — confirm this race is
	 * acceptable. */
	gst_query_set_latency(query, TRUE, self->latency, self->latency);
	return TRUE;
}
static void
gst_libcamera_pad_init(GstLibcameraPad *self)
{
	/* Install the custom query handler (LATENCY support above). */
	GST_PAD_QUERYFUNC(self) = gst_libcamera_pad_query;
}
static GType
gst_libcamera_stream_role_get_type()
{
	static GType type = 0;
	/* GEnum mapping of libcamera::StreamRole, used by the
	 * "stream-role" pad property. */
	static const GEnumValue values[] = {
		{
			static_cast<gint>(StreamRole::StillCapture),
			"libcamera::StillCapture",
			"still-capture",
		}, {
			static_cast<gint>(StreamRole::VideoRecording),
			"libcamera::VideoRecording",
			"video-recording",
		}, {
			static_cast<gint>(StreamRole::Viewfinder),
			"libcamera::Viewfinder",
			"view-finder",
		},
		{ 0, NULL, NULL }
	};
	/* NOTE(review): the first-call registration is not guarded against
	 * concurrent callers — presumably only reached from class_init,
	 * which GObject serializes; confirm. */
	if (!type)
		type = g_enum_register_static("GstLibcameraStreamRole", values);
	return type;
}
static void
gst_libcamera_pad_class_init(GstLibcameraPadClass *klass)
{
	auto *object_class = G_OBJECT_CLASS(klass);
	object_class->set_property = gst_libcamera_pad_set_property;
	object_class->get_property = gst_libcamera_pad_get_property;
	/* "stream-role" selects the libcamera StreamRole for this pad;
	 * mutable only in the READY state, defaults to VideoRecording. */
	auto *spec = g_param_spec_enum("stream-role", "Stream Role",
				       "The selected stream role",
				       gst_libcamera_stream_role_get_type(),
				       static_cast<gint>(StreamRole::VideoRecording),
				       (GParamFlags)(GST_PARAM_MUTABLE_READY
						     | G_PARAM_CONSTRUCT
						     | G_PARAM_READWRITE
						     | G_PARAM_STATIC_STRINGS));
	g_object_class_install_property(object_class, PROP_STREAM_ROLE, spec);
}
StreamRole
gst_libcamera_pad_get_role(GstPad *pad)
{
	auto *self = GST_LIBCAMERA_PAD(pad);
	/* Read under the object lock for consistency with set_property. */
	GLibLocker lock(GST_OBJECT(self));
	return self->role;
}
GstLibcameraPool *
gst_libcamera_pad_get_pool(GstPad *pad)
{
	auto *self = GST_LIBCAMERA_PAD(pad);
	/* Borrowed reference; NULL when no pool has been set yet. */
	return self->pool;
}
void
gst_libcamera_pad_set_pool(GstPad *pad, GstLibcameraPool *pool)
{
	auto *self = GST_LIBCAMERA_PAD(pad);
	/* Takes ownership of @pool; any previously set pool is released. */
	if (self->pool)
		g_object_unref(self->pool);
	self->pool = pool;
}
/*
 * Return the libcamera Stream associated with the pad through its pool,
 * or NULL when no pool has been set.
 */
Stream *
gst_libcamera_pad_get_stream(GstPad *pad)
{
	GstLibcameraPad *self = GST_LIBCAMERA_PAD(pad);

	return self->pool ? gst_libcamera_pool_get_stream(self->pool)
			  : nullptr;
}
void
gst_libcamera_pad_set_latency(GstPad *pad, GstClockTime latency)
{
	auto *self = GST_LIBCAMERA_PAD(pad);
	GLibLocker lock(GST_OBJECT(self));
	/* Reported back through the LATENCY query in the pad query func. */
	self->latency = latency;
}
|
0 | repos/libcamera/src | repos/libcamera/src/gstreamer/gstlibcamerapool.h | /* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2020, Collabora Ltd.
* Author: Nicolas Dufresne <[email protected]>
*
* GStreamer Buffer Pool
*
* This is a partial implementation of GstBufferPool intended for internal use
* only. This pool cannot be configured or activated.
*/
#pragma once
#include "gstlibcameraallocator.h"
#include <gst/gst.h>
#include <libcamera/stream.h>
#define GST_TYPE_LIBCAMERA_POOL gst_libcamera_pool_get_type()
G_DECLARE_FINAL_TYPE(GstLibcameraPool, gst_libcamera_pool, GST_LIBCAMERA, POOL, GstBufferPool)
GstLibcameraPool *gst_libcamera_pool_new(GstLibcameraAllocator *allocator,
libcamera::Stream *stream);
libcamera::Stream *gst_libcamera_pool_get_stream(GstLibcameraPool *self);
libcamera::FrameBuffer *gst_libcamera_buffer_get_frame_buffer(GstBuffer *buffer);
|
0 | repos/libcamera/src | repos/libcamera/src/gstreamer/gstlibcameraprovider.cpp | /* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2020, Collabora Ltd.
* Author: Nicolas Dufresne <[email protected]>
*
* GStreamer Device Provider
*/
#include <array>
#include "gstlibcameraprovider.h"
#include <libcamera/camera.h>
#include <libcamera/camera_manager.h>
#include "gstlibcamerasrc.h"
#include "gstlibcamera-utils.h"
using namespace libcamera;
GST_DEBUG_CATEGORY_STATIC(provider_debug);
#define GST_CAT_DEFAULT provider_debug
/**
* \struct _GstLibcameraDevice
* \brief libcamera GstDevice implementation
*
* This object is used by GstLibcameraProvider to abstract a libcamera
* device. It also provides helpers to create and configure the
* libcamerasrc GstElement to be used with this device. The implementation is
* private to the plugin.
*/
enum {
PROP_DEVICE_NAME = 1,
PROP_AUTO_FOCUS_MODE = 2,
};
#define GST_TYPE_LIBCAMERA_DEVICE gst_libcamera_device_get_type()
G_DECLARE_FINAL_TYPE(GstLibcameraDevice, gst_libcamera_device,
GST_LIBCAMERA, DEVICE, GstDevice)
struct _GstLibcameraDevice {
GstDevice parent;
gchar *name;
controls::AfModeEnum auto_focus_mode = controls::AfModeManual;
};
G_DEFINE_TYPE(GstLibcameraDevice, gst_libcamera_device, GST_TYPE_DEVICE)
static GstElement *
gst_libcamera_device_create_element(GstDevice *device, const gchar *name)
{
	GstElement *source = gst_element_factory_make("libcamerasrc", name);
	/*
	 * Provider and source lives in the same plugin, so making the source
	 * should never fail.
	 */
	g_assert(source);
	/* Point the new source at this device's camera and focus mode. */
	g_object_set(source, "camera-name", GST_LIBCAMERA_DEVICE(device)->name, nullptr);
	g_object_set(source, "auto-focus-mode", GST_LIBCAMERA_DEVICE(device)->auto_focus_mode, nullptr);
	return source;
}
static gboolean
gst_libcamera_device_reconfigure_element(GstDevice *device,
					 GstElement *element)
{
	/* Only libcamerasrc elements can be retargeted to this device. */
	if (!GST_LIBCAMERA_IS_SRC(element))
		return FALSE;
	g_object_set(element, "camera-name", GST_LIBCAMERA_DEVICE(device)->name, nullptr);
	return TRUE;
}
static void
gst_libcamera_device_set_property(GObject *object, guint prop_id,
				  const GValue *value, GParamSpec *pspec)
{
	GstLibcameraDevice *device = GST_LIBCAMERA_DEVICE(object);
	switch (prop_id) {
	case PROP_DEVICE_NAME:
		/* Duplicated string; released in finalize(). */
		device->name = g_value_dup_string(value);
		break;
	case PROP_AUTO_FOCUS_MODE:
		device->auto_focus_mode = static_cast<controls::AfModeEnum>(g_value_get_enum(value));
		break;
	default:
		G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
		break;
	}
}
static void
gst_libcamera_device_init([[maybe_unused]] GstLibcameraDevice *self)
{
	/* Nothing to do; members carry their in-class default values. */
}
/* Free the duplicated camera name, then chain up to the parent class. */
static void
gst_libcamera_device_finalize(GObject *object)
{
	GstLibcameraDevice *self = GST_LIBCAMERA_DEVICE(object);

	g_free(self->name);

	G_OBJECT_CLASS(gst_libcamera_device_parent_class)->finalize(object);
}
static void
gst_libcamera_device_class_init(GstLibcameraDeviceClass *klass)
{
	GstDeviceClass *device_class = GST_DEVICE_CLASS(klass);
	GObjectClass *object_class = G_OBJECT_CLASS(klass);
	device_class->create_element = gst_libcamera_device_create_element;
	device_class->reconfigure_element = gst_libcamera_device_reconfigure_element;
	object_class->set_property = gst_libcamera_device_set_property;
	object_class->finalize = gst_libcamera_device_finalize;
	/* "name": the libcamera camera id, write-once at construction. */
	GParamSpec *pspec = g_param_spec_string("name", "Name",
						"The name of the camera device", "",
						(GParamFlags)(G_PARAM_STATIC_STRINGS | G_PARAM_WRITABLE |
							      G_PARAM_CONSTRUCT_ONLY));
	g_object_class_install_property(object_class, PROP_DEVICE_NAME, pspec);
	/* "auto-focus-mode": forwarded to the created libcamerasrc. */
	pspec = g_param_spec_enum("auto-focus-mode",
				  "Set auto-focus mode",
				  "Available options: AfModeManual, "
				  "AfModeAuto or AfModeContinuous.",
				  gst_libcamera_auto_focus_get_type(),
				  static_cast<gint>(controls::AfModeManual),
				  G_PARAM_WRITABLE);
	g_object_class_install_property(object_class, PROP_AUTO_FOCUS_MODE, pspec);
}
static GstDevice *
gst_libcamera_device_new(const std::shared_ptr<Camera> &camera)
{
	/* Probe with a single VideoRecording stream to enumerate caps. */
	static const std::array roles{ StreamRole::VideoRecording };
	g_autoptr(GstCaps) caps = gst_caps_new_empty();
	const gchar *name = camera->id().c_str();
	std::unique_ptr<CameraConfiguration> config = camera->generateConfiguration(roles);
	if (!config || config->size() != roles.size()) {
		GST_ERROR("Failed to generate a default configuration for %s", name);
		return nullptr;
	}
	/* Aggregate the formats supported by every stream into one caps. */
	for (const StreamConfiguration &stream_cfg : *config) {
		GstCaps *sub_caps = gst_libcamera_stream_formats_to_caps(stream_cfg.formats());
		if (sub_caps)
			gst_caps_append(caps, sub_caps);
	}
	return GST_DEVICE(g_object_new(GST_TYPE_LIBCAMERA_DEVICE,
				       /* \todo Use a unique identifier instead of camera name. */
				       "name", name,
				       "display-name", name,
				       "caps", caps,
				       "device-class", "Source/Video",
				       nullptr));
}
/**
* \struct _GstLibcameraProvider
* \brief libcamera GstDeviceProvider implementation
*
* This GstFeature is used by GstDeviceMonitor to probe the available
* libcamera devices. The implementation is private to the plugin.
*/
struct _GstLibcameraProvider {
GstDeviceProvider parent;
};
G_DEFINE_TYPE_WITH_CODE(GstLibcameraProvider, gst_libcamera_provider,
GST_TYPE_DEVICE_PROVIDER,
GST_DEBUG_CATEGORY_INIT(provider_debug, "libcamera-provider", 0,
"libcamera Device Provider"))
/*
 * GstDeviceProvider::probe() implementation: enumerate the cameras known
 * to the shared CameraManager and wrap each in a GstLibcameraDevice.
 * Returns a GList of owned (ref-sunk) GstDevice, or NULL on failure.
 */
static GList *
gst_libcamera_provider_probe(GstDeviceProvider *provider)
{
	GstLibcameraProvider *self = GST_LIBCAMERA_PROVIDER(provider);
	std::shared_ptr<CameraManager> cm;
	GList *devices = nullptr;
	gint ret;

	GST_INFO_OBJECT(self, "Probing cameras using libcamera");

	/* \todo Move the CameraMananger start()/stop() calls into
	 * GstDeviceProvider start()/stop() virtual function when CameraMananger
	 * gains monitoring support. Meanwhile we need to cycle start()/stop()
	 * to ensure every probe() calls return the latest list.
	 */
	cm = gst_libcamera_get_camera_manager(ret);
	if (ret) {
		GST_ERROR_OBJECT(self, "Failed to retrieve device list: %s",
				 g_strerror(-ret));
		return nullptr;
	}

	for (const std::shared_ptr<Camera> &camera : cm->cameras()) {
		GST_INFO_OBJECT(self, "Found camera '%s'", camera->id().c_str());

		GstDevice *dev = gst_libcamera_device_new(camera);
		if (!dev) {
			GST_ERROR_OBJECT(self, "Failed to add camera '%s'",
					 camera->id().c_str());
			/* Don't leak the devices collected so far. */
			g_list_free_full(devices, g_object_unref);
			return nullptr;
		}

		devices = g_list_append(devices,
					g_object_ref_sink(dev));
	}

	return devices;
}
static void
gst_libcamera_provider_init(GstLibcameraProvider *self)
{
	GstDeviceProvider *provider = GST_DEVICE_PROVIDER(self);
	/* Avoid devices being duplicated. */
	gst_device_provider_hide_provider(provider, "v4l2deviceprovider");
}
static void
gst_libcamera_provider_class_init(GstLibcameraProviderClass *klass)
{
	GstDeviceProviderClass *provider_class = GST_DEVICE_PROVIDER_CLASS(klass);
	/* Only probe() is implemented; start()/stop() monitoring is not. */
	provider_class->probe = gst_libcamera_provider_probe;
	gst_device_provider_class_set_metadata(provider_class,
					       "libcamera Device Provider",
					       "Source/Video",
					       "List camera device using libcamera",
					       "Nicolas Dufresne <[email protected]>");
}
|
0 | repos/libcamera/src | repos/libcamera/src/v4l2/v4l2_compat_manager.h | /* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2019, Google Inc.
*
* V4L2 compatibility manager
*/
#pragma once
#include <fcntl.h>
#include <map>
#include <memory>
#include <sys/types.h>
#include <vector>
#include <libcamera/camera_manager.h>
#include "v4l2_camera_proxy.h"
class V4L2CompatManager
{
public:
	/* Pointers to the real libc file operations, used to forward calls
	 * for file descriptors that don't belong to a libcamera camera. */
	struct FileOperations {
		using openat_func_t = int (*)(int dirfd, const char *path,
					      int oflag, ...);
		using dup_func_t = int (*)(int oldfd);
		using close_func_t = int (*)(int fd);
		using ioctl_func_t = int (*)(int fd, unsigned long request, ...);
		using mmap_func_t = void *(*)(void *addr, size_t length, int prot,
					      int flags, int fd, off64_t offset);
		using munmap_func_t = int (*)(void *addr, size_t length);
		openat_func_t openat;
		dup_func_t dup;
		close_func_t close;
		ioctl_func_t ioctl;
		mmap_func_t mmap;
		munmap_func_t munmap;
	};
	/* Process-wide singleton accessor. */
	static V4L2CompatManager *instance();
	const FileOperations &fops() const { return fops_; }
	/* Intercepted file operations, called from the v4l2_compat.cpp
	 * extern "C" wrappers. */
	int openat(int dirfd, const char *path, int oflag, mode_t mode);
	int dup(int oldfd);
	int close(int fd);
	void *mmap(void *addr, size_t length, int prot, int flags,
		   int fd, off64_t offset);
	int munmap(void *addr, size_t length);
	int ioctl(int fd, unsigned long request, void *arg);
private:
	V4L2CompatManager();
	~V4L2CompatManager();
	int start();
	int getCameraIndex(int fd);
	std::shared_ptr<V4L2CameraFile> cameraFile(int fd);
	FileOperations fops_;
	libcamera::CameraManager *cm_;
	/* One proxy per camera; per-fd file and per-address mmap maps. */
	std::vector<std::unique_ptr<V4L2CameraProxy>> proxies_;
	std::map<int, std::shared_ptr<V4L2CameraFile>> files_;
	std::map<void *, std::shared_ptr<V4L2CameraFile>> mmaps_;
};
|
0 | repos/libcamera/src | repos/libcamera/src/v4l2/v4l2_camera.cpp | /* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2019, Google Inc.
*
* V4L2 compatibility camera
*/
#include "v4l2_camera.h"
#include <errno.h>
#include <unistd.h>
#include <libcamera/base/log.h>
using namespace libcamera;
LOG_DECLARE_CATEGORY(V4L2Compat)
V4L2Camera::V4L2Camera(std::shared_ptr<Camera> camera)
	: camera_(camera), isRunning_(false), bufferAllocator_(nullptr),
	  efd_(-1), bufferAvailableCount_(0)
{
	/* Get notified of request completion to hand buffers back to V4L2. */
	camera_->requestCompleted.connect(this, &V4L2Camera::requestComplete);
}
V4L2Camera::~V4L2Camera()
{
	/* Release the camera and free the buffer allocator. */
	close();
}
int V4L2Camera::open(StreamConfiguration *streamConfig)
{
	/* Acquire exclusive access to the camera for this instance. */
	if (camera_->acquire() < 0) {
		LOG(V4L2Compat, Error) << "Failed to acquire camera";
		return -EINVAL;
	}
	/* A single Viewfinder stream backs the V4L2 adaptation. */
	config_ = camera_->generateConfiguration({ StreamRole::Viewfinder });
	if (!config_) {
		camera_->release();
		return -EINVAL;
	}
	bufferAllocator_ = new FrameBufferAllocator(camera_);
	/* Report the default stream configuration back to the caller. */
	*streamConfig = config_->at(0);
	return 0;
}
void V4L2Camera::close()
{
	/* Drop requests before deleting the allocator their buffers use. */
	requestPool_.clear();
	delete bufferAllocator_;
	bufferAllocator_ = nullptr;
	camera_->release();
}
void V4L2Camera::bind(int efd)
{
	/* eventfd used to signal POLLIN when a frame completes. */
	efd_ = efd;
}
void V4L2Camera::unbind()
{
	/* Invalidate the completion eventfd. */
	efd_ = -1;
}
std::vector<V4L2Camera::Buffer> V4L2Camera::completedBuffers()
{
std::vector<Buffer> v;
MutexLocker lock(bufferLock_);
for (std::unique_ptr<Buffer> &metadata : completedBuffers_)
v.push_back(*metadata.get());
completedBuffers_.clear();
return v;
}
/*
 * Slot connected to Camera::requestCompleted. Records the completion
 * metadata, signals the bound eventfd so pollers see POLLIN, recycles
 * the request and wakes threads blocked in waitForBufferAvailable().
 */
void V4L2Camera::requestComplete(Request *request)
{
	if (request->status() == Request::RequestCancelled)
		return;

	/* We only have one stream at the moment. */
	{
		/* Scoped locker instead of manual lock()/unlock(): safe if
		 * anything below the push_back were to throw. */
		MutexLocker locker(bufferLock_);
		FrameBuffer *buffer = request->buffers().begin()->second;
		std::unique_ptr<Buffer> metadata =
			std::make_unique<Buffer>(request->cookie(), buffer->metadata());
		completedBuffers_.push_back(std::move(metadata));
	}

	uint64_t data = 1;
	int ret = ::write(efd_, &data, sizeof(data));
	if (ret != sizeof(data))
		LOG(V4L2Compat, Error) << "Failed to signal eventfd POLLIN";

	request->reuse();

	{
		MutexLocker locker(bufferMutex_);
		bufferAvailableCount_++;
	}
	bufferCV_.notify_all();
}
int V4L2Camera::configure(StreamConfiguration *streamConfigOut,
			  const Size &size, const PixelFormat &pixelformat,
			  unsigned int bufferCount)
{
	/* Apply the requested parameters to the single stream. */
	StreamConfiguration &streamConfig = config_->at(0);
	streamConfig.size.width = size.width;
	streamConfig.size.height = size.height;
	streamConfig.pixelFormat = pixelformat;
	streamConfig.bufferCount = bufferCount;
	/* \todo memoryType (interval vs external) */
	/* Let the pipeline handler adjust the configuration if needed. */
	CameraConfiguration::Status validation = config_->validate();
	if (validation == CameraConfiguration::Invalid) {
		LOG(V4L2Compat, Debug) << "Configuration invalid";
		return -EINVAL;
	}
	if (validation == CameraConfiguration::Adjusted)
		LOG(V4L2Compat, Debug) << "Configuration adjusted";
	LOG(V4L2Compat, Debug) << "Validated configuration is: "
			       << streamConfig.toString();
	int ret = camera_->configure(config_.get());
	if (ret < 0)
		return ret;
	/* Return the possibly adjusted configuration to the caller. */
	*streamConfigOut = config_->at(0);
	return 0;
}
/*
 * Validate \a pixelFormat and \a size against the camera without touching
 * the active configuration, using a throw-away CameraConfiguration. The
 * (possibly adjusted) result is stored in \a streamConfigOut.
 * Returns 0 on success or -EINVAL when no valid configuration exists.
 */
int V4L2Camera::validateConfiguration(const PixelFormat &pixelFormat,
				      const Size &size,
				      StreamConfiguration *streamConfigOut)
{
	std::unique_ptr<CameraConfiguration> config =
		camera_->generateConfiguration({ StreamRole::Viewfinder });
	/* Guard against generation failure, as open() does, instead of
	 * dereferencing a null configuration. */
	if (!config)
		return -EINVAL;

	StreamConfiguration &cfg = config->at(0);
	cfg.size = size;
	cfg.pixelFormat = pixelFormat;
	cfg.bufferCount = 1;

	CameraConfiguration::Status validation = config->validate();
	if (validation == CameraConfiguration::Invalid)
		return -EINVAL;

	/* Report the adjusted configuration without applying it. */
	*streamConfigOut = cfg;

	return 0;
}
int V4L2Camera::allocBuffers(unsigned int count)
{
	Stream *stream = config_->at(0).stream();
	/* Allocate the frame buffers for the configured stream. */
	int ret = bufferAllocator_->allocate(stream);
	if (ret < 0)
		return ret;
	/* Create one request per V4L2 buffer, using the index as cookie. */
	for (unsigned int i = 0; i < count; i++) {
		std::unique_ptr<Request> request = camera_->createRequest(i);
		if (!request) {
			requestPool_.clear();
			/* NOTE(review): the buffers allocated above remain
			 * allocated on this path; presumably released by a
			 * later freeBuffers() — confirm with callers. */
			return -ENOMEM;
		}
		requestPool_.push_back(std::move(request));
	}
	return ret;
}
void V4L2Camera::freeBuffers()
{
	/* Drop all requests referencing the buffers before freeing them. */
	pendingRequests_.clear();
	requestPool_.clear();
	Stream *stream = config_->at(0).stream();
	bufferAllocator_->free(stream);
}
int V4L2Camera::getBufferFd(unsigned int index)
{
	Stream *stream = config_->at(0).stream();
	const std::vector<std::unique_ptr<FrameBuffer>> &buffers =
		bufferAllocator_->buffers(stream);
	/* Return -1 (invalid fd) for out-of-range indexes. */
	if (buffers.size() <= index)
		return -1;
	/* Expose the dmabuf fd of the buffer's first plane. */
	return buffers[index]->planes()[0].fd.get();
}
int V4L2Camera::streamOn()
{
	if (isRunning_)
		return 0;
	int ret = camera_->start();
	if (ret < 0)
		/* Map "camera in use" (-EACCES) to the V4L2 -EBUSY code. */
		return ret == -EACCES ? -EBUSY : ret;
	isRunning_ = true;
	/* Queue the requests buffered by qbuf() while the stream was off. */
	for (Request *req : pendingRequests_) {
		/* \todo What should we do if this returns -EINVAL? */
		ret = camera_->queueRequest(req);
		if (ret < 0)
			return ret == -EACCES ? -EBUSY : ret;
	}
	pendingRequests_.clear();
	return 0;
}
int V4L2Camera::streamOff()
{
	if (!isRunning_) {
		/* Not streaming: just reset requests for later requeueing. */
		for (std::unique_ptr<Request> &req : requestPool_)
			req->reuse();
		return 0;
	}
	pendingRequests_.clear();
	int ret = camera_->stop();
	if (ret < 0)
		return ret == -EACCES ? -EBUSY : ret;
	{
		MutexLocker locker(bufferMutex_);
		isRunning_ = false;
	}
	/* Wake any thread blocked in waitForBufferAvailable(). */
	bufferCV_.notify_all();
	return 0;
}
int V4L2Camera::qbuf(unsigned int index)
{
	if (index >= requestPool_.size()) {
		LOG(V4L2Compat, Error) << "Invalid index";
		return -EINVAL;
	}
	/* Requests and frame buffers are paired by their index. */
	Request *request = requestPool_[index].get();
	Stream *stream = config_->at(0).stream();
	FrameBuffer *buffer = bufferAllocator_->buffers(stream)[index].get();
	int ret = request->addBuffer(stream, buffer);
	if (ret < 0) {
		LOG(V4L2Compat, Error) << "Can't set buffer for request";
		return -ENOMEM;
	}
	/* Defer queueing until streamOn() while the stream is stopped. */
	if (!isRunning_) {
		pendingRequests_.push_back(request);
		return 0;
	}
	ret = camera_->queueRequest(request);
	if (ret < 0) {
		LOG(V4L2Compat, Error) << "Can't queue request";
		return ret == -EACCES ? -EBUSY : ret;
	}
	return 0;
}
void V4L2Camera::waitForBufferAvailable()
{
	MutexLocker locker(bufferMutex_);
	/* Block until a completed buffer is available or streaming stops. */
	bufferCV_.wait(locker, [&]() LIBCAMERA_TSA_REQUIRES(bufferMutex_) {
		return bufferAvailableCount_ >= 1 || !isRunning_;
	});
	/* Consume a buffer only when woken by one, not by streamOff(). */
	if (isRunning_)
		bufferAvailableCount_--;
}
bool V4L2Camera::isBufferAvailable()
{
	/* Non-blocking check-and-consume of the completed buffer count. */
	MutexLocker locker(bufferMutex_);
	if (bufferAvailableCount_ < 1)
		return false;
	bufferAvailableCount_--;
	return true;
}
bool V4L2Camera::isRunning()
{
	/* NOTE(review): read without holding bufferMutex_, which guards the
	 * writes in streamOn()/streamOff(); confirm callers tolerate a
	 * momentarily stale value. */
	return isRunning_;
}
|
0 | repos/libcamera/src | repos/libcamera/src/v4l2/v4l2_compat.cpp | /* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2019, Google Inc.
*
* V4L2 compatibility layer
*/
#include "v4l2_compat_manager.h"
#include <assert.h>
#include <errno.h>
#include <fcntl.h>
#include <stdarg.h>
#include <sys/mman.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <libcamera/base/utils.h>
#define LIBCAMERA_PUBLIC __attribute__((visibility("default")))
using namespace libcamera;
#define extract_va_arg(type, arg, last) \
{ \
va_list ap; \
va_start(ap, last); \
arg = va_arg(ap, type); \
va_end(ap); \
}
namespace {

/*
 * Determine if the flags require a further mode argument that needs to be
 * parsed from va_args. This is the case when the file may be created:
 * either O_CREAT is set, or the full O_TMPFILE flag combination is.
 */
bool needs_mode(int flags)
{
	if (flags & O_CREAT)
		return true;

	return (flags & O_TMPFILE) == O_TMPFILE;
}

} /* namespace */
extern "C" {
LIBCAMERA_PUBLIC int open(const char *path, int oflag, ...)
{
	mode_t mode = 0;
	/* O_CREAT / O_TMPFILE carry a third mode argument; fetch it then. */
	if (needs_mode(oflag))
		extract_va_arg(mode_t, mode, oflag);
	/* Route the open through the compat manager, which forwards it to
	 * libc unless the path is an emulated video device. */
	return V4L2CompatManager::instance()->openat(AT_FDCWD, path,
						     oflag, mode);
}
/* _FORTIFY_SOURCE redirects open to __open_2 */
LIBCAMERA_PUBLIC int __open_2(const char *path, int oflag)
{
assert(!needs_mode(oflag));
return open(path, oflag);
}
#ifndef open64
LIBCAMERA_PUBLIC int open64(const char *path, int oflag, ...)
{
mode_t mode = 0;
if (needs_mode(oflag))
extract_va_arg(mode_t, mode, oflag);
return V4L2CompatManager::instance()->openat(AT_FDCWD, path,
oflag | O_LARGEFILE, mode);
}
LIBCAMERA_PUBLIC int __open64_2(const char *path, int oflag)
{
assert(!needs_mode(oflag));
return open64(path, oflag);
}
#endif
LIBCAMERA_PUBLIC int openat(int dirfd, const char *path, int oflag, ...)
{
mode_t mode = 0;
if (needs_mode(oflag))
extract_va_arg(mode_t, mode, oflag);
return V4L2CompatManager::instance()->openat(dirfd, path, oflag, mode);
}
LIBCAMERA_PUBLIC int __openat_2(int dirfd, const char *path, int oflag)
{
assert(!needs_mode(oflag));
return openat(dirfd, path, oflag);
}
#ifndef openat64
LIBCAMERA_PUBLIC int openat64(int dirfd, const char *path, int oflag, ...)
{
mode_t mode = 0;
if (needs_mode(oflag))
extract_va_arg(mode_t, mode, oflag);
return V4L2CompatManager::instance()->openat(dirfd, path,
oflag | O_LARGEFILE, mode);
}
LIBCAMERA_PUBLIC int __openat64_2(int dirfd, const char *path, int oflag)
{
assert(!needs_mode(oflag));
return openat64(dirfd, path, oflag);
}
#endif
LIBCAMERA_PUBLIC int dup(int oldfd)
{
return V4L2CompatManager::instance()->dup(oldfd);
}
LIBCAMERA_PUBLIC int close(int fd)
{
return V4L2CompatManager::instance()->close(fd);
}
LIBCAMERA_PUBLIC void *mmap(void *addr, size_t length, int prot, int flags,
int fd, off_t offset)
{
return V4L2CompatManager::instance()->mmap(addr, length, prot, flags,
fd, offset);
}
#ifndef mmap64
LIBCAMERA_PUBLIC void *mmap64(void *addr, size_t length, int prot, int flags,
int fd, off64_t offset)
{
return V4L2CompatManager::instance()->mmap(addr, length, prot, flags,
fd, offset);
}
#endif
LIBCAMERA_PUBLIC int munmap(void *addr, size_t length)
{
return V4L2CompatManager::instance()->munmap(addr, length);
}
LIBCAMERA_PUBLIC int ioctl(int fd, unsigned long request, ...)
{
	void *arg;
	/* Fetch the optional argument pointer from the varargs. */
	extract_va_arg(void *, arg, request);
	return V4L2CompatManager::instance()->ioctl(fd, request, arg);
}
}
|
0 | repos/libcamera/src | repos/libcamera/src/v4l2/v4l2_camera_file.h | /* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2020, Google Inc.
*
* V4L2 compatibility camera file information
*/
#pragma once
#include <string>
#include <linux/videodev2.h>
class V4L2CameraProxy;
class V4L2CameraFile
{
public:
	/* Per-open-fd state for an emulated V4L2 camera device. */
	V4L2CameraFile(int dirfd, const char *path, int efd, bool nonBlocking,
		       V4L2CameraProxy *proxy);
	~V4L2CameraFile();
	V4L2CameraProxy *proxy() const { return proxy_; }
	/* True when the fd was opened with O_NONBLOCK. */
	bool nonBlocking() const { return nonBlocking_; }
	/* eventfd backing poll()/select() readiness for this fd. */
	int efd() const { return efd_; }
	/* V4L2 access priority (VIDIOC_G/S_PRIORITY). */
	enum v4l2_priority priority() const { return priority_; }
	void setPriority(enum v4l2_priority priority) { priority_ = priority; }
	const std::string &description() const;
private:
	V4L2CameraProxy *proxy_;
	std::string description_;
	bool nonBlocking_;
	int efd_;
	enum v4l2_priority priority_;
};
|
0 | repos/libcamera/src | repos/libcamera/src/v4l2/v4l2_camera.h | /* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2019, Google Inc.
*
* V4L2 compatibility camera
*/
#pragma once
#include <deque>
#include <utility>
#include <libcamera/base/mutex.h>
#include <libcamera/base/semaphore.h>
#include <libcamera/base/shared_fd.h>
#include <libcamera/camera.h>
#include <libcamera/framebuffer.h>
#include <libcamera/framebuffer_allocator.h>
/*
 * Wrapper exposing a libcamera::Camera through an interface tailored to the
 * V4L2 compatibility layer: stream configuration, buffer allocation, buffer
 * queueing and completion tracking for a single video capture stream.
 */
class V4L2Camera
{
public:
	/* Record of a completed buffer: its index plus the frame metadata. */
	struct Buffer {
		Buffer(unsigned int index, const libcamera::FrameMetadata &data)
			: index_(index), data_(data)
		{
		}

		unsigned int index_;
		libcamera::FrameMetadata data_;
	};

	V4L2Camera(std::shared_ptr<libcamera::Camera> camera);
	~V4L2Camera();

	int open(libcamera::StreamConfiguration *streamConfig);
	void close();
	/* Bind/unbind the eventfd used to signal buffer completion to poll(). */
	void bind(int efd);
	void unbind();

	/* Drain and return all buffers completed since the previous call. */
	std::vector<Buffer> completedBuffers() LIBCAMERA_TSA_EXCLUDES(bufferLock_);

	int configure(libcamera::StreamConfiguration *streamConfigOut,
		      const libcamera::Size &size,
		      const libcamera::PixelFormat &pixelformat,
		      unsigned int bufferCount);
	/* Validate a format/size combination without applying it. */
	int validateConfiguration(const libcamera::PixelFormat &pixelformat,
				  const libcamera::Size &size,
				  libcamera::StreamConfiguration *streamConfigOut);

	int allocBuffers(unsigned int count);
	void freeBuffers();
	/* Return the dmabuf fd backing buffer \a index, or a negative error. */
	int getBufferFd(unsigned int index);

	int streamOn();
	int streamOff();

	int qbuf(unsigned int index);

	/* Block until at least one completed buffer can be dequeued. */
	void waitForBufferAvailable() LIBCAMERA_TSA_EXCLUDES(bufferMutex_);
	bool isBufferAvailable() LIBCAMERA_TSA_EXCLUDES(bufferMutex_);

	bool isRunning();

private:
	/* Slot connected to Camera::requestCompleted; runs in CameraManager context. */
	void requestComplete(libcamera::Request *request)
		LIBCAMERA_TSA_EXCLUDES(bufferLock_);

	std::shared_ptr<libcamera::Camera> camera_;
	std::unique_ptr<libcamera::CameraConfiguration> config_;

	bool isRunning_;

	libcamera::Mutex bufferLock_;
	libcamera::FrameBufferAllocator *bufferAllocator_;

	std::vector<std::unique_ptr<libcamera::Request>> requestPool_;

	/* Requests waiting to be queued once the stream starts. */
	std::deque<libcamera::Request *> pendingRequests_;
	std::deque<std::unique_ptr<Buffer>> completedBuffers_
		LIBCAMERA_TSA_GUARDED_BY(bufferLock_);

	/* eventfd signalled on completion; -1-equivalent semantics when unbound. */
	int efd_;

	libcamera::Mutex bufferMutex_;
	libcamera::ConditionVariable bufferCV_;
	unsigned int bufferAvailableCount_ LIBCAMERA_TSA_GUARDED_BY(bufferMutex_);
};
|
0 | repos/libcamera/src | repos/libcamera/src/v4l2/v4l2_camera_proxy.cpp | /* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2019, Google Inc.
*
* Proxy to V4L2 compatibility camera
*/
#include "v4l2_camera_proxy.h"
#include <algorithm>
#include <array>
#include <errno.h>
#include <numeric>
#include <set>
#include <string.h>
#include <sys/mman.h>
#include <unistd.h>
#include <linux/videodev2.h>
#include <libcamera/base/log.h>
#include <libcamera/base/object.h>
#include <libcamera/base/utils.h>
#include <libcamera/camera.h>
#include <libcamera/formats.h>
#include "libcamera/internal/formats.h"
#include "v4l2_camera.h"
#include "v4l2_camera_file.h"
#include "v4l2_compat_manager.h"
#define KERNEL_VERSION(a, b, c) (((a) << 16) + ((b) << 8) + (c))
using namespace libcamera;
LOG_DECLARE_CATEGORY(V4L2Compat)
V4L2CameraProxy::V4L2CameraProxy(unsigned int index,
std::shared_ptr<Camera> camera)
: refcount_(0), index_(index), bufferCount_(0), currentBuf_(0),
vcam_(std::make_unique<V4L2Camera>(camera)), owner_(nullptr)
{
querycap(camera);
}
int V4L2CameraProxy::open(V4L2CameraFile *file)
{
LOG(V4L2Compat, Debug)
<< "[" << file->description() << "] " << __func__ << "()";
MutexLocker locker(proxyMutex_);
if (refcount_++) {
files_.insert(file);
return 0;
}
/*
* We open the camera here, once, and keep it open until the last
* V4L2CameraFile is closed. The proxy is initially not owned by any
* file. The first file that calls reqbufs with count > 0 or s_fmt
* will become the owner, and no other file will be allowed to call
* buffer-related ioctls (except querybuf), set the format, or start or
* stop the stream until ownership is released with a call to reqbufs
* with count = 0.
*/
int ret = vcam_->open(&streamConfig_);
if (ret < 0) {
refcount_--;
return ret;
}
setFmtFromConfig(streamConfig_);
files_.insert(file);
return 0;
}
void V4L2CameraProxy::close(V4L2CameraFile *file)
{
LOG(V4L2Compat, Debug)
<< "[" << file->description() << "] " << __func__ << "()";
MutexLocker locker(proxyMutex_);
files_.erase(file);
release(file);
if (--refcount_ > 0)
return;
vcam_->close();
}
/*
 * Map buffer memory for \a file. The offset encodes the buffer index as
 * index * sizeimage_, mirroring the offsets advertised by reqbufs.
 */
void *V4L2CameraProxy::mmap(V4L2CameraFile *file, void *addr, size_t length,
			    int prot, int flags, off64_t offset)
{
	LOG(V4L2Compat, Debug)
		<< "[" << file->description() << "] " << __func__ << "()";

	MutexLocker locker(proxyMutex_);

	/*
	 * Mimic the videobuf2 behaviour, which requires PROT_READ and
	 * MAP_SHARED.
	 */
	if (!(prot & PROT_READ)) {
		errno = EINVAL;
		return MAP_FAILED;
	}

	if (!(flags & MAP_SHARED)) {
		errno = EINVAL;
		return MAP_FAILED;
	}

	/* Recover the buffer index and reject misaligned or partial mappings. */
	unsigned int index = offset / sizeimage_;
	if (static_cast<off_t>(index * sizeimage_) != offset ||
	    length != sizeimage_) {
		errno = EINVAL;
		return MAP_FAILED;
	}

	int fd = vcam_->getBufferFd(index);
	if (fd < 0) {
		errno = EINVAL;
		return MAP_FAILED;
	}

	/* Map the buffer's dmabuf directly with the real (intercepted) mmap. */
	void *map = V4L2CompatManager::instance()->fops().mmap(addr, length, prot,
							       flags, fd, 0);
	if (map == MAP_FAILED)
		return map;

	buffers_[index].flags |= V4L2_BUF_FLAG_MAPPED;
	mmaps_[map] = index;

	return map;
}

/*
 * Unmap a mapping previously created by mmap(). Only whole-buffer unmaps at
 * a known mapped address are accepted.
 */
int V4L2CameraProxy::munmap(V4L2CameraFile *file, void *addr, size_t length)
{
	LOG(V4L2Compat, Debug)
		<< "[" << file->description() << "] " << __func__ << "()";

	MutexLocker locker(proxyMutex_);

	auto iter = mmaps_.find(addr);
	if (iter == mmaps_.end() || length != sizeimage_) {
		errno = EINVAL;
		return -1;
	}

	if (V4L2CompatManager::instance()->fops().munmap(addr, length))
		LOG(V4L2Compat, Error) << "Failed to unmap " << addr
				       << " with length " << length;

	buffers_[iter->second].flags &= ~V4L2_BUF_FLAG_MAPPED;
	mmaps_.erase(iter);

	return 0;
}
/* Only single-planar video capture is supported by the compatibility layer. */
bool V4L2CameraProxy::validateBufferType(uint32_t type)
{
	return type == V4L2_BUF_TYPE_VIDEO_CAPTURE;
}

/* Only MMAP memory is supported; DMABUF/USERPTR are not implemented. */
bool V4L2CameraProxy::validateMemoryType(uint32_t memory)
{
	return memory == V4L2_MEMORY_MMAP;
}

/*
 * Refresh the cached v4l2_pix_format and image size from the active stream
 * configuration. Called whenever the configuration changes.
 */
void V4L2CameraProxy::setFmtFromConfig(const StreamConfiguration &streamConfig)
{
	const Size &size = streamConfig.size;

	v4l2PixFormat_.width = size.width;
	v4l2PixFormat_.height = size.height;
	v4l2PixFormat_.pixelformat = V4L2PixelFormat::fromPixelFormat(streamConfig.pixelFormat)[0];
	v4l2PixFormat_.field = V4L2_FIELD_NONE;
	v4l2PixFormat_.bytesperline = streamConfig.stride;
	v4l2PixFormat_.sizeimage = streamConfig.frameSize;
	v4l2PixFormat_.colorspace = V4L2_COLORSPACE_SRGB;
	v4l2PixFormat_.priv = V4L2_PIX_FMT_PRIV_MAGIC;
	v4l2PixFormat_.ycbcr_enc = V4L2_YCBCR_ENC_DEFAULT;
	v4l2PixFormat_.quantization = V4L2_QUANTIZATION_DEFAULT;
	v4l2PixFormat_.xfer_func = V4L2_XFER_FUNC_DEFAULT;

	/* sizeimage_ is also the per-buffer mmap offset granularity. */
	sizeimage_ = streamConfig.frameSize;
}

/* Fill the static v4l2_capability structure returned by VIDIOC_QUERYCAP. */
void V4L2CameraProxy::querycap(std::shared_ptr<Camera> camera)
{
	std::string driver = "libcamera";
	std::string bus_info = driver + ":" + std::to_string(index_);

	utils::strlcpy(reinterpret_cast<char *>(capabilities_.driver), driver.c_str(),
		       sizeof(capabilities_.driver));
	utils::strlcpy(reinterpret_cast<char *>(capabilities_.card), camera->id().c_str(),
		       sizeof(capabilities_.card));
	utils::strlcpy(reinterpret_cast<char *>(capabilities_.bus_info), bus_info.c_str(),
		       sizeof(capabilities_.bus_info));
	/* \todo Put this in a header/config somewhere. */
	capabilities_.version = KERNEL_VERSION(5, 2, 0);
	capabilities_.device_caps = V4L2_CAP_VIDEO_CAPTURE
				  | V4L2_CAP_STREAMING
				  | V4L2_CAP_EXT_PIX_FORMAT;
	capabilities_.capabilities = capabilities_.device_caps
				   | V4L2_CAP_DEVICE_CAPS;
	memset(capabilities_.reserved, 0, sizeof(capabilities_.reserved));
}
/*
 * Pull completed buffers from the V4L2Camera and fold their frame metadata
 * into the cached v4l2_buffer array: bytesused, timestamp, sequence and the
 * DONE (or ERROR) flag, as reported later by querybuf/dqbuf.
 */
void V4L2CameraProxy::updateBuffers()
{
	std::vector<V4L2Camera::Buffer> completedBuffers = vcam_->completedBuffers();
	for (const V4L2Camera::Buffer &buffer : completedBuffers) {
		const FrameMetadata &fmd = buffer.data_;
		struct v4l2_buffer &buf = buffers_[buffer.index_];

		switch (fmd.status) {
		case FrameMetadata::FrameSuccess:
			/* Sum the bytes used over all planes of the frame. */
			buf.bytesused = std::accumulate(fmd.planes().begin(),
							fmd.planes().end(), 0,
							[](unsigned int total, const auto &plane) {
								return total + plane.bytesused;
							});
			buf.field = V4L2_FIELD_NONE;
			/* Split the nanosecond timestamp into sec/usec. */
			buf.timestamp.tv_sec = fmd.timestamp / 1000000000;
			buf.timestamp.tv_usec = (fmd.timestamp / 1000) % 1000000;
			buf.sequence = fmd.sequence;

			buf.flags |= V4L2_BUF_FLAG_DONE;
			break;
		case FrameMetadata::FrameError:
			buf.flags |= V4L2_BUF_FLAG_ERROR;
			break;
		default:
			break;
		}
	}
}
/* VIDIOC_QUERYCAP: return the capability data precomputed by querycap(). */
int V4L2CameraProxy::vidioc_querycap(V4L2CameraFile *file, struct v4l2_capability *arg)
{
	LOG(V4L2Compat, Debug)
		<< "[" << file->description() << "] " << __func__ << "()";

	*arg = capabilities_;

	return 0;
}

/*
 * VIDIOC_ENUM_FRAMESIZES: enumerate the discrete frame sizes supported for
 * the requested pixel format, indexed by arg->index.
 */
int V4L2CameraProxy::vidioc_enum_framesizes(V4L2CameraFile *file, struct v4l2_frmsizeenum *arg)
{
	LOG(V4L2Compat, Debug)
		<< "[" << file->description() << "] " << __func__ << "()";

	V4L2PixelFormat v4l2Format = V4L2PixelFormat(arg->pixel_format);
	PixelFormat format = v4l2Format.toPixelFormat();
	/*
	 * \todo This might need to be expanded as few pipeline handlers
	 * report StreamFormats.
	 */
	const std::vector<Size> &frameSizes = streamConfig_.formats().sizes(format);

	if (arg->index >= frameSizes.size())
		return -EINVAL;

	arg->type = V4L2_FRMSIZE_TYPE_DISCRETE;
	arg->discrete.width = frameSizes[arg->index].width;
	arg->discrete.height = frameSizes[arg->index].height;
	memset(arg->reserved, 0, sizeof(arg->reserved));

	return 0;
}
/* VIDIOC_ENUM_FMT: enumerate the supported pixel formats by arg->index. */
int V4L2CameraProxy::vidioc_enum_fmt(V4L2CameraFile *file, struct v4l2_fmtdesc *arg)
{
	LOG(V4L2Compat, Debug)
		<< "[" << file->description() << "] " << __func__ << "()";

	if (!validateBufferType(arg->type) ||
	    arg->index >= streamConfig_.formats().pixelformats().size())
		return -EINVAL;

	PixelFormat format = streamConfig_.formats().pixelformats()[arg->index];
	V4L2PixelFormat v4l2Format = V4L2PixelFormat::fromPixelFormat(format)[0];

	/* MJPEG is the only compressed format exposed here. */
	arg->flags = format == formats::MJPEG ? V4L2_FMT_FLAG_COMPRESSED : 0;
	utils::strlcpy(reinterpret_cast<char *>(arg->description),
		       v4l2Format.description(), sizeof(arg->description));
	arg->pixelformat = v4l2Format;

	memset(arg->reserved, 0, sizeof(arg->reserved));

	return 0;
}

/* VIDIOC_G_FMT: return the currently cached pixel format. */
int V4L2CameraProxy::vidioc_g_fmt(V4L2CameraFile *file, struct v4l2_format *arg)
{
	LOG(V4L2Compat, Debug)
		<< "[" << file->description() << "] " << __func__ << "()";

	if (!validateBufferType(arg->type))
		return -EINVAL;

	memset(&arg->fmt, 0, sizeof(arg->fmt));
	arg->fmt.pix = v4l2PixFormat_;

	return 0;
}
/*
 * Negotiate the requested format and size against the camera without
 * applying them, and rewrite \a arg with the adjusted values. Shared by
 * VIDIOC_S_FMT and VIDIOC_TRY_FMT.
 */
int V4L2CameraProxy::tryFormat(struct v4l2_format *arg)
{
	V4L2PixelFormat v4l2Format = V4L2PixelFormat(arg->fmt.pix.pixelformat);
	PixelFormat format = v4l2Format.toPixelFormat();
	Size size(arg->fmt.pix.width, arg->fmt.pix.height);

	StreamConfiguration config;
	int ret = vcam_->validateConfiguration(format, size, &config);
	if (ret < 0) {
		LOG(V4L2Compat, Error)
			<< "Failed to negotiate a valid format: "
			<< format;
		return -EINVAL;
	}

	/* Report back what the camera can actually deliver. */
	arg->fmt.pix.width = config.size.width;
	arg->fmt.pix.height = config.size.height;
	arg->fmt.pix.pixelformat = V4L2PixelFormat::fromPixelFormat(config.pixelFormat)[0];
	arg->fmt.pix.field = V4L2_FIELD_NONE;
	arg->fmt.pix.bytesperline = config.stride;
	arg->fmt.pix.sizeimage = config.frameSize;
	arg->fmt.pix.colorspace = V4L2_COLORSPACE_SRGB;
	arg->fmt.pix.priv = V4L2_PIX_FMT_PRIV_MAGIC;
	arg->fmt.pix.ycbcr_enc = V4L2_YCBCR_ENC_DEFAULT;
	arg->fmt.pix.quantization = V4L2_QUANTIZATION_DEFAULT;
	arg->fmt.pix.xfer_func = V4L2_XFER_FUNC_DEFAULT;

	return 0;
}
/*
 * VIDIOC_S_FMT: negotiate and apply a format. Requires sufficient priority,
 * and implicitly acquires exclusive ownership of the proxy.
 */
int V4L2CameraProxy::vidioc_s_fmt(V4L2CameraFile *file, struct v4l2_format *arg)
{
	LOG(V4L2Compat, Debug)
		<< "[" << file->description() << "] " << __func__ << "()";

	if (!validateBufferType(arg->type))
		return -EINVAL;

	if (file->priority() < maxPriority())
		return -EBUSY;

	/* S_FMT makes this file the exclusive owner (fails if another owns). */
	int ret = acquire(file);
	if (ret < 0)
		return ret;

	ret = tryFormat(arg);
	if (ret < 0)
		return ret;

	Size size(arg->fmt.pix.width, arg->fmt.pix.height);
	V4L2PixelFormat v4l2Format = V4L2PixelFormat(arg->fmt.pix.pixelformat);
	ret = vcam_->configure(&streamConfig_, size, v4l2Format.toPixelFormat(),
			       bufferCount_);
	if (ret < 0)
		return -EINVAL;

	setFmtFromConfig(streamConfig_);

	return 0;
}

/* VIDIOC_TRY_FMT: negotiate a format without applying it. */
int V4L2CameraProxy::vidioc_try_fmt(V4L2CameraFile *file, struct v4l2_format *arg)
{
	LOG(V4L2Compat, Debug)
		<< "[" << file->description() << "] " << __func__ << "()";

	if (!validateBufferType(arg->type))
		return -EINVAL;

	int ret = tryFormat(arg);
	if (ret < 0)
		return ret;

	return 0;
}
/* Highest access priority among all files currently holding this proxy open. */
enum v4l2_priority V4L2CameraProxy::maxPriority()
{
	auto max = std::max_element(files_.begin(), files_.end(),
				    [](const V4L2CameraFile *a, const V4L2CameraFile *b) {
					    return a->priority() < b->priority();
				    });

	/* No open files: report the unset priority. */
	return max != files_.end() ? (*max)->priority() : V4L2_PRIORITY_UNSET;
}

/* VIDIOC_G_PRIORITY: return the highest priority across all open files. */
int V4L2CameraProxy::vidioc_g_priority(V4L2CameraFile *file, enum v4l2_priority *arg)
{
	LOG(V4L2Compat, Debug)
		<< "[" << file->description() << "] " << __func__ << "()";

	*arg = maxPriority();

	return 0;
}

/*
 * VIDIOC_S_PRIORITY: set this file's access priority. Rejected if a
 * higher-priority file exists or the requested value is out of range.
 */
int V4L2CameraProxy::vidioc_s_priority(V4L2CameraFile *file, enum v4l2_priority *arg)
{
	LOG(V4L2Compat, Debug)
		<< "[" << file->description() << "] " << __func__ << "()";

	if (*arg > V4L2_PRIORITY_RECORD)
		return -EINVAL;

	if (file->priority() < maxPriority())
		return -EBUSY;

	file->setPriority(*arg);

	return 0;
}
/*
 * VIDIOC_ENUMINPUT: a single camera input (index 0) is exposed, named after
 * the camera card string.
 */
int V4L2CameraProxy::vidioc_enuminput(V4L2CameraFile *file, struct v4l2_input *arg)
{
	LOG(V4L2Compat, Debug)
		<< "[" << file->description() << "] " << __func__ << "()";

	if (arg->index != 0)
		return -EINVAL;

	memset(arg, 0, sizeof(*arg));

	utils::strlcpy(reinterpret_cast<char *>(arg->name),
		       reinterpret_cast<char *>(capabilities_.card),
		       sizeof(arg->name));
	arg->type = V4L2_INPUT_TYPE_CAMERA;

	return 0;
}

/* VIDIOC_G_INPUT: the only input is always index 0. */
int V4L2CameraProxy::vidioc_g_input(V4L2CameraFile *file, int *arg)
{
	LOG(V4L2Compat, Debug)
		<< "[" << file->description() << "] " << __func__ << "()";

	*arg = 0;

	return 0;
}

/* VIDIOC_S_INPUT: only selecting input 0 is valid. */
int V4L2CameraProxy::vidioc_s_input(V4L2CameraFile *file, int *arg)
{
	LOG(V4L2Compat, Debug)
		<< "[" << file->description() << "] " << __func__ << "()";

	if (*arg != 0)
		return -EINVAL;

	return 0;
}
/* Release all buffers in the V4L2Camera and reset the local buffer state. */
void V4L2CameraProxy::freeBuffers()
{
	vcam_->freeBuffers();
	buffers_.clear();
	bufferCount_ = 0;
}

/*
 * VIDIOC_REQBUFS: allocate (count > 0) or free (count == 0) MMAP buffers.
 * Allocation acquires exclusive ownership for \a file; freeing releases it.
 */
int V4L2CameraProxy::vidioc_reqbufs(V4L2CameraFile *file, struct v4l2_requestbuffers *arg)
{
	LOG(V4L2Compat, Debug)
		<< "[" << file->description() << "] " << __func__ << "()";

	if (!validateBufferType(arg->type) ||
	    !validateMemoryType(arg->memory))
		return -EINVAL;

	LOG(V4L2Compat, Debug) << arg->count << " buffers requested ";

	if (file->priority() < maxPriority())
		return -EBUSY;

	/* Non-owners may only call reqbufs while nobody owns the proxy. */
	if (!hasOwnership(file) && owner_)
		return -EBUSY;

	arg->capabilities = V4L2_BUF_CAP_SUPPORTS_MMAP;
	arg->flags = 0;
	memset(arg->reserved, 0, sizeof(arg->reserved));

	if (arg->count == 0) {
		/* \todo Add buffer orphaning support */
		if (!mmaps_.empty())
			return -EBUSY;

		if (vcam_->isRunning())
			return -EBUSY;

		freeBuffers();
		release(file);

		return 0;
	}

	/* Reallocation: drop any previously allocated buffers first. */
	if (bufferCount_ > 0)
		freeBuffers();

	Size size(v4l2PixFormat_.width, v4l2PixFormat_.height);
	V4L2PixelFormat v4l2Format = V4L2PixelFormat(v4l2PixFormat_.pixelformat);
	int ret = vcam_->configure(&streamConfig_, size,
				   v4l2Format.toPixelFormat(), arg->count);
	if (ret < 0)
		return -EINVAL;

	setFmtFromConfig(streamConfig_);

	/* The camera may adjust the count; report the effective value. */
	arg->count = streamConfig_.bufferCount;
	bufferCount_ = arg->count;

	ret = vcam_->allocBuffers(arg->count);
	if (ret < 0) {
		arg->count = 0;
		return ret;
	}

	/* Initialize the v4l2_buffer records with contiguous mmap offsets. */
	buffers_.resize(arg->count);
	for (unsigned int i = 0; i < arg->count; i++) {
		struct v4l2_buffer buf = {};
		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.length = v4l2PixFormat_.sizeimage;
		buf.memory = V4L2_MEMORY_MMAP;
		buf.m.offset = i * v4l2PixFormat_.sizeimage;
		buf.index = i;
		buf.flags = V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC;

		buffers_[i] = buf;
	}

	LOG(V4L2Compat, Debug) << "Allocated " << arg->count << " buffers";

	acquire(file);

	return 0;
}
/*
 * VIDIOC_QUERYBUF: return the state of buffer \a arg->index, refreshed with
 * any completed-frame metadata. This is the one buffer ioctl that does not
 * require ownership of the proxy.
 */
int V4L2CameraProxy::vidioc_querybuf(V4L2CameraFile *file, struct v4l2_buffer *arg)
{
	LOG(V4L2Compat, Debug)
		<< "[" << file->description() << "] " << __func__ << "()";

	/*
	 * Validate the buffer type and index once. The index bound check was
	 * previously duplicated in two consecutive conditions; both returned
	 * -EINVAL, so a single combined check is equivalent.
	 */
	if (!validateBufferType(arg->type) ||
	    arg->index >= bufferCount_)
		return -EINVAL;

	updateBuffers();

	*arg = buffers_[arg->index];

	return 0;
}
/*
 * VIDIOC_PREPARE_BUF: mark a buffer as prepared. No actual preparation work
 * is needed in this adaptation layer; only the flag state is maintained.
 */
int V4L2CameraProxy::vidioc_prepare_buf(V4L2CameraFile *file, struct v4l2_buffer *arg)
{
	LOG(V4L2Compat, Debug)
		<< "[" << file->description() << "] " << __func__
		<< "(index=" << arg->index << ")";

	if (!hasOwnership(file))
		return -EBUSY;

	if (arg->index >= bufferCount_)
		return -EINVAL;

	/* Media requests are not supported by the compatibility layer. */
	if (arg->flags & V4L2_BUF_FLAG_REQUEST_FD)
		return -EINVAL;

	if (!validateBufferType(arg->type) ||
	    !validateMemoryType(arg->memory))
		return -EINVAL;

	struct v4l2_buffer &buffer = buffers_[arg->index];
	/* A buffer that is already queued or prepared cannot be prepared again. */
	if (buffer.flags & V4L2_BUF_FLAG_QUEUED ||
	    buffer.flags & V4L2_BUF_FLAG_PREPARED)
		return -EINVAL;

	buffer.flags |= V4L2_BUF_FLAG_PREPARED;

	arg->flags = buffer.flags;

	return 0;
}
/*
 * VIDIOC_QBUF: queue buffer \a arg->index for capture on the underlying
 * camera and mark it QUEUED. Requires ownership of the proxy.
 */
int V4L2CameraProxy::vidioc_qbuf(V4L2CameraFile *file, struct v4l2_buffer *arg)
{
	LOG(V4L2Compat, Debug)
		<< "[" << file->description() << "] " << __func__
		<< "(index=" << arg->index << ")";

	if (arg->index >= bufferCount_)
		return -EINVAL;

	/* A buffer cannot be queued twice. */
	if (buffers_[arg->index].flags & V4L2_BUF_FLAG_QUEUED)
		return -EINVAL;

	if (!hasOwnership(file))
		return -EBUSY;

	/*
	 * The index bound was already validated above, so only the buffer
	 * and memory types need checking here (previously the index check
	 * was redundantly repeated in this condition).
	 */
	if (!validateBufferType(arg->type) ||
	    !validateMemoryType(arg->memory))
		return -EINVAL;

	int ret = vcam_->qbuf(arg->index);
	if (ret < 0)
		return ret;

	buffers_[arg->index].flags |= V4L2_BUF_FLAG_QUEUED;

	arg->flags = buffers_[arg->index].flags;

	return ret;
}
/*
 * VIDIOC_DQBUF: dequeue the next completed buffer. In blocking mode the
 * proxy mutex \a lock is temporarily dropped while waiting for a buffer;
 * in non-blocking mode -EAGAIN is returned when no buffer is ready.
 */
int V4L2CameraProxy::vidioc_dqbuf(V4L2CameraFile *file, struct v4l2_buffer *arg,
				  Mutex *lock)
{
	LOG(V4L2Compat, Debug)
		<< "[" << file->description() << "] " << __func__ << "()";

	if (arg->index >= bufferCount_)
		return -EINVAL;

	if (!hasOwnership(file))
		return -EBUSY;

	if (!vcam_->isRunning())
		return -EINVAL;

	if (!validateBufferType(arg->type) ||
	    !validateMemoryType(arg->memory))
		return -EINVAL;

	if (!file->nonBlocking()) {
		/* Drop the proxy lock while blocking to avoid a deadlock. */
		lock->unlock();
		vcam_->waitForBufferAvailable();
		lock->lock();
	} else if (!vcam_->isBufferAvailable())
		return -EAGAIN;

	/*
	 * We need to check here again in case stream was turned off while we
	 * were blocked on waitForBufferAvailable().
	 */
	if (!vcam_->isRunning())
		return -EINVAL;

	updateBuffers();

	/* Buffers complete in FIFO order; hand out currentBuf_ next. */
	struct v4l2_buffer &buf = buffers_[currentBuf_];

	buf.flags &= ~(V4L2_BUF_FLAG_QUEUED | V4L2_BUF_FLAG_DONE | V4L2_BUF_FLAG_PREPARED);
	buf.length = sizeimage_;
	*arg = buf;

	currentBuf_ = (currentBuf_ + 1) % bufferCount_;

	/* Consume one eventfd count so poll() stops reporting POLLIN. */
	uint64_t data;
	int ret = ::read(file->efd(), &data, sizeof(data));
	if (ret != sizeof(data))
		LOG(V4L2Compat, Error) << "Failed to clear eventfd POLLIN";

	return 0;
}
/*
 * VIDIOC_EXPBUF: export a buffer as a duplicated dmabuf file descriptor,
 * honouring the O_CLOEXEC flag.
 */
int V4L2CameraProxy::vidioc_expbuf(V4L2CameraFile *file, struct v4l2_exportbuffer *arg)
{
	LOG(V4L2Compat, Debug)
		<< "[" << file->description() << "] " << __func__ << "()";

	if (!hasOwnership(file))
		return -EBUSY;

	/* \todo Verify that the memory type is MMAP when adding DMABUF support */
	if (!validateBufferType(arg->type))
		return -EINVAL;

	if (arg->index >= bufferCount_)
		return -EINVAL;

	if (arg->flags & ~(O_CLOEXEC | O_ACCMODE))
		return -EINVAL;

	memset(arg->reserved, 0, sizeof(arg->reserved));

	/* \todo honor the O_ACCMODE flags passed to this function */
	arg->fd = fcntl(vcam_->getBufferFd(arg->index),
			arg->flags & O_CLOEXEC ? F_DUPFD_CLOEXEC : F_DUPFD, 0);

	return 0;
}

/*
 * VIDIOC_STREAMON: start capture. Requires buffers to have been allocated,
 * sufficient priority, and ownership. Already-running streams succeed.
 */
int V4L2CameraProxy::vidioc_streamon(V4L2CameraFile *file, int *arg)
{
	LOG(V4L2Compat, Debug)
		<< "[" << file->description() << "] " << __func__ << "()";

	if (bufferCount_ == 0)
		return -EINVAL;

	if (!validateBufferType(*arg))
		return -EINVAL;

	if (file->priority() < maxPriority())
		return -EBUSY;

	if (!hasOwnership(file))
		return -EBUSY;

	if (vcam_->isRunning())
		return 0;

	/* Dequeueing restarts from buffer 0 on each stream start. */
	currentBuf_ = 0;

	return vcam_->streamOn();
}

/*
 * VIDIOC_STREAMOFF: stop capture and clear the QUEUED/DONE state of all
 * buffers. Permitted also when no file owns the proxy.
 */
int V4L2CameraProxy::vidioc_streamoff(V4L2CameraFile *file, int *arg)
{
	LOG(V4L2Compat, Debug)
		<< "[" << file->description() << "] " << __func__ << "()";

	if (!validateBufferType(*arg))
		return -EINVAL;

	if (file->priority() < maxPriority())
		return -EBUSY;

	if (!hasOwnership(file) && owner_)
		return -EBUSY;

	int ret = vcam_->streamOff();

	for (struct v4l2_buffer &buf : buffers_)
		buf.flags &= ~(V4L2_BUF_FLAG_QUEUED | V4L2_BUF_FLAG_DONE);

	return ret;
}
/* The set of ioctls implemented by this proxy; all others yield ENOTTY. */
const std::set<unsigned long> V4L2CameraProxy::supportedIoctls_ = {
	VIDIOC_QUERYCAP,
	VIDIOC_ENUM_FRAMESIZES,
	VIDIOC_ENUM_FMT,
	VIDIOC_G_FMT,
	VIDIOC_S_FMT,
	VIDIOC_TRY_FMT,
	VIDIOC_G_PRIORITY,
	VIDIOC_S_PRIORITY,
	VIDIOC_ENUMINPUT,
	VIDIOC_G_INPUT,
	VIDIOC_S_INPUT,
	VIDIOC_REQBUFS,
	VIDIOC_QUERYBUF,
	VIDIOC_PREPARE_BUF,
	VIDIOC_QBUF,
	VIDIOC_DQBUF,
	VIDIOC_EXPBUF,
	VIDIOC_STREAMON,
	VIDIOC_STREAMOFF,
};
/*
 * ioctl entry point for the proxy: validate the request and its argument,
 * then dispatch to the matching vidioc_* handler. Returns -1 with errno set
 * on failure, mimicking the C library ioctl() contract.
 */
int V4L2CameraProxy::ioctl(V4L2CameraFile *file, unsigned long longRequest, void *arg)
{
	MutexLocker locker(proxyMutex_);

	/*
	 * The Linux Kernel only processes 32 bits of an IOCTL.
	 *
	 * Prevent unexpected sign-extensions that could occur if applications
	 * use a signed int for the ioctl request, which would sign-extend to
	 * an incorrect value for unsigned longs on 64 bit architectures by
	 * explicitly casting as an unsigned int here.
	 */
	unsigned int request = longRequest;

	/* A write-direction ioctl must supply an argument to read from. */
	if (!arg && (_IOC_DIR(request) & _IOC_WRITE)) {
		errno = EFAULT;
		return -1;
	}

	if (supportedIoctls_.find(request) == supportedIoctls_.end()) {
		errno = ENOTTY;
		return -1;
	}

	/* A read-direction ioctl must supply an argument to write into. */
	if (!arg && (_IOC_DIR(request) & _IOC_READ)) {
		errno = EFAULT;
		return -1;
	}

	int ret;
	switch (request) {
	case VIDIOC_QUERYCAP:
		ret = vidioc_querycap(file, static_cast<struct v4l2_capability *>(arg));
		break;
	case VIDIOC_ENUM_FRAMESIZES:
		ret = vidioc_enum_framesizes(file, static_cast<struct v4l2_frmsizeenum *>(arg));
		break;
	case VIDIOC_ENUM_FMT:
		ret = vidioc_enum_fmt(file, static_cast<struct v4l2_fmtdesc *>(arg));
		break;
	case VIDIOC_G_FMT:
		ret = vidioc_g_fmt(file, static_cast<struct v4l2_format *>(arg));
		break;
	case VIDIOC_S_FMT:
		ret = vidioc_s_fmt(file, static_cast<struct v4l2_format *>(arg));
		break;
	case VIDIOC_TRY_FMT:
		ret = vidioc_try_fmt(file, static_cast<struct v4l2_format *>(arg));
		break;
	case VIDIOC_G_PRIORITY:
		ret = vidioc_g_priority(file, static_cast<enum v4l2_priority *>(arg));
		break;
	case VIDIOC_S_PRIORITY:
		ret = vidioc_s_priority(file, static_cast<enum v4l2_priority *>(arg));
		break;
	case VIDIOC_ENUMINPUT:
		ret = vidioc_enuminput(file, static_cast<struct v4l2_input *>(arg));
		break;
	case VIDIOC_G_INPUT:
		ret = vidioc_g_input(file, static_cast<int *>(arg));
		break;
	case VIDIOC_S_INPUT:
		ret = vidioc_s_input(file, static_cast<int *>(arg));
		break;
	case VIDIOC_REQBUFS:
		ret = vidioc_reqbufs(file, static_cast<struct v4l2_requestbuffers *>(arg));
		break;
	case VIDIOC_QUERYBUF:
		ret = vidioc_querybuf(file, static_cast<struct v4l2_buffer *>(arg));
		break;
	case VIDIOC_QBUF:
		ret = vidioc_qbuf(file, static_cast<struct v4l2_buffer *>(arg));
		break;
	case VIDIOC_DQBUF:
		/* dqbuf may drop proxyMutex_ while blocking; pass it down. */
		ret = vidioc_dqbuf(file, static_cast<struct v4l2_buffer *>(arg), &proxyMutex_);
		break;
	case VIDIOC_EXPBUF:
		ret = vidioc_expbuf(file, static_cast<struct v4l2_exportbuffer *>(arg));
		break;
	case VIDIOC_STREAMON:
		ret = vidioc_streamon(file, static_cast<int *>(arg));
		break;
	case VIDIOC_STREAMOFF:
		ret = vidioc_streamoff(file, static_cast<int *>(arg));
		break;
	default:
		ret = -ENOTTY;
		break;
	}

	/* Translate the negative-errno convention to the ioctl(2) one. */
	if (ret < 0) {
		errno = -ret;
		return -1;
	}

	return ret;
}
/* True if \a file currently holds exclusive ownership of this proxy. */
bool V4L2CameraProxy::hasOwnership(V4L2CameraFile *file)
{
	return owner_ == file;
}

/**
 * \brief Acquire exclusive ownership of the V4L2Camera
 *
 * \return Zero on success or if already acquired, and negative error on
 * failure.
 *
 * This is sufficient for poll()ing for buffers. Events, however, are signaled
 * on the file level, so all fds must be signaled. poll()ing from a different
 * fd than the one that locks the device is a corner case, and is currently not
 * supported.
 */
int V4L2CameraProxy::acquire(V4L2CameraFile *file)
{
	if (owner_ == file)
		return 0;

	if (owner_)
		return -EBUSY;

	/* Route buffer-completion signalling to the new owner's eventfd. */
	vcam_->bind(file->efd());

	owner_ = file;

	return 0;
}

/* Release ownership if held by \a file; no-op for any other file. */
void V4L2CameraProxy::release(V4L2CameraFile *file)
{
	if (owner_ != file)
		return;

	vcam_->unbind();

	owner_ = nullptr;
}
|
0 | repos/libcamera/src | repos/libcamera/src/v4l2/v4l2_camera_file.cpp | /* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2020, Google Inc.
*
* V4L2 compatibility camera file information
*/
#include "v4l2_camera_file.h"
#include <fcntl.h>
#include <stdlib.h>
#include <unistd.h>
#include <linux/videodev2.h>
#include "v4l2_camera_proxy.h"
using namespace libcamera;
/*
 * Construct the per-fd state for an opened compatibility camera, register it
 * with the proxy, and build a human-readable description used for logging:
 * an absolute path (resolved against the cwd or dirfd) plus the eventfd.
 */
V4L2CameraFile::V4L2CameraFile(int dirfd, const char *path, int efd,
			       bool nonBlocking, V4L2CameraProxy *proxy)
	: proxy_(proxy), nonBlocking_(nonBlocking), efd_(efd),
	  priority_(V4L2_PRIORITY_DEFAULT)
{
	proxy_->open(this);

	if (path[0] != '/') {
		/* Relative path: prefix with the directory it resolves from. */
		if (dirfd == AT_FDCWD) {
			char *cwd = getcwd(nullptr, 0);
			if (cwd) {
				description_ = std::string(cwd) + "/";
				free(cwd);
			} else {
				description_ = std::string("(unreachable)/");
			}
		} else {
			description_ = "(dirfd:" + std::to_string(dirfd) + ")/";
		}
	}

	description_ += std::string(path) + " (fd:" + std::to_string(efd) + ")";
}

V4L2CameraFile::~V4L2CameraFile()
{
	/* Deregister from the proxy; closes the camera when last file exits. */
	proxy_->close(this);
}

/* Human-readable identifier for this file, used in debug logging. */
const std::string &V4L2CameraFile::description() const
{
	return description_;
}
|
0 | repos/libcamera/src | repos/libcamera/src/v4l2/v4l2_camera_proxy.h | /* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2019, Google Inc.
*
* Proxy to V4L2 compatibility camera
*/
#pragma once
#include <linux/videodev2.h>
#include <map>
#include <memory>
#include <set>
#include <sys/types.h>
#include <vector>
#include <libcamera/base/mutex.h>
#include <libcamera/camera.h>
#include "v4l2_camera.h"
class V4L2CameraFile;
/*
 * Per-camera proxy implementing the V4L2 file operations (open, close, mmap,
 * munmap, ioctl) on top of a V4L2Camera. Shared by all V4L2CameraFile
 * instances that open the same camera node; access is serialized by
 * proxyMutex_ and exclusive streaming ownership is tracked via owner_.
 */
class V4L2CameraProxy
{
public:
	V4L2CameraProxy(unsigned int index, std::shared_ptr<libcamera::Camera> camera);

	int open(V4L2CameraFile *file) LIBCAMERA_TSA_EXCLUDES(proxyMutex_);
	void close(V4L2CameraFile *file) LIBCAMERA_TSA_EXCLUDES(proxyMutex_);
	void *mmap(V4L2CameraFile *file, void *addr, size_t length, int prot,
		   int flags, off64_t offset) LIBCAMERA_TSA_EXCLUDES(proxyMutex_);
	int munmap(V4L2CameraFile *file, void *addr, size_t length)
		LIBCAMERA_TSA_EXCLUDES(proxyMutex_);

	int ioctl(V4L2CameraFile *file, unsigned long request, void *arg)
		LIBCAMERA_TSA_EXCLUDES(proxyMutex_);

private:
	/* Validation helpers: only VIDEO_CAPTURE + MMAP are supported. */
	bool validateBufferType(uint32_t type);
	bool validateMemoryType(uint32_t memory);
	/* Refresh the cached pix format from a stream configuration. */
	void setFmtFromConfig(const libcamera::StreamConfiguration &streamConfig);
	/* Fill capabilities_ once at construction. */
	void querycap(std::shared_ptr<libcamera::Camera> camera);
	/* Negotiate a format without applying it (S_FMT/TRY_FMT helper). */
	int tryFormat(struct v4l2_format *arg);
	/* Highest priority among all files holding this proxy open. */
	enum v4l2_priority maxPriority();
	/* Fold completed-frame metadata into the v4l2_buffer records. */
	void updateBuffers();
	void freeBuffers();

	/* One handler per supported VIDIOC_* ioctl, dispatched by ioctl(). */
	int vidioc_querycap(V4L2CameraFile *file, struct v4l2_capability *arg);
	int vidioc_enum_framesizes(V4L2CameraFile *file, struct v4l2_frmsizeenum *arg);
	int vidioc_enum_fmt(V4L2CameraFile *file, struct v4l2_fmtdesc *arg);
	int vidioc_g_fmt(V4L2CameraFile *file, struct v4l2_format *arg);
	int vidioc_s_fmt(V4L2CameraFile *file, struct v4l2_format *arg);
	int vidioc_try_fmt(V4L2CameraFile *file, struct v4l2_format *arg);
	int vidioc_g_priority(V4L2CameraFile *file, enum v4l2_priority *arg);
	int vidioc_s_priority(V4L2CameraFile *file, enum v4l2_priority *arg);
	int vidioc_enuminput(V4L2CameraFile *file, struct v4l2_input *arg);
	int vidioc_g_input(V4L2CameraFile *file, int *arg);
	int vidioc_s_input(V4L2CameraFile *file, int *arg);
	int vidioc_reqbufs(V4L2CameraFile *file, struct v4l2_requestbuffers *arg);
	int vidioc_querybuf(V4L2CameraFile *file, struct v4l2_buffer *arg);
	int vidioc_prepare_buf(V4L2CameraFile *file, struct v4l2_buffer *arg);
	int vidioc_qbuf(V4L2CameraFile *file, struct v4l2_buffer *arg);
	/* May drop \a lock (proxyMutex_) while blocking for a buffer. */
	int vidioc_dqbuf(V4L2CameraFile *file, struct v4l2_buffer *arg,
			 libcamera::Mutex *lock) LIBCAMERA_TSA_REQUIRES(*lock);
	int vidioc_expbuf(V4L2CameraFile *file, struct v4l2_exportbuffer *arg);
	int vidioc_streamon(V4L2CameraFile *file, int *arg);
	int vidioc_streamoff(V4L2CameraFile *file, int *arg);

	bool hasOwnership(V4L2CameraFile *file);
	int acquire(V4L2CameraFile *file);
	void release(V4L2CameraFile *file);

	static const std::set<unsigned long> supportedIoctls_;

	/* Number of V4L2CameraFile instances currently holding this proxy open. */
	unsigned int refcount_;
	unsigned int index_;

	libcamera::StreamConfiguration streamConfig_;
	unsigned int bufferCount_;
	/* Index of the next buffer to hand out from dqbuf (FIFO order). */
	unsigned int currentBuf_;
	/* Per-buffer image size; also the mmap offset granularity. */
	unsigned int sizeimage_;

	struct v4l2_capability capabilities_;
	struct v4l2_pix_format v4l2PixFormat_;

	std::vector<struct v4l2_buffer> buffers_;
	/* Mapping address -> buffer index for active mmap()s. */
	std::map<void *, unsigned int> mmaps_;

	std::set<V4L2CameraFile *> files_;

	std::unique_ptr<V4L2Camera> vcam_;

	/*
	 * This is the exclusive owner of this V4L2CameraProxy instance.
	 * When there is no owner, anybody can call any ioctl before reqbufs.
	 * The first file to call reqbufs with count > 0 or s_fmt will become
	 * the owner, and when the owner calls reqbufs with count = 0 it will
	 * release ownership. Any buffer-related ioctl (except querybuf) or
	 * s_fmt that is called by a non-owner while there exists an owner
	 * will return -EBUSY.
	 */
	V4L2CameraFile *owner_;

	/* This mutex is to serialize access to the proxy. */
	libcamera::Mutex proxyMutex_;
};
|
0 | repos/libcamera/src | repos/libcamera/src/v4l2/v4l2_compat_manager.cpp | /* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2019, Google Inc.
*
* V4L2 compatibility manager
*/
#include "v4l2_compat_manager.h"
#include <dlfcn.h>
#include <fcntl.h>
#include <map>
#include <stdarg.h>
#include <string.h>
#include <sys/eventfd.h>
#include <sys/mman.h>
#include <sys/stat.h>
#include <sys/sysmacros.h>
#include <sys/types.h>
#include <unistd.h>
#include <libcamera/base/log.h>
#include <libcamera/base/utils.h>
#include <libcamera/camera.h>
#include <libcamera/camera_manager.h>
#include <libcamera/property_ids.h>
#include "v4l2_camera_file.h"
using namespace libcamera;
LOG_DEFINE_CATEGORY(V4L2Compat)
namespace {

/*
 * Resolve the next definition of \a name in the symbol lookup chain
 * (RTLD_NEXT), i.e. the real C library function that this LD_PRELOAD shim
 * shadows, and store it in \a func. May store nullptr if not found.
 */
template<typename T>
void get_symbol(T &func, const char *name)
{
	func = reinterpret_cast<T>(dlsym(RTLD_NEXT, name));
}

} /* namespace */
/*
 * Capture the real C library file operations at construction so the shim can
 * forward calls that don't target a libcamera camera. The camera manager is
 * started lazily, on the first open of a video device node.
 */
V4L2CompatManager::V4L2CompatManager()
	: cm_(nullptr)
{
	get_symbol(fops_.openat, "openat64");
	get_symbol(fops_.dup, "dup");
	get_symbol(fops_.close, "close");
	get_symbol(fops_.ioctl, "ioctl");
	get_symbol(fops_.mmap, "mmap64");
	get_symbol(fops_.munmap, "munmap");
}

V4L2CompatManager::~V4L2CompatManager()
{
	files_.clear();
	mmaps_.clear();

	if (cm_) {
		/* Proxies reference the camera manager; destroy them first. */
		proxies_.clear();
		cm_->stop();
		delete cm_;
		cm_ = nullptr;
	}
}
/*
 * Start the camera manager and create one V4L2CameraProxy per detected
 * camera. Returns 0 on success or a negative error code.
 */
int V4L2CompatManager::start()
{
	cm_ = new CameraManager();

	int ret = cm_->start();
	if (ret) {
		LOG(V4L2Compat, Error) << "Failed to start camera manager: "
				       << strerror(-ret);
		delete cm_;
		cm_ = nullptr;
		return ret;
	}

	LOG(V4L2Compat, Debug) << "Started camera manager";

	/*
	 * For each Camera registered in the system, a V4L2CameraProxy gets
	 * created here to wrap a camera device.
	 */
	auto cameras = cm_->cameras();
	for (auto [index, camera] : utils::enumerate(cameras)) {
		V4L2CameraProxy *proxy = new V4L2CameraProxy(index, camera);
		proxies_.emplace_back(proxy);
	}

	return 0;
}

/* Meyers singleton: one compat manager per process. */
V4L2CompatManager *V4L2CompatManager::instance()
{
	static V4L2CompatManager instance;
	return &instance;
}
/*
 * Look up the V4L2CameraFile tracked for \a fd, or nullptr if the fd does
 * not belong to the compatibility layer (i.e. a regular file descriptor).
 */
std::shared_ptr<V4L2CameraFile> V4L2CompatManager::cameraFile(int fd)
{
	auto file = files_.find(fd);
	if (file == files_.end())
		return nullptr;

	return file->second;
}

/*
 * Map an open video device node to the index of the camera that uses it, by
 * matching the node's device number against each camera's SystemDevices
 * property. Returns -1 when no camera matches.
 */
int V4L2CompatManager::getCameraIndex(int fd)
{
	struct stat statbuf;
	int ret = fstat(fd, &statbuf);
	if (ret < 0)
		return -1;

	const dev_t devnum = statbuf.st_rdev;

	/*
	 * Iterate each known camera and identify if it reports this nodes
	 * device number in its list of SystemDevices.
	 */
	auto cameras = cm_->cameras();
	for (auto [index, camera] : utils::enumerate(cameras)) {
		Span<const int64_t> devices = camera->properties()
						      .get(properties::SystemDevices)
						      .value_or(Span<int64_t>{});

		/*
		 * While there may be multiple cameras that could reference the
		 * same device node, we take a first match as a best effort for
		 * now.
		 *
		 * \todo Each camera can be accessed through any of the video
		 * device nodes that it uses. This may confuse applications.
		 * Consider reworking the V4L2 adaptation layer to instead
		 * expose each Camera instance through a single video device
		 * node (with a consistent and stable mapping). The other
		 * device nodes could possibly be hidden from the application
		 * by intercepting additional calls to the C library.
		 */
		for (const int64_t dev : devices) {
			if (dev == static_cast<int64_t>(devnum))
				return index;
		}
	}

	return -1;
}
/*
 * Intercepted openat(): when the target is a V4L2 video device node (char
 * device, major 81) backed by a libcamera camera, replace the real fd with
 * an eventfd tracked by the compat layer; otherwise pass through untouched.
 */
int V4L2CompatManager::openat(int dirfd, const char *path, int oflag, mode_t mode)
{
	int fd = fops_.openat(dirfd, path, oflag, mode);
	if (fd < 0)
		return fd;

	/* Pass through anything that isn't a video4linux character device. */
	struct stat statbuf;
	int ret = fstat(fd, &statbuf);
	if (ret < 0 || (statbuf.st_mode & S_IFMT) != S_IFCHR ||
	    major(statbuf.st_rdev) != 81)
		return fd;

	/* Lazily start the camera manager on first video device open. */
	if (!cm_)
		start();

	ret = getCameraIndex(fd);
	if (ret < 0) {
		LOG(V4L2Compat, Debug) << "No camera found for " << path;
		return fd;
	}

	/* The real device fd is no longer needed; the eventfd replaces it. */
	fops_.close(fd);

	/*
	 * The eventfd stands in for the device fd so poll()/select() work;
	 * it inherits the CLOEXEC/NONBLOCK semantics of the open flags.
	 */
	int efd = eventfd(0, EFD_SEMAPHORE |
			     ((oflag & O_CLOEXEC) ? EFD_CLOEXEC : 0) |
			     ((oflag & O_NONBLOCK) ? EFD_NONBLOCK : 0));
	if (efd < 0)
		return efd;

	V4L2CameraProxy *proxy = proxies_[ret].get();
	files_.emplace(efd, std::make_shared<V4L2CameraFile>(dirfd, path, efd,
							     oflag & O_NONBLOCK,
							     proxy));

	LOG(V4L2Compat, Debug) << "Opened " << path << " -> fd " << efd;
	return efd;
}
int V4L2CompatManager::dup(int oldfd)
{
int newfd = fops_.dup(oldfd);
if (newfd < 0)
return newfd;
auto file = files_.find(oldfd);
if (file != files_.end())
files_[newfd] = file->second;
return newfd;
}
int V4L2CompatManager::close(int fd)
{
auto file = files_.find(fd);
if (file != files_.end())
files_.erase(file);
/* We still need to close the eventfd. */
return fops_.close(fd);
}
void *V4L2CompatManager::mmap(void *addr, size_t length, int prot, int flags,
int fd, off64_t offset)
{
std::shared_ptr<V4L2CameraFile> file = cameraFile(fd);
if (!file)
return fops_.mmap(addr, length, prot, flags, fd, offset);
void *map = file->proxy()->mmap(file.get(), addr, length, prot, flags,
offset);
if (map == MAP_FAILED)
return map;
mmaps_[map] = file;
return map;
}
int V4L2CompatManager::munmap(void *addr, size_t length)
{
auto device = mmaps_.find(addr);
if (device == mmaps_.end())
return fops_.munmap(addr, length);
V4L2CameraFile *file = device->second.get();
int ret = file->proxy()->munmap(file, addr, length);
if (ret < 0)
return ret;
mmaps_.erase(device);
return 0;
}
/*
 * Intercepted ioctl(): forward requests on emulated camera descriptors to
 * the camera proxy, and everything else to the real ioctl().
 */
int V4L2CompatManager::ioctl(int fd, unsigned long request, void *arg)
{
	std::shared_ptr<V4L2CameraFile> file = cameraFile(fd);
	if (file)
		return file->proxy()->ioctl(file.get(), request, arg);

	return fops_.ioctl(fd, request, arg);
}
|
0 | repos/libcamera/src | repos/libcamera/src/v4l2/libcamerify.in | #!/bin/sh
# SPDX-License-Identifier: GPL-2.0-or-later
# Print usage information.
help() {
	echo "$0: Load an application with libcamera V4L2 compatibility layer preload"
	echo "  $0 [OPTIONS...] executable [args]"
	echo "  -d, --debug	Increase log level"
	echo "  -h, --help	Show this help and exit"
}

debug=0
while [ $# -gt 0 ]; do
	case $1 in
	-d|--debug)
		debug=$((debug+1))
		;;
	-h|--help)
		help;
		exit 0
		;;
	--)
		shift;
		break;;
	-*)
		echo "Unrecognised option: $1";
		help;
		exit 1
		;;
	*)
		break
		;;
	esac
	shift
done

# An executable to run is mandatory; exec with an empty argument list would
# silently do nothing in some shells.
if [ $# -lt 1 ]; then
	echo "Missing executable argument" >&2
	help
	exit 1
fi

# Map the -d count to libcamera log levels: one -d enables V4L2Compat debug
# output, two or more enable full debug logging for all categories.
[ "$debug" -gt 0 ] && loglevel=V4L2Compat:0
[ "$debug" -gt 1 ] && loglevel=0
[ "$loglevel" != "" ] && export LIBCAMERA_LOG_LEVELS=$loglevel

# Append the compatibility layer to any preexisting LD_PRELOAD content.
if [ "$LD_PRELOAD" = "" ] ; then
	LD_PRELOAD='@LIBCAMERA_V4L2_SO@'
else
	LD_PRELOAD="$LD_PRELOAD "'@LIBCAMERA_V4L2_SO@'
fi
export LD_PRELOAD

exec "$@"
|
0 | repos/libcamera/src | repos/libcamera/src/android/camera_capabilities.cpp | /* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2021, Google Inc.
*
* Camera static properties manager
*/
#include "camera_capabilities.h"
#include <algorithm>
#include <array>
#include <cmath>
#include <map>
#include <type_traits>
#include <hardware/camera3.h>
#include <libcamera/base/log.h>
#include <libcamera/control_ids.h>
#include <libcamera/controls.h>
#include <libcamera/property_ids.h>
#include "libcamera/internal/formats.h"
using namespace libcamera;
LOG_DECLARE_CATEGORY(HAL)
namespace {
/*
 * \var camera3Resolutions
 * \brief The list of image resolutions commonly supported by Android
 *
 * The following are defined as mandatory to be supported by the Android
 * Camera3 specification: (320x240), (640x480), (1280x720), (1920x1080).
 *
 * The following 4:3 resolutions are defined as optional, but commonly
 * supported by Android devices: (1280x960), (1600x1200).
 */
const std::vector<Size> camera3Resolutions = {
	{ 320, 240 },
	{ 640, 480 },
	{ 1280, 720 },
	{ 1280, 960 },
	{ 1600, 1200 },
	{ 1920, 1080 }
};
/*
 * \struct Camera3Format
 * \brief Data associated with an Android format identifier
 * \var libcameraFormats List of libcamera pixel formats compatible with the
 * Android format
 * \var mandatory Whether support for the Android format is mandatory
 * \var name The human-readable representation of the Android format code
 */
struct Camera3Format {
	std::vector<PixelFormat> libcameraFormats;
	bool mandatory;
	const char *name;
};
/*
 * \var camera3FormatsMap
 * \brief Associate Android format code with ancillary data
 */
const std::map<int, const Camera3Format> camera3FormatsMap = {
	{
		HAL_PIXEL_FORMAT_BLOB, {
			{ formats::MJPEG },
			true,
			"BLOB"
		}
	}, {
		HAL_PIXEL_FORMAT_YCbCr_420_888, {
			{ formats::NV12, formats::NV21 },
			true,
			"YCbCr_420_888"
		}
	}, {
		/*
		 * \todo Translate IMPLEMENTATION_DEFINED inspecting the gralloc
		 * usage flag. For now, copy the YCbCr_420 configuration.
		 */
		HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, {
			{ formats::NV12, formats::NV21 },
			true,
			"IMPLEMENTATION_DEFINED"
		}
	}, {
		HAL_PIXEL_FORMAT_RAW10, {
			{
				formats::SBGGR10_CSI2P,
				formats::SGBRG10_CSI2P,
				formats::SGRBG10_CSI2P,
				formats::SRGGB10_CSI2P
			},
			false,
			"RAW10"
		}
	}, {
		HAL_PIXEL_FORMAT_RAW12, {
			{
				formats::SBGGR12_CSI2P,
				formats::SGBRG12_CSI2P,
				formats::SGRBG12_CSI2P,
				formats::SRGGB12_CSI2P
			},
			false,
			"RAW12"
		}
	}, {
		HAL_PIXEL_FORMAT_RAW16, {
			{
				formats::SBGGR16,
				formats::SGBRG16,
				formats::SGRBG16,
				formats::SRGGB16
			},
			false,
			"RAW16"
		}
	},
};
/* Human-readable names for the Android supported hardware levels. */
const std::map<camera_metadata_enum_android_info_supported_hardware_level, std::string>
hwLevelStrings = {
	{ ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED, "LIMITED" },
	{ ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL, "FULL" },
	{ ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY, "LEGACY" },
	{ ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3, "LEVEL_3" },
	{ ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL, "EXTERNAL" },
};
/* Which bound of a libcamera ControlInfo to use when populating metadata. */
enum class ControlRange {
	Min,
	Def,
	Max,
};
/**
 * \brief Set Android metadata from libcamera ControlInfo or a default value
 * \tparam T Type of the control in libcamera
 * \tparam U Type of the metadata in Android
 * \param[in] metadata Android metadata pack to add the control value to
 * \param[in] tag Android metadata tag
 * \param[in] controlsInfo libcamera ControlInfoMap from which to find the control info
 * \param[in] control libcamera ControlId to find from \a controlsInfo
 * \param[in] controlRange Whether to use the min, def, or max value from the control info
 * \param[in] defaultValue The value to set in \a metadata if \a control is not found
 *
 * Set the Android metadata entry in \a metadata with tag \a tag based on the
 * control info found for the libcamera control \a control in the libcamera
 * ControlInfoMap \a controlsInfo. If no libcamera ControlInfo is found, then
 * the Android metadata entry is set to \a defaultValue.
 *
 * This function is for scalar values.
 */
template<typename T, typename U>
U setMetadata(CameraMetadata *metadata, uint32_t tag,
	      const ControlInfoMap &controlsInfo, const Control<T> &control,
	      enum ControlRange controlRange, const U defaultValue)
{
	U value = defaultValue;
	const auto &info = controlsInfo.find(&control);
	if (info != controlsInfo.end()) {
		switch (controlRange) {
		case ControlRange::Min:
			value = static_cast<U>(info->second.min().template get<T>());
			break;
		case ControlRange::Def:
			value = static_cast<U>(info->second.def().template get<T>());
			break;
		case ControlRange::Max:
			value = static_cast<U>(info->second.max().template get<T>());
			break;
		}
	}
	metadata->addEntry(tag, value);
	return value;
}
/**
 * \brief Set Android metadata from libcamera ControlInfo or a default value
 * \tparam T Type of the control in libcamera
 * \tparam U Type of the metadata in Android
 * \param[in] metadata Android metadata pack to add the control value to
 * \param[in] tag Android metadata tag
 * \param[in] controlsInfo libcamera ControlInfoMap from which to find the control info
 * \param[in] control libcamera ControlId to find from \a controlsInfo
 * \param[in] defaultVector The value to set in \a metadata if \a control is not found
 *
 * Set the Android metadata entry in \a metadata with tag \a tag based on the
 * control info found for the libcamera control \a control in the libcamera
 * ControlInfoMap \a controlsInfo. If no libcamera ControlInfo is found, then
 * the Android metadata entry is set to \a defaultVector.
 *
 * This function is for vector values.
 */
template<typename T, typename U>
std::vector<U> setMetadata(CameraMetadata *metadata, uint32_t tag,
			   const ControlInfoMap &controlsInfo,
			   const Control<T> &control,
			   const std::vector<U> &defaultVector)
{
	const auto &info = controlsInfo.find(&control);
	if (info == controlsInfo.end()) {
		metadata->addEntry(tag, defaultVector);
		return defaultVector;
	}
	/*
	 * Reserve, don't size-construct: constructing the vector with a size
	 * would pre-fill it with value-initialized elements that the
	 * push_back() calls below would then append to, reporting a vector of
	 * twice the expected length with leading zero entries to Android.
	 */
	std::vector<U> values;
	values.reserve(info->second.values().size());
	for (const auto &value : info->second.values())
		values.push_back(static_cast<U>(value.template get<T>()));
	metadata->addEntry(tag, values);
	return values;
}
} /* namespace */
/*
 * Verify whether the static metadata satisfies the MANUAL_SENSOR capability
 * requirements. Only the AE-related requirements are checked so far, hence
 * the function unconditionally reports the capability as unavailable.
 */
bool CameraCapabilities::validateManualSensorCapability()
{
	const char *noMode = "Manual sensor capability unavailable: ";

	const bool hasAeModeOff =
		staticMetadata_->entryContains<uint8_t>(ANDROID_CONTROL_AE_AVAILABLE_MODES,
							ANDROID_CONTROL_AE_MODE_OFF);
	if (!hasAeModeOff) {
		LOG(HAL, Info) << noMode << "missing AE mode off";
		return false;
	}

	const bool hasAeLock =
		staticMetadata_->entryContains<uint8_t>(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
							ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE);
	if (!hasAeLock) {
		LOG(HAL, Info) << noMode << "missing AE lock";
		return false;
	}

	/*
	 * \todo Return true here after we satisfy all the requirements:
	 * https://developer.android.com/reference/android/hardware/camera2/CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR
	 * Manual frame duration control
	 *     android.sensor.frameDuration
	 *     android.sensor.info.maxFrameDuration
	 * Manual exposure control
	 *     android.sensor.exposureTime
	 *     android.sensor.info.exposureTimeRange
	 * Manual sensitivity control
	 *     android.sensor.sensitivity
	 *     android.sensor.info.sensitivityRange
	 * Manual lens control (if the lens is adjustable)
	 *     android.lens.*
	 * Manual flash control (if a flash unit is present)
	 *     android.flash.*
	 * Manual black level locking
	 *     android.blackLevel.lock
	 * Auto exposure lock
	 *     android.control.aeLock
	 */
	return false;
}
/*
 * Verify whether the static metadata satisfies the MANUAL_POST_PROCESSING
 * capability requirements. Only the AWB-related requirements are checked so
 * far, hence the function unconditionally reports the capability as
 * unavailable.
 */
bool CameraCapabilities::validateManualPostProcessingCapability()
{
	const char *noMode = "Manual post processing capability unavailable: ";

	const bool hasAwbModeOff =
		staticMetadata_->entryContains<uint8_t>(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
							ANDROID_CONTROL_AWB_MODE_OFF);
	if (!hasAwbModeOff) {
		LOG(HAL, Info) << noMode << "missing AWB mode off";
		return false;
	}

	const bool hasAwbLock =
		staticMetadata_->entryContains<uint8_t>(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
							ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE);
	if (!hasAwbLock) {
		LOG(HAL, Info) << noMode << "missing AWB lock";
		return false;
	}

	/*
	 * \todo return true here after we satisfy all the requirements:
	 * https://developer.android.com/reference/android/hardware/camera2/CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING
	 * Manual tonemap control
	 *     android.tonemap.curve
	 *     android.tonemap.mode
	 *     android.tonemap.maxCurvePoints
	 *     android.tonemap.gamma
	 *     android.tonemap.presetCurve
	 * Manual white balance control
	 *     android.colorCorrection.transform
	 *     android.colorCorrection.gains
	 * Manual lens shading map control
	 *     android.shading.mode
	 *     android.statistics.lensShadingMapMode
	 *     android.statistics.lensShadingMap
	 *     android.lens.info.shadingMapSize
	 * Manual aberration correction control (if aberration correction is supported)
	 *     android.colorCorrection.aberrationMode
	 *     android.colorCorrection.availableAberrationModes
	 * Auto white balance lock
	 *     android.control.awbLock
	 */
	return false;
}
/*
 * Verify whether the static metadata satisfies the BURST_CAPTURE capability
 * requirements. AE lock, AWB lock and a bounded sync latency are checked;
 * the remaining Android requirements are not implemented yet, so the
 * capability is unconditionally reported as unavailable.
 */
bool CameraCapabilities::validateBurstCaptureCapability()
{
	camera_metadata_ro_entry_t entry;
	bool found;

	const char *noMode = "Burst capture capability unavailable: ";

	if (!staticMetadata_->entryContains<uint8_t>(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
						     ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE)) {
		LOG(HAL, Info) << noMode << "missing AE lock";
		return false;
	}

	if (!staticMetadata_->entryContains<uint8_t>(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
						     ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE)) {
		LOG(HAL, Info) << noMode << "missing AWB lock";
		return false;
	}

	/* The max sync latency must be present and in the [0, 4] range. */
	found = staticMetadata_->getEntry(ANDROID_SYNC_MAX_LATENCY, &entry);
	if (!found || *entry.data.i32 < 0 || 4 < *entry.data.i32) {
		LOG(HAL, Info)
			<< noMode << "max sync latency is "
			<< (found ? std::to_string(*entry.data.i32) : "not present");
		return false;
	}

	/*
	 * \todo return true here after we satisfy all the requirements
	 * https://developer.android.com/reference/android/hardware/camera2/CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE
	 */
	return false;
}
std::set<camera_metadata_enum_android_request_available_capabilities>
CameraCapabilities::computeCapabilities()
{
std::set<camera_metadata_enum_android_request_available_capabilities>
capabilities;
capabilities.insert(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
if (validateManualSensorCapability()) {
capabilities.insert(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
/* The requirements for READ_SENSOR_SETTINGS are a subset of MANUAL_SENSOR */
capabilities.insert(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
}
if (validateManualPostProcessingCapability())
capabilities.insert(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
if (validateBurstCaptureCapability())
capabilities.insert(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
if (rawStreamAvailable_)
capabilities.insert(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
return capabilities;
}
/*
 * Compute the Android supported hardware level from the reported
 * capabilities. Start from FULL and demote to LIMITED whenever a FULL
 * requirement is missing, logging the reason for each demotion.
 */
void CameraCapabilities::computeHwLevel(
	const std::set<camera_metadata_enum_android_request_available_capabilities> &caps)
{
	const char *noFull = "Hardware level FULL unavailable: ";
	camera_metadata_enum_android_info_supported_hardware_level
		hwLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;

	auto demote = [&](const char *reason) {
		LOG(HAL, Info) << noFull << reason;
		hwLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
	};

	if (!caps.count(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR))
		demote("missing manual sensor");

	if (!caps.count(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING))
		demote("missing manual post processing");

	if (!caps.count(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE))
		demote("missing burst capture");

	/* FULL additionally requires a max sync latency of exactly 0. */
	camera_metadata_ro_entry_t entry;
	bool found = staticMetadata_->getEntry(ANDROID_SYNC_MAX_LATENCY, &entry);
	if (!found || *entry.data.i32 != 0)
		demote("missing or invalid max sync latency");

	hwLevel_ = hwLevel;
}
/*
 * \brief Initialize the camera capabilities for \a camera
 * \param[in] camera The libcamera Camera to inspect
 * \param[in] orientation The rotation reported in ANDROID_SENSOR_ORIENTATION
 * \param[in] facing The Android camera facing attribute
 * \return 0 on success or a negative error code otherwise
 *
 * The camera is temporarily acquired while the stream configurations and
 * static metadata are collected, and released before returning on both the
 * success and error paths.
 */
int CameraCapabilities::initialize(std::shared_ptr<Camera> camera,
				   int orientation, int facing)
{
	camera_ = camera;
	orientation_ = orientation;
	facing_ = facing;
	rawStreamAvailable_ = false;
	maxFrameDuration_ = 0;

	/* Acquire the camera and initialize available stream configurations. */
	int ret = camera_->acquire();
	if (ret) {
		LOG(HAL, Error) << "Failed to temporarily acquire the camera";
		return ret;
	}

	ret = initializeStreamConfigurations();
	if (ret) {
		/* Release on the error path too: acquire/release must pair. */
		camera_->release();
		return ret;
	}

	ret = initializeStaticMetadata();
	camera_->release();
	return ret;
}
/*
 * Probe which of the candidate \a resolutions the camera supports for the
 * YUV/RGB pixel format \a pixelFormat, using a Viewfinder configuration and
 * validate() as the oracle.
 */
std::vector<Size>
CameraCapabilities::initializeYUVResolutions(const PixelFormat &pixelFormat,
					     const std::vector<Size> &resolutions)
{
	std::vector<Size> supported;

	std::unique_ptr<CameraConfiguration> config =
		camera_->generateConfiguration({ StreamRole::Viewfinder });
	if (!config) {
		LOG(HAL, Error) << "Failed to get supported YUV resolutions";
		return supported;
	}

	StreamConfiguration &cfg = config->at(0);

	for (const Size &size : resolutions) {
		/*
		 * Re-apply the pixel format on every iteration, as validate()
		 * is allowed to adjust the configuration fields.
		 */
		cfg.pixelFormat = pixelFormat;
		cfg.size = size;

		if (config->validate() != CameraConfiguration::Valid) {
			LOG(HAL, Debug) << cfg.toString() << " not supported";
			continue;
		}

		LOG(HAL, Debug) << cfg.toString() << " supported";
		supported.push_back(size);
	}

	return supported;
}
std::vector<Size>
CameraCapabilities::initializeRawResolutions(const PixelFormat &pixelFormat)
{
std::vector<Size> supportedResolutions;
std::unique_ptr<CameraConfiguration> cameraConfig =
camera_->generateConfiguration({ StreamRole::Raw });
if (!cameraConfig) {
LOG(HAL, Error) << "Failed to get supported Raw resolutions";
return supportedResolutions;
}
StreamConfiguration &cfg = cameraConfig->at(0);
const StreamFormats &formats = cfg.formats();
supportedResolutions = formats.sizes(pixelFormat);
return supportedResolutions;
}
/*
 * Initialize the format conversion map to translate from Android format
 * identifier to libcamera pixel formats and fill in the list of supported
 * stream configurations to be reported to the Android camera framework through
 * the camera static metadata.
 *
 * Returns 0 on success or a negative error code otherwise.
 */
int CameraCapabilities::initializeStreamConfigurations()
{
	/*
	 * Get the maximum output resolutions
	 * \todo Get this from the camera properties once defined
	 */
	std::unique_ptr<CameraConfiguration> cameraConfig =
		camera_->generateConfiguration({ StreamRole::StillCapture });
	if (!cameraConfig) {
		LOG(HAL, Error) << "Failed to get maximum resolution";
		return -EINVAL;
	}
	StreamConfiguration &cfg = cameraConfig->at(0);

	/*
	 * \todo JPEG - Adjust the maximum available resolution by taking the
	 * JPEG encoder requirements into account (alignment and aspect ratio).
	 */
	const Size maxRes = cfg.size;
	LOG(HAL, Debug) << "Maximum supported resolution: " << maxRes;

	/*
	 * Build the list of supported image resolutions.
	 *
	 * The resolutions listed in camera3Resolution are supported, up to the
	 * camera maximum resolution.
	 *
	 * Augment the list by adding resolutions calculated from the camera
	 * maximum one.
	 */
	std::vector<Size> cameraResolutions;
	std::copy_if(camera3Resolutions.begin(), camera3Resolutions.end(),
		     std::back_inserter(cameraResolutions),
		     [&](const Size &res) { return res < maxRes; });

	/*
	 * The Camera3 specification suggests adding 1/2 and 1/4 of the maximum
	 * resolution.
	 */
	for (unsigned int divider = 2;; divider <<= 1) {
		Size derivedSize{
			maxRes.width / divider,
			maxRes.height / divider,
		};

		/* Stop below the smallest mandatory resolution (320x240). */
		if (derivedSize.width < 320 ||
		    derivedSize.height < 240)
			break;

		cameraResolutions.push_back(derivedSize);
	}
	cameraResolutions.push_back(maxRes);

	/* Remove duplicated entries from the list of supported resolutions. */
	std::sort(cameraResolutions.begin(), cameraResolutions.end());
	auto last = std::unique(cameraResolutions.begin(), cameraResolutions.end());
	cameraResolutions.erase(last, cameraResolutions.end());

	/*
	 * Build the list of supported camera formats.
	 *
	 * To each Android format a list of compatible libcamera formats is
	 * associated. The first libcamera format that tests successful is added
	 * to the format translation map used when configuring the streams.
	 * It is then tested against the list of supported camera resolutions to
	 * build the stream configuration map reported through the camera static
	 * metadata.
	 */
	Size maxJpegSize;
	for (const auto &format : camera3FormatsMap) {
		int androidFormat = format.first;
		const Camera3Format &camera3Format = format.second;
		const std::vector<PixelFormat> &libcameraFormats =
			camera3Format.libcameraFormats;

		LOG(HAL, Debug) << "Trying to map Android format "
				<< camera3Format.name;

		/*
		 * JPEG is always supported, either produced directly by the
		 * camera, or encoded in the HAL.
		 */
		if (androidFormat == HAL_PIXEL_FORMAT_BLOB) {
			formatsMap_[androidFormat] = formats::MJPEG;
			LOG(HAL, Debug) << "Mapped Android format "
					<< camera3Format.name << " to "
					<< formats::MJPEG
					<< " (fixed mapping)";
			continue;
		}

		/*
		 * Test the libcamera formats that can produce images
		 * compatible with the format defined by Android.
		 */
		PixelFormat mappedFormat;
		for (const PixelFormat &pixelFormat : libcameraFormats) {

			LOG(HAL, Debug) << "Testing " << pixelFormat;

			/*
			 * The stream configuration size can be adjusted,
			 * not the pixel format.
			 *
			 * \todo This could be simplified once all pipeline
			 * handlers will report the StreamFormats list of
			 * supported formats.
			 */
			cfg.pixelFormat = pixelFormat;

			CameraConfiguration::Status status = cameraConfig->validate();
			if (status != CameraConfiguration::Invalid &&
			    cfg.pixelFormat == pixelFormat) {
				mappedFormat = pixelFormat;
				break;
			}
		}

		if (!mappedFormat.isValid()) {
			/* If the format is not mandatory, skip it. */
			if (!camera3Format.mandatory)
				continue;

			LOG(HAL, Error)
				<< "Failed to map mandatory Android format "
				<< camera3Format.name << " ("
				<< utils::hex(androidFormat) << "): aborting";
			return -EINVAL;
		}

		/*
		 * Record the mapping and then proceed to generate the
		 * stream configurations map, by testing the image resolutions.
		 */
		formatsMap_[androidFormat] = mappedFormat;
		LOG(HAL, Debug) << "Mapped Android format "
				<< camera3Format.name << " to "
				<< mappedFormat;

		std::vector<Size> resolutions;
		const PixelFormatInfo &info = PixelFormatInfo::info(mappedFormat);
		switch (info.colourEncoding) {
		case PixelFormatInfo::ColourEncodingRAW:
			/* Only 16-bit raw formats are advertised. */
			if (info.bitsPerPixel != 16)
				continue;

			rawStreamAvailable_ = true;
			resolutions = initializeRawResolutions(mappedFormat);
			break;

		case PixelFormatInfo::ColourEncodingYUV:
		case PixelFormatInfo::ColourEncodingRGB:
			/*
			 * We support enumerating RGB streams here to allow
			 * mapping IMPLEMENTATION_DEFINED format to RGB.
			 */
			resolutions = initializeYUVResolutions(mappedFormat,
							       cameraResolutions);
			break;
		}

		for (const Size &res : resolutions) {
			/*
			 * Configure the Camera with the collected format and
			 * resolution to get an updated list of controls.
			 *
			 * \todo Avoid the need to configure the camera when
			 * redesigning the configuration API.
			 */
			cfg.size = res;
			int ret = camera_->configure(cameraConfig.get());
			if (ret)
				return ret;

			const ControlInfoMap &controls = camera_->controls();
			const auto frameDurations = controls.find(
				&controls::FrameDurationLimits);
			if (frameDurations == controls.end()) {
				LOG(HAL, Error)
					<< "Camera does not report frame durations";
				return -EINVAL;
			}

			/* Scale by 1000: Android durations are in nanoseconds. */
			int64_t minFrameDuration = frameDurations->second.min().get<int64_t>() * 1000;
			int64_t maxFrameDuration = frameDurations->second.max().get<int64_t>() * 1000;

			/*
			 * Cap min frame duration to 30 FPS with 1% tolerance.
			 *
			 * 30 frames per second has been validated as the most
			 * opportune frame rate for quality tuning, and power
			 * vs performances budget on Intel IPU3-based
			 * Chromebooks.
			 *
			 * \todo This is a platform-specific decision that needs
			 * to be abstracted and delegated to the configuration
			 * file.
			 *
			 * \todo libcamera only allows to control frame duration
			 * through the per-request controls::FrameDuration
			 * control. If we cap the durations here, we should be
			 * capable of configuring the camera to operate at such
			 * duration without requiring to have the FrameDuration
			 * control to be specified for each Request. Defer this
			 * to the in-development configuration API rework.
			 */
			int64_t minFrameDurationCap = 1e9 / 30.0;
			if (minFrameDuration < minFrameDurationCap) {
				float tolerance =
					(minFrameDurationCap - minFrameDuration) * 100.0 / minFrameDurationCap;

				/*
				 * If the tolerance is less than 1%, do not cap
				 * the frame duration.
				 */
				if (tolerance > 1.0)
					minFrameDuration = minFrameDurationCap;
			}

			/*
			 * Calculate FPS as CTS does and adjust the minimum
			 * frame duration accordingly: see
			 * Camera2SurfaceViewTestCase.java:getSuitableFpsRangeForDuration()
			 */
			minFrameDuration =
				1e9 / static_cast<unsigned int>(floor(1e9 / minFrameDuration + 0.05f));

			streamConfigurations_.push_back({
				res, androidFormat, minFrameDuration, maxFrameDuration,
			});

			/*
			 * If the format is HAL_PIXEL_FORMAT_YCbCr_420_888
			 * from which JPEG is produced, add an entry for
			 * the JPEG stream.
			 *
			 * \todo Wire the JPEG encoder to query the supported
			 * sizes provided a list of formats it can encode.
			 *
			 * \todo Support JPEG streams produced by the camera
			 * natively.
			 *
			 * \todo HAL_PIXEL_FORMAT_BLOB is a 'stalling' format,
			 * its duration should take into account the time
			 * required for the YUV to JPEG encoding. For now
			 * use the same frame durations as collected for
			 * the YUV/RGB streams.
			 */
			if (androidFormat == HAL_PIXEL_FORMAT_YCbCr_420_888) {
				streamConfigurations_.push_back({
					res, HAL_PIXEL_FORMAT_BLOB,
					minFrameDuration, maxFrameDuration,
				});
				maxJpegSize = std::max(maxJpegSize, res);
			}

			maxFrameDuration_ = std::max(maxFrameDuration_,
						     maxFrameDuration);
		}

		/*
		 * \todo Calculate the maximum JPEG buffer size by asking the
		 * encoder giving the maximum frame size required.
		 *
		 * NOTE(review): 1.5 bytes/pixel presumably sizes the buffer to
		 * a full NV12 frame as a worst case — confirm against the
		 * encoder's actual requirements.
		 */
		maxJpegBufferSize_ = maxJpegSize.width * maxJpegSize.height * 1.5;
	}

	LOG(HAL, Debug) << "Collected stream configuration map: ";
	for (const auto &entry : streamConfigurations_)
		LOG(HAL, Debug) << "{ " << entry.resolution << " - "
				<< utils::hex(entry.androidFormat) << " }";

	return 0;
}
int CameraCapabilities::initializeStaticMetadata()
{
staticMetadata_ = std::make_unique<CameraMetadata>(64, 1024);
if (!staticMetadata_->isValid()) {
LOG(HAL, Error) << "Failed to allocate static metadata";
staticMetadata_.reset();
return -EINVAL;
}
/*
* Generate and apply a new configuration for the Viewfinder role to
* collect control limits and properties from a known state.
*/
std::unique_ptr<CameraConfiguration> cameraConfig =
camera_->generateConfiguration({ StreamRole::Viewfinder });
if (!cameraConfig) {
LOG(HAL, Error) << "Failed to generate camera configuration";
staticMetadata_.reset();
return -ENODEV;
}
int ret = camera_->configure(cameraConfig.get());
if (ret) {
LOG(HAL, Error) << "Failed to initialize the camera state";
staticMetadata_.reset();
return ret;
}
const ControlInfoMap &controlsInfo = camera_->controls();
const ControlList &properties = camera_->properties();
availableCharacteristicsKeys_ = {
ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
ANDROID_CONTROL_AE_AVAILABLE_MODES,
ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
ANDROID_CONTROL_AE_COMPENSATION_RANGE,
ANDROID_CONTROL_AE_COMPENSATION_STEP,
ANDROID_CONTROL_AE_LOCK_AVAILABLE,
ANDROID_CONTROL_AF_AVAILABLE_MODES,
ANDROID_CONTROL_AVAILABLE_EFFECTS,
ANDROID_CONTROL_AVAILABLE_MODES,
ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
ANDROID_CONTROL_AWB_AVAILABLE_MODES,
ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
ANDROID_CONTROL_MAX_REGIONS,
ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
ANDROID_FLASH_INFO_AVAILABLE,
ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
ANDROID_JPEG_MAX_SIZE,
ANDROID_LENS_FACING,
ANDROID_LENS_INFO_AVAILABLE_APERTURES,
ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
ANDROID_SCALER_CROPPING_TYPE,
ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
ANDROID_SENSOR_ORIENTATION,
ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
ANDROID_SYNC_MAX_LATENCY,
};
availableRequestKeys_ = {
ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
ANDROID_CONTROL_AE_ANTIBANDING_MODE,
ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
ANDROID_CONTROL_AE_LOCK,
ANDROID_CONTROL_AE_MODE,
ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
ANDROID_CONTROL_AF_MODE,
ANDROID_CONTROL_AF_TRIGGER,
ANDROID_CONTROL_AWB_LOCK,
ANDROID_CONTROL_AWB_MODE,
ANDROID_CONTROL_CAPTURE_INTENT,
ANDROID_CONTROL_EFFECT_MODE,
ANDROID_CONTROL_MODE,
ANDROID_CONTROL_SCENE_MODE,
ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
ANDROID_FLASH_MODE,
ANDROID_JPEG_ORIENTATION,
ANDROID_JPEG_QUALITY,
ANDROID_JPEG_THUMBNAIL_QUALITY,
ANDROID_JPEG_THUMBNAIL_SIZE,
ANDROID_LENS_APERTURE,
ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
ANDROID_NOISE_REDUCTION_MODE,
ANDROID_SCALER_CROP_REGION,
ANDROID_STATISTICS_FACE_DETECT_MODE
};
availableResultKeys_ = {
ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
ANDROID_CONTROL_AE_ANTIBANDING_MODE,
ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
ANDROID_CONTROL_AE_LOCK,
ANDROID_CONTROL_AE_MODE,
ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
ANDROID_CONTROL_AE_STATE,
ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
ANDROID_CONTROL_AF_MODE,
ANDROID_CONTROL_AF_STATE,
ANDROID_CONTROL_AF_TRIGGER,
ANDROID_CONTROL_AWB_LOCK,
ANDROID_CONTROL_AWB_MODE,
ANDROID_CONTROL_AWB_STATE,
ANDROID_CONTROL_CAPTURE_INTENT,
ANDROID_CONTROL_EFFECT_MODE,
ANDROID_CONTROL_MODE,
ANDROID_CONTROL_SCENE_MODE,
ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
ANDROID_FLASH_MODE,
ANDROID_FLASH_STATE,
ANDROID_JPEG_GPS_COORDINATES,
ANDROID_JPEG_GPS_PROCESSING_METHOD,
ANDROID_JPEG_GPS_TIMESTAMP,
ANDROID_JPEG_ORIENTATION,
ANDROID_JPEG_QUALITY,
ANDROID_JPEG_SIZE,
ANDROID_JPEG_THUMBNAIL_QUALITY,
ANDROID_JPEG_THUMBNAIL_SIZE,
ANDROID_LENS_APERTURE,
ANDROID_LENS_FOCAL_LENGTH,
ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
ANDROID_LENS_STATE,
ANDROID_NOISE_REDUCTION_MODE,
ANDROID_REQUEST_PIPELINE_DEPTH,
ANDROID_SCALER_CROP_REGION,
ANDROID_SENSOR_EXPOSURE_TIME,
ANDROID_SENSOR_FRAME_DURATION,
ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
ANDROID_SENSOR_TEST_PATTERN_MODE,
ANDROID_SENSOR_TIMESTAMP,
ANDROID_STATISTICS_FACE_DETECT_MODE,
ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
ANDROID_STATISTICS_SCENE_FLICKER,
};
/* Color correction static metadata. */
{
std::vector<uint8_t> data;
data.reserve(3);
const auto &infoMap = controlsInfo.find(&controls::draft::ColorCorrectionAberrationMode);
if (infoMap != controlsInfo.end()) {
for (const auto &value : infoMap->second.values())
data.push_back(value.get<int32_t>());
} else {
data.push_back(ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF);
}
staticMetadata_->addEntry(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
data);
}
/* Control static metadata. */
std::vector<uint8_t> aeAvailableAntiBandingModes = {
ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,
ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ,
ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ,
ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO,
};
staticMetadata_->addEntry(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
aeAvailableAntiBandingModes);
std::vector<uint8_t> aeAvailableModes = {
ANDROID_CONTROL_AE_MODE_ON,
};
staticMetadata_->addEntry(ANDROID_CONTROL_AE_AVAILABLE_MODES,
aeAvailableModes);
std::vector<int32_t> aeCompensationRange = {
0, 0,
};
staticMetadata_->addEntry(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
aeCompensationRange);
const camera_metadata_rational_t aeCompensationStep[] = {
{ 0, 1 }
};
staticMetadata_->addEntry(ANDROID_CONTROL_AE_COMPENSATION_STEP,
aeCompensationStep);
std::vector<uint8_t> availableAfModes = {
ANDROID_CONTROL_AF_MODE_OFF,
};
staticMetadata_->addEntry(ANDROID_CONTROL_AF_AVAILABLE_MODES,
availableAfModes);
std::vector<uint8_t> availableEffects = {
ANDROID_CONTROL_EFFECT_MODE_OFF,
};
staticMetadata_->addEntry(ANDROID_CONTROL_AVAILABLE_EFFECTS,
availableEffects);
std::vector<uint8_t> availableSceneModes = {
ANDROID_CONTROL_SCENE_MODE_DISABLED,
};
staticMetadata_->addEntry(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
availableSceneModes);
std::vector<uint8_t> availableStabilizationModes = {
ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF,
};
staticMetadata_->addEntry(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
availableStabilizationModes);
/*
* \todo Inspect the camera capabilities to report the available
* AWB modes. Default to AUTO as CTS tests require it.
*/
std::vector<uint8_t> availableAwbModes = {
ANDROID_CONTROL_AWB_MODE_AUTO,
};
staticMetadata_->addEntry(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
availableAwbModes);
std::vector<int32_t> availableMaxRegions = {
0, 0, 0,
};
staticMetadata_->addEntry(ANDROID_CONTROL_MAX_REGIONS,
availableMaxRegions);
std::vector<uint8_t> sceneModesOverride = {
ANDROID_CONTROL_AE_MODE_ON,
ANDROID_CONTROL_AWB_MODE_AUTO,
ANDROID_CONTROL_AF_MODE_OFF,
};
staticMetadata_->addEntry(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
sceneModesOverride);
uint8_t aeLockAvailable = ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
staticMetadata_->addEntry(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
aeLockAvailable);
uint8_t awbLockAvailable = ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
staticMetadata_->addEntry(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
awbLockAvailable);
char availableControlModes = ANDROID_CONTROL_MODE_AUTO;
staticMetadata_->addEntry(ANDROID_CONTROL_AVAILABLE_MODES,
availableControlModes);
/* JPEG static metadata. */
/*
* Create the list of supported thumbnail sizes by inspecting the
* available JPEG resolutions collected in streamConfigurations_ and
* generate one entry for each aspect ratio.
*
* The JPEG thumbnailer can freely scale, so pick an arbitrary
* (160, 160) size as the bounding rectangle, which is then cropped to
* the different supported aspect ratios.
*/
constexpr Size maxJpegThumbnail(160, 160);
std::vector<Size> thumbnailSizes;
thumbnailSizes.push_back({ 0, 0 });
for (const auto &entry : streamConfigurations_) {
if (entry.androidFormat != HAL_PIXEL_FORMAT_BLOB)
continue;
Size thumbnailSize = maxJpegThumbnail
.boundedToAspectRatio({ entry.resolution.width,
entry.resolution.height });
thumbnailSizes.push_back(thumbnailSize);
}
std::sort(thumbnailSizes.begin(), thumbnailSizes.end());
auto last = std::unique(thumbnailSizes.begin(), thumbnailSizes.end());
thumbnailSizes.erase(last, thumbnailSizes.end());
/* Transform sizes in to a list of integers that can be consumed. */
std::vector<int32_t> thumbnailEntries;
thumbnailEntries.reserve(thumbnailSizes.size() * 2);
for (const auto &size : thumbnailSizes) {
thumbnailEntries.push_back(size.width);
thumbnailEntries.push_back(size.height);
}
staticMetadata_->addEntry(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
thumbnailEntries);
staticMetadata_->addEntry(ANDROID_JPEG_MAX_SIZE, maxJpegBufferSize_);
/* Sensor static metadata. */
std::array<int32_t, 2> pixelArraySize;
{
const Size &size = properties.get(properties::PixelArraySize).value_or(Size{});
pixelArraySize[0] = size.width;
pixelArraySize[1] = size.height;
staticMetadata_->addEntry(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
pixelArraySize);
}
const auto &cellSize = properties.get<Size>(properties::UnitCellSize);
if (cellSize) {
std::array<float, 2> physicalSize{
cellSize->width * pixelArraySize[0] / 1e6f,
cellSize->height * pixelArraySize[1] / 1e6f
};
staticMetadata_->addEntry(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
physicalSize);
}
{
const Span<const Rectangle> rects =
properties.get(properties::PixelArrayActiveAreas).value_or(Span<const Rectangle>{});
std::vector<int32_t> data{
static_cast<int32_t>(rects[0].x),
static_cast<int32_t>(rects[0].y),
static_cast<int32_t>(rects[0].width),
static_cast<int32_t>(rects[0].height),
};
staticMetadata_->addEntry(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
data);
}
int32_t sensitivityRange[] = {
32, 2400,
};
staticMetadata_->addEntry(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
sensitivityRange);
/* Report the color filter arrangement if the camera reports it. */
const auto &filterArr = properties.get(properties::draft::ColorFilterArrangement);
if (filterArr)
staticMetadata_->addEntry(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
*filterArr);
const auto &exposureInfo = controlsInfo.find(&controls::ExposureTime);
if (exposureInfo != controlsInfo.end()) {
int64_t exposureTimeRange[2] = {
exposureInfo->second.min().get<int32_t>() * 1000LL,
exposureInfo->second.max().get<int32_t>() * 1000LL,
};
staticMetadata_->addEntry(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
exposureTimeRange, 2);
}
staticMetadata_->addEntry(ANDROID_SENSOR_ORIENTATION, orientation_);
std::vector<int32_t> testPatternModes = {
ANDROID_SENSOR_TEST_PATTERN_MODE_OFF
};
const auto &testPatternsInfo =
controlsInfo.find(&controls::draft::TestPatternMode);
if (testPatternsInfo != controlsInfo.end()) {
const auto &values = testPatternsInfo->second.values();
ASSERT(!values.empty());
for (const auto &value : values) {
switch (value.get<int32_t>()) {
case controls::draft::TestPatternModeOff:
/*
* ANDROID_SENSOR_TEST_PATTERN_MODE_OFF is
* already in testPatternModes.
*/
break;
case controls::draft::TestPatternModeSolidColor:
testPatternModes.push_back(
ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR);
break;
case controls::draft::TestPatternModeColorBars:
testPatternModes.push_back(
ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS);
break;
case controls::draft::TestPatternModeColorBarsFadeToGray:
testPatternModes.push_back(
ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY);
break;
case controls::draft::TestPatternModePn9:
testPatternModes.push_back(
ANDROID_SENSOR_TEST_PATTERN_MODE_PN9);
break;
case controls::draft::TestPatternModeCustom1:
/* We don't support this yet. */
break;
default:
LOG(HAL, Error) << "Unknown test pattern mode: "
<< value.get<int32_t>();
continue;
}
}
}
staticMetadata_->addEntry(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
testPatternModes);
uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN;
staticMetadata_->addEntry(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
timestampSource);
staticMetadata_->addEntry(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
maxFrameDuration_);
/* Statistics static metadata. */
uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
staticMetadata_->addEntry(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
faceDetectMode);
int32_t maxFaceCount = 0;
staticMetadata_->addEntry(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
maxFaceCount);
{
std::vector<uint8_t> data;
data.reserve(2);
const auto &infoMap = controlsInfo.find(&controls::draft::LensShadingMapMode);
if (infoMap != controlsInfo.end()) {
for (const auto &value : infoMap->second.values())
data.push_back(value.get<int32_t>());
} else {
data.push_back(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF);
}
staticMetadata_->addEntry(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
data);
}
/* Sync static metadata. */
setMetadata(staticMetadata_.get(), ANDROID_SYNC_MAX_LATENCY,
controlsInfo, controls::draft::MaxLatency,
ControlRange::Def,
ANDROID_SYNC_MAX_LATENCY_UNKNOWN);
/* Flash static metadata. */
char flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
staticMetadata_->addEntry(ANDROID_FLASH_INFO_AVAILABLE,
flashAvailable);
/* Lens static metadata. */
std::vector<float> lensApertures = {
2.53 / 100,
};
staticMetadata_->addEntry(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
lensApertures);
uint8_t lensFacing;
switch (facing_) {
default:
case CAMERA_FACING_FRONT:
lensFacing = ANDROID_LENS_FACING_FRONT;
break;
case CAMERA_FACING_BACK:
lensFacing = ANDROID_LENS_FACING_BACK;
break;
case CAMERA_FACING_EXTERNAL:
lensFacing = ANDROID_LENS_FACING_EXTERNAL;
break;
}
staticMetadata_->addEntry(ANDROID_LENS_FACING, lensFacing);
std::vector<float> lensFocalLengths = {
1,
};
staticMetadata_->addEntry(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
lensFocalLengths);
std::vector<uint8_t> opticalStabilizations = {
ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF,
};
staticMetadata_->addEntry(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
opticalStabilizations);
float hypeFocalDistance = 0;
staticMetadata_->addEntry(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
hypeFocalDistance);
float minFocusDistance = 0;
staticMetadata_->addEntry(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
minFocusDistance);
/* Noise reduction modes. */
{
std::vector<uint8_t> data;
data.reserve(5);
const auto &infoMap = controlsInfo.find(&controls::draft::NoiseReductionMode);
if (infoMap != controlsInfo.end()) {
for (const auto &value : infoMap->second.values())
data.push_back(value.get<int32_t>());
} else {
data.push_back(ANDROID_NOISE_REDUCTION_MODE_OFF);
}
staticMetadata_->addEntry(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
data);
}
/* Scaler static metadata. */
/*
* \todo The digital zoom factor is a property that depends on the
* desired output configuration and the sensor frame size input to the
* ISP. This information is not available to the Android HAL, not at
* initialization time at least.
*
* As a workaround rely on pipeline handlers initializing the
* ScalerCrop control with the camera default configuration and use the
* maximum and minimum crop rectangles to calculate the digital zoom
* factor.
*/
float maxZoom = 1.0f;
const auto scalerCrop = controlsInfo.find(&controls::ScalerCrop);
if (scalerCrop != controlsInfo.end()) {
Rectangle min = scalerCrop->second.min().get<Rectangle>();
Rectangle max = scalerCrop->second.max().get<Rectangle>();
maxZoom = std::min(1.0f * max.width / min.width,
1.0f * max.height / min.height);
}
staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
maxZoom);
std::vector<uint32_t> availableStreamConfigurations;
std::vector<int64_t> minFrameDurations;
int maxYUVFps = 0;
Size maxYUVSize;
availableStreamConfigurations.reserve(streamConfigurations_.size() * 4);
minFrameDurations.reserve(streamConfigurations_.size() * 4);
for (const auto &entry : streamConfigurations_) {
/*
* Filter out YUV streams not capable of running at 30 FPS.
*
* This requirement comes from CTS RecordingTest failures most
* probably related to a requirement of the camcoder video
* recording profile. Inspecting the Intel IPU3 HAL
* implementation confirms this but no reference has been found
* in the metadata documentation.
*/
unsigned int fps =
static_cast<unsigned int>(floor(1e9 / entry.minFrameDurationNsec));
if (entry.androidFormat != HAL_PIXEL_FORMAT_BLOB && fps < 30)
continue;
/*
* Collect the FPS of the maximum YUV output size to populate
* AE_AVAILABLE_TARGET_FPS_RANGE
*/
if (entry.androidFormat == HAL_PIXEL_FORMAT_YCbCr_420_888 &&
entry.resolution > maxYUVSize) {
maxYUVSize = entry.resolution;
maxYUVFps = fps;
}
/* Stream configuration map. */
availableStreamConfigurations.push_back(entry.androidFormat);
availableStreamConfigurations.push_back(entry.resolution.width);
availableStreamConfigurations.push_back(entry.resolution.height);
availableStreamConfigurations.push_back(
ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
/* Per-stream durations. */
minFrameDurations.push_back(entry.androidFormat);
minFrameDurations.push_back(entry.resolution.width);
minFrameDurations.push_back(entry.resolution.height);
minFrameDurations.push_back(entry.minFrameDurationNsec);
LOG(HAL, Debug)
<< "Output Stream: " << utils::hex(entry.androidFormat)
<< " (" << entry.resolution << ")["
<< entry.minFrameDurationNsec << "]"
<< "@" << fps;
}
staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
availableStreamConfigurations);
staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
minFrameDurations);
/*
* Register to the camera service {min, max} and {max, max} with
* 'max' being the larger YUV stream maximum frame rate and 'min' being
* the globally minimum frame rate rounded to the next largest integer
* as the camera service expects the camera maximum frame duration to be
* smaller than 10^9 / minFps.
*/
int32_t minFps = std::ceil(1e9 / maxFrameDuration_);
int32_t availableAeFpsTarget[] = {
minFps, maxYUVFps, maxYUVFps, maxYUVFps,
};
staticMetadata_->addEntry(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
availableAeFpsTarget);
std::vector<int64_t> availableStallDurations;
for (const auto &entry : streamConfigurations_) {
if (entry.androidFormat != HAL_PIXEL_FORMAT_BLOB)
continue;
availableStallDurations.push_back(entry.androidFormat);
availableStallDurations.push_back(entry.resolution.width);
availableStallDurations.push_back(entry.resolution.height);
availableStallDurations.push_back(entry.minFrameDurationNsec);
}
staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
availableStallDurations);
uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
staticMetadata_->addEntry(ANDROID_SCALER_CROPPING_TYPE, croppingType);
/* Request static metadata. */
int32_t partialResultCount = 1;
staticMetadata_->addEntry(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
partialResultCount);
{
/* Default the value to 2 if not reported by the camera. */
uint8_t maxPipelineDepth = 2;
const auto &infoMap = controlsInfo.find(&controls::draft::PipelineDepth);
if (infoMap != controlsInfo.end())
maxPipelineDepth = infoMap->second.max().get<int32_t>();
staticMetadata_->addEntry(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
maxPipelineDepth);
}
/* LIMITED does not support reprocessing. */
uint32_t maxNumInputStreams = 0;
staticMetadata_->addEntry(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
maxNumInputStreams);
/* Number of { RAW, YUV, JPEG } supported output streams */
int32_t numOutStreams[] = { rawStreamAvailable_, 2, 1 };
staticMetadata_->addEntry(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
numOutStreams);
/* Check capabilities */
capabilities_ = computeCapabilities();
/* This *must* be uint8_t. */
std::vector<uint8_t> capsVec(capabilities_.begin(), capabilities_.end());
staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, capsVec);
computeHwLevel(capabilities_);
staticMetadata_->addEntry(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL, hwLevel_);
LOG(HAL, Info)
<< "Hardware level: " << hwLevelStrings.find(hwLevel_)->second;
staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
std::vector<int32_t>(availableCharacteristicsKeys_.begin(),
availableCharacteristicsKeys_.end()));
staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
std::vector<int32_t>(availableRequestKeys_.begin(),
availableRequestKeys_.end()));
staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
std::vector<int32_t>(availableResultKeys_.begin(),
availableResultKeys_.end()));
if (!staticMetadata_->isValid()) {
LOG(HAL, Error) << "Failed to construct static metadata";
staticMetadata_.reset();
return -EINVAL;
}
if (staticMetadata_->resized()) {
auto [entryCount, dataCount] = staticMetadata_->usage();
LOG(HAL, Info)
<< "Static metadata resized: " << entryCount
<< " entries and " << dataCount << " bytes used";
}
return 0;
}
/* Translate Android format code to libcamera pixel format. */
PixelFormat CameraCapabilities::toPixelFormat(int format) const
{
	/*
	 * Look the Android format code up in the translation table built at
	 * initialization time. An unknown code is reported and mapped to an
	 * invalid (default-constructed) PixelFormat.
	 */
	if (const auto entry = formatsMap_.find(format); entry != formatsMap_.end())
		return entry->second;

	LOG(HAL, Error) << "Requested format " << utils::hex(format)
			<< " not supported";

	return PixelFormat();
}
std::unique_ptr<CameraMetadata> CameraCapabilities::requestTemplateManual() const
{
	/*
	 * A manual capture template is only meaningful when the camera
	 * advertises the MANUAL_SENSOR capability.
	 */
	if (!capabilities_.count(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
		LOG(HAL, Error) << "Manual template not supported";
		return nullptr;
	}

	/*
	 * The manual template is currently identical to the preview one; a
	 * construction failure propagates as nullptr.
	 */
	return requestTemplatePreview();
}
std::unique_ptr<CameraMetadata> CameraCapabilities::requestTemplatePreview() const
{
	/*
	 * Hint the number of entries and bytes to be allocated up front. The
	 * hint is deliberately a little larger than strictly required so the
	 * container doesn't need to grow while entries are added below;
	 * CameraMetadata would resize on the fly anyway if it did.
	 */
	auto requestTemplate = std::make_unique<CameraMetadata>(22, 38);
	if (!requestTemplate->isValid())
		return nullptr;

	/* Fetch the FPS ranges registered in the static metadata. */
	camera_metadata_ro_entry_t entry;
	bool found = staticMetadata_->getEntry(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
					       &entry);
	if (!found) {
		LOG(HAL, Error) << "Cannot create capture template without FPS range";
		return nullptr;
	}

	/*
	 * The AE_AVAILABLE_TARGET_FPS_RANGE static metadata is assembled as
	 * {{min, max} {max, max}}; the preview template uses the first,
	 * variable, range.
	 */
	requestTemplate->addEntry(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
				  entry.data.i32, 2);

	/*
	 * Pick the first non-zero thumbnail size from the static metadata.
	 * The list starts with (0, 0), so the size at offset 2 is used.
	 */
	found = staticMetadata_->getEntry(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
					  &entry);
	ASSERT(found && entry.count >= 4);
	requestTemplate->addEntry(ANDROID_JPEG_THUMBNAIL_SIZE,
				  entry.data.i32 + 2, 2);

	/* Auto-exposure defaults: AE on, no compensation, idle and unlocked. */
	const uint8_t aeModeOn = ANDROID_CONTROL_AE_MODE_ON;
	requestTemplate->addEntry(ANDROID_CONTROL_AE_MODE, aeModeOn);

	const int32_t aeExpCompensation = 0;
	requestTemplate->addEntry(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
				  aeExpCompensation);

	const uint8_t aePrecaptureIdle = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
	requestTemplate->addEntry(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
				  aePrecaptureIdle);

	const uint8_t aeUnlocked = ANDROID_CONTROL_AE_LOCK_OFF;
	requestTemplate->addEntry(ANDROID_CONTROL_AE_LOCK, aeUnlocked);

	const uint8_t aeAntibandingAuto = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
	requestTemplate->addEntry(ANDROID_CONTROL_AE_ANTIBANDING_MODE,
				  aeAntibandingAuto);

	/* Auto-focus defaults: off and idle. */
	const uint8_t afModeOff = ANDROID_CONTROL_AF_MODE_OFF;
	requestTemplate->addEntry(ANDROID_CONTROL_AF_MODE, afModeOff);

	const uint8_t afTriggerIdle = ANDROID_CONTROL_AF_TRIGGER_IDLE;
	requestTemplate->addEntry(ANDROID_CONTROL_AF_TRIGGER, afTriggerIdle);

	/* Auto-white-balance defaults: auto and unlocked. */
	const uint8_t awbModeAuto = ANDROID_CONTROL_AWB_MODE_AUTO;
	requestTemplate->addEntry(ANDROID_CONTROL_AWB_MODE, awbModeAuto);

	const uint8_t awbUnlocked = ANDROID_CONTROL_AWB_LOCK_OFF;
	requestTemplate->addEntry(ANDROID_CONTROL_AWB_LOCK, awbUnlocked);

	/* Remaining processing blocks are disabled by default. */
	const uint8_t flashOff = ANDROID_FLASH_MODE_OFF;
	requestTemplate->addEntry(ANDROID_FLASH_MODE, flashOff);

	const uint8_t faceDetectOff = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
	requestTemplate->addEntry(ANDROID_STATISTICS_FACE_DETECT_MODE,
				  faceDetectOff);

	const uint8_t noiseReductionOff = ANDROID_NOISE_REDUCTION_MODE_OFF;
	requestTemplate->addEntry(ANDROID_NOISE_REDUCTION_MODE,
				  noiseReductionOff);

	const uint8_t aberrationOff = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
	requestTemplate->addEntry(ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
				  aberrationOff);

	const uint8_t controlModeAuto = ANDROID_CONTROL_MODE_AUTO;
	requestTemplate->addEntry(ANDROID_CONTROL_MODE, controlModeAuto);

	/* Lens defaults, matching the static metadata. */
	const float apertureDefault = 2.53 / 100;
	requestTemplate->addEntry(ANDROID_LENS_APERTURE, apertureDefault);

	const uint8_t oisOff = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
	requestTemplate->addEntry(ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
				  oisOff);

	const uint8_t intentPreview = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
	requestTemplate->addEntry(ANDROID_CONTROL_CAPTURE_INTENT,
				  intentPreview);

	return requestTemplate;
}
std::unique_ptr<CameraMetadata> CameraCapabilities::requestTemplateStill() const
{
	/*
	 * The still-capture template is currently identical to the preview
	 * template; a construction failure propagates as nullptr.
	 */
	return requestTemplatePreview();
}
std::unique_ptr<CameraMetadata> CameraCapabilities::requestTemplateVideo() const
{
std::unique_ptr<CameraMetadata> previewTemplate = requestTemplatePreview();
if (!previewTemplate)
return nullptr;
/*
* The video template requires a fixed FPS range. Everything else
* stays the same as the preview template.
*/
camera_metadata_ro_entry_t entry;
staticMetadata_->getEntry(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
&entry);
/*
* Assume the AE_AVAILABLE_TARGET_FPS_RANGE static metadata
* has been assembled as {{min, max} {max, max}}.
*/
previewTemplate->updateEntry(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
entry.data.i32 + 2, 2);
return previewTemplate;
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.