Image Retrieval System by Colour from the Web Using C++ with openFrameworks

Showing an image and replacing it with another image if something happens

Hmm... Without running the code, I'd bet the problem is that you rely on the coordinates (XRc and its siblings) being zero to choose which image to use. They are all initialized to 0, so the first run goes fine, but you never reset them to zero, do you? So after they have all been changed once by detecting the three colors, your test becomes useless. Perhaps you can reset them all to zero whenever a color is detected.

And maybe you don't need the boolean at all...

What do you think of this?

PSEUDO

//global
PImage[] imgs = new PImage[3];

int imageToDisplay = 0;

//all the stuff...

if (XRc != 0 || YRc != 0) { // color green detected
  // not sure this will work, but the idea is something like this
  XRh = YRh = XRc2 = YRc2 = 0;
  imageToDisplay = 0;
}

if (XRh != 0 || YRh != 0) { // color blue detected
  XRc = YRc = XRc2 = YRc2 = 0;
  imageToDisplay = 1;
}

if (XRc2 != 0 || YRc2 != 0) { // color red detected
  XRh = YRh = XRc = YRc = 0;
  imageToDisplay = 2;
}

// at the appropriate time...
image(imgs[imageToDisplay], x, y);

OpenCV L*a*b* to RGB conversion produces grayscale output

Never mind. I managed to solve it myself and it was very enjoyable. For anyone who's interested and has the same trouble I once had, here's the algorithm and some code (a small usage sketch follows the two steps):

  1. Convert CIE L*a*b* to XYZ. This is necessary because CIE L*a*b* is not a linear color space, so there is no direct conversion to RGB.

    // assumes #include <opencv2/opencv.hpp> and using namespace cv;
    void invf(cv::Mat& input, cv::Mat& output); // forward declaration, defined below

    void CIElabtoXYZ(cv::Mat& image, cv::Mat& output){
        float WhitePoint[3] = {0.950456, 1, 1.088754}; // D65 reference white
        Mat fX = Mat::zeros(image.size(), CV_32FC1);
        Mat fY = Mat::zeros(image.size(), CV_32FC1);
        Mat fZ = Mat::zeros(image.size(), CV_32FC1);
        Mat invfX = Mat::zeros(image.size(), CV_32FC1);
        Mat invfY = Mat::zeros(image.size(), CV_32FC1);
        Mat invfZ = Mat::zeros(image.size(), CV_32FC1);

        // f(X), f(Y), f(Z) from the L*, a*, b* channels
        for(int x = 0; x < image.rows; x++){
            for(int y = 0; y < image.cols; y++){
                fY.at<float>(x,y) = (image.at<Vec3f>(x,y)[0] + 16.0) / 116.0;
                fX.at<float>(x,y) = fY.at<float>(x,y) + image.at<Vec3f>(x,y)[1] / 500.0;
                fZ.at<float>(x,y) = fY.at<float>(x,y) - image.at<Vec3f>(x,y)[2] / 200.0;
            }
        }
        invf(fX, invfX);
        invf(fY, invfY);
        invf(fZ, invfZ);
        for(int x = 0; x < image.rows; x++){
            for(int y = 0; y < image.cols; y++){
                output.at<Vec3f>(x,y)[0] = WhitePoint[0] * invfX.at<float>(x,y);
                output.at<Vec3f>(x,y)[1] = WhitePoint[1] * invfY.at<float>(x,y);
                output.at<Vec3f>(x,y)[2] = WhitePoint[2] * invfZ.at<float>(x,y);
            }
        }
    }

    // inverse of the Lab f() function: cube above the threshold, linear branch below it
    void invf(cv::Mat& input, cv::Mat& output){
        for(int x = 0; x < input.rows; x++){
            for(int y = 0; y < input.cols; y++){
                output.at<float>(x,y) = pow(input.at<float>(x,y), 3);
                if(output.at<float>(x,y) < 0.008856){
                    output.at<float>(x,y) = (input.at<float>(x,y) - 4.0/29.0) * (108.0/841.0);
                }
            }
        }
    }
  2. Convert XYZ to RGB

    void XYZtoRGB(cv::Mat& input, cv::Mat& output){
        float data[3][3] = {{3.240479, -1.53715, -0.498535}, {-0.969256, 1.875992, 0.041556}, {0.055648, -0.204043, 1.057311}};
        Mat T = Mat(3, 3, CV_32FC1, &data);
        Mat R = Mat::zeros(input.size(), CV_32FC1);
        Mat G = Mat::zeros(input.size(), CV_32FC1);
        Mat B = Mat::zeros(input.size(), CV_32FC1);

        for(int x = 0; x < input.rows; x++){
            for(int y = 0; y < input.cols; y++){
                // each output channel uses one row of T
                R.at<float>(x,y) = T.at<float>(0,0)*input.at<Vec3f>(x,y)[0] + T.at<float>(0,1)*input.at<Vec3f>(x,y)[1] + T.at<float>(0,2)*input.at<Vec3f>(x,y)[2];
                G.at<float>(x,y) = T.at<float>(1,0)*input.at<Vec3f>(x,y)[0] + T.at<float>(1,1)*input.at<Vec3f>(x,y)[1] + T.at<float>(1,2)*input.at<Vec3f>(x,y)[2];
                B.at<float>(x,y) = T.at<float>(2,0)*input.at<Vec3f>(x,y)[0] + T.at<float>(2,1)*input.at<Vec3f>(x,y)[1] + T.at<float>(2,2)*input.at<Vec3f>(x,y)[2];
            }
        }

        //Desaturate and rescale to constrain resulting RGB values to [0,1]
        double RminVal, GminVal, BminVal;
        double RmaxVal, GmaxVal, BmaxVal;
        Point minLoc;
        Point maxLoc;

        minMaxLoc( R, &RminVal, &RmaxVal, &minLoc, &maxLoc );
        minMaxLoc( G, &GminVal, &GmaxVal, &minLoc, &maxLoc );
        minMaxLoc( B, &BminVal, &BmaxVal, &minLoc, &maxLoc );

        Mat matMin = Mat::zeros(1, 4, CV_32FC1), matMax = Mat::zeros(1, 4, CV_32FC1);
        matMin.at<float>(0,0) = RminVal; matMin.at<float>(0,1) = GminVal; matMin.at<float>(0,2) = BminVal; matMin.at<float>(0,3) = 0;
        double min, max;
        minMaxLoc( matMin, &min, &max, &minLoc, &maxLoc );
        float addWhite = -min;
        matMax.at<float>(0,0) = RmaxVal + addWhite; matMax.at<float>(0,1) = GmaxVal + addWhite; matMax.at<float>(0,2) = BmaxVal + addWhite; matMax.at<float>(0,3) = 1;
        minMaxLoc( matMax, &min, &max, &minLoc, &maxLoc );
        float Scale = max;

        for(int x = 0; x < input.rows; x++){
            for(int y = 0; y < input.cols; y++){
                output.at<Vec3f>(x,y)[2] = (R.at<float>(x,y) + addWhite) / Scale;
                output.at<Vec3f>(x,y)[1] = (G.at<float>(x,y) + addWhite) / Scale;
                output.at<Vec3f>(x,y)[0] = (B.at<float>(x,y) + addWhite) / Scale;
            }
        }
        imshow("Unscaled RGB", output);
    }
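For completeness, here is a minimal, untested sketch of how the two steps above might be chained, assuming the input is a CV_32FC3 Lab image (L in [0,100], a/b roughly in [-127,127]); the file name is just a placeholder:

    #include <opencv2/opencv.hpp>
    using namespace cv;

    int main() {
        // let OpenCV produce the floating-point Lab input
        Mat bgr = imread("input.png");          // placeholder file name
        Mat bgrF, lab;
        bgr.convertTo(bgrF, CV_32FC3, 1.0 / 255.0);
        cvtColor(bgrF, lab, COLOR_BGR2Lab);     // CV_32FC3, L in [0,100]

        // step 1: Lab -> XYZ, step 2: XYZ -> RGB (the two functions above)
        Mat xyz = Mat::zeros(lab.size(), CV_32FC3);
        Mat rgb = Mat::zeros(lab.size(), CV_32FC3);
        CIElabtoXYZ(lab, xyz);
        XYZtoRGB(xyz, rgb);                     // also shows the "Unscaled RGB" window

        waitKey(0);
        return 0;
    }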

Chroma key with openFrameworks/OpenGL

You could use glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_DST_ALPHA) with blending enabled, and then use a pixel shader that sets alpha to 0 whenever a pixel matches the key color.
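In an openFrameworks context that might look roughly like the sketch below. It is untested and assumes a recent openFrameworks with the default (legacy GL2) renderer; the green key colour and the 0.4 threshold are arbitrary illustrative values.

    #include "ofMain.h"

    class ofApp : public ofBaseApp {
    public:
        ofVideoGrabber grabber;
        ofShader chromaKey;

        void setup() {
            grabber.setup(640, 480);

            // fragment-only shader for the legacy GL2 pipeline:
            // pixels close to the key colour get alpha 0, everything else stays opaque
            string frag =
                "#version 120\n"
                "#extension GL_ARB_texture_rectangle : enable\n"
                "uniform sampler2DRect tex0;\n"
                "uniform vec3 keyColor;\n"
                "uniform float threshold;\n"
                "void main() {\n"
                "    vec4 c = texture2DRect(tex0, gl_TexCoord[0].xy);\n"
                "    float d = distance(c.rgb, keyColor);\n"
                "    gl_FragColor = vec4(c.rgb, d < threshold ? 0.0 : 1.0);\n"
                "}\n";
            chromaKey.setupShaderFromSource(GL_FRAGMENT_SHADER, frag);
            chromaKey.linkProgram();
        }

        void update() {
            grabber.update();
        }

        void draw() {
            ofBackground(40);

            // the blend function suggested above
            glEnable(GL_BLEND);
            glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_DST_ALPHA);

            chromaKey.begin();
            chromaKey.setUniformTexture("tex0", grabber.getTexture(), 0);
            chromaKey.setUniform3f("keyColor", 0.0, 1.0, 0.0); // key out green
            chromaKey.setUniform1f("threshold", 0.4);          // tune to taste
            grabber.draw(0, 0);
            chromaKey.end();

            glDisable(GL_BLEND);
        }
    };

    int main() {
        ofSetupOpenGL(640, 480, OF_WINDOW);
        ofRunApp(new ofApp());
    }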

How to get the average color of a specific area in a webcam feed (Processing/JavaScript)?

You're so close, but you're missing one important aspect: the number of pixels you're sampling.

Notice that in the commented-out example code for the full image you divide by the full number of pixels (pixels.length).

However, in your adapted version you want the average colour of only a subsection of the full image, which means a smaller number of pixels.

You're only sampling an area that is 100x100 pixels, so you need to divide by 10000 instead of webcam.pixels.length (1920x1000). That is why you get 0: it's integer division.
This is what I mean in code:

int totalSampledPixels = rWidth * rHeight;
r /= totalSampledPixels;
g /= totalSampledPixels;
b /= totalSampledPixels;

Full tweaked sketch:

import processing.video.*;

Capture webcam;
Capture cap;
PImage bg_img;

color bgColor = color(0, 0, 0);

int rMargin = 50;
int rWidth = 100;
int rHeight = 100;

color input = color(0, 0, 0);
color background = color(255, 255, 255);
color current;
int bgTolerance = 5;

void setup() {
  size(1280, 720);

  // start the webcam
  String[] inputs = Capture.list();
  if (inputs.length == 0) {
    println("Couldn't detect any webcams connected!");
    exit();
  }
  webcam = new Capture(this, inputs[0]);

  webcam.start();
}

void draw() {
  if (webcam.available()) {

    // read from the webcam
    webcam.read();

    image(webcam, 0, 0);
    webcam.loadPixels();

    noFill();
    strokeWeight(2);
    stroke(255, 255, 255);
    rect(rMargin, rMargin, rWidth, rHeight);

    int yCenter = (rWidth/2) + rMargin;
    int xCenter = (rWidth/2) + rMargin;
    // rectMode(CENTER);

    int rectCenterIndex = (width * yCenter) + xCenter;

    int r = 0, g = 0, b = 0;

    //for whole image:
    //for (int i=0; i<bg_img.pixels.length; i++) {
    //  color c = bg_img.pixels[i];
    //  r += c>>16&0xFF;
    //  g += c>>8&0xFF;
    //  b += c&0xFF;
    //}
    //r /= bg_img.pixels.length;
    //g /= bg_img.pixels.length;
    //b /= bg_img.pixels.length;
    //CALCULATE AVG COLOR:
    int i;
    for (int x = 0; x < width; x++) {
      for (int y = 0; y < height; y++) {
        if (x >= rMargin && x < rMargin + rWidth && y >= rMargin && y < rMargin + rHeight) {

          i = (width * y) + x;
          color c = webcam.pixels[i];
          r += c >> 16 & 0xFF;
          g += c >> 8 & 0xFF;
          b += c & 0xFF;

        }
      }
    }
    //divide by just the area sampled (x >= 50 && x < 150 && y >= 50 && y < 150 is a 100x100 px area)
    int totalSampledPixels = rWidth * rHeight;

    r /= totalSampledPixels;
    g /= totalSampledPixels;
    b /= totalSampledPixels;

    fill(r, g, b);
    rect(rMargin + rWidth, rMargin, rWidth, rHeight);

    println(r + " " + g + " " + b);
  }
}

Bear in mind this is averaging in the RGB colour space, which is not the same as a perceptual colour space. For example, if you average red and yellow you'd expect orange, but in RGB a bit of red and green makes yellow.

Hopefully the RGB average is good enough for what you need; otherwise you may need to convert from RGB to the CIE XYZ colour space, then to Lab to compute the perceptual average (then convert back to XYZ and RGB to display on screen). If that is something you're interested in trying, you can find an older answer demonstrating this in openFrameworks (which, you'll notice, can be similar to Processing in simple scenarios).
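If you do go down that route outside Processing, a rough C++/OpenCV sketch of the idea could look like the helper below; perceptualAverage() is an assumed name and the scaling choices are illustrative:

    #include <opencv2/opencv.hpp>

    // average a region of interest in CIE Lab instead of RGB,
    // then convert the mean back to a displayable BGR value in [0,1]
    cv::Vec3f perceptualAverage(const cv::Mat& bgr8, cv::Rect roi) {
        cv::Mat bgrF, lab;
        bgr8(roi).convertTo(bgrF, CV_32FC3, 1.0 / 255.0); // Lab conversion expects floats in [0,1]
        cv::cvtColor(bgrF, lab, cv::COLOR_BGR2Lab);

        cv::Scalar meanLab = cv::mean(lab);               // per-channel mean in the perceptual space

        cv::Mat meanPixel(1, 1, CV_32FC3, cv::Scalar(meanLab[0], meanLab[1], meanLab[2]));
        cv::Mat meanBgr;
        cv::cvtColor(meanPixel, meanBgr, cv::COLOR_Lab2BGR);
        return meanBgr.at<cv::Vec3f>(0, 0);
    }

For the same region as the sketch above, that would be something like perceptualAverage(frame, cv::Rect(50, 50, 100, 100)).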

Java: how to convert RGB color to CIE Lab

Here's my implementation:

import java.awt.color.ColorSpace;

public class CIELab extends ColorSpace {

    public static CIELab getInstance() {
        return Holder.INSTANCE;
    }

    @Override
    public float[] fromCIEXYZ(float[] colorvalue) {
        double l = f(colorvalue[1]);
        double L = 116.0 * l - 16.0;
        double a = 500.0 * (f(colorvalue[0]) - l);
        double b = 200.0 * (l - f(colorvalue[2]));
        return new float[] {(float) L, (float) a, (float) b};
    }

    @Override
    public float[] fromRGB(float[] rgbvalue) {
        float[] xyz = CIEXYZ.fromRGB(rgbvalue);
        return fromCIEXYZ(xyz);
    }

    @Override
    public float getMaxValue(int component) {
        return 128f;
    }

    @Override
    public float getMinValue(int component) {
        return (component == 0) ? 0f : -128f;
    }

    @Override
    public String getName(int idx) {
        return String.valueOf("Lab".charAt(idx));
    }

    @Override
    public float[] toCIEXYZ(float[] colorvalue) {
        double i = (colorvalue[0] + 16.0) * (1.0 / 116.0);
        double X = fInv(i + colorvalue[1] * (1.0 / 500.0));
        double Y = fInv(i);
        double Z = fInv(i - colorvalue[2] * (1.0 / 200.0));
        return new float[] {(float) X, (float) Y, (float) Z};
    }

    @Override
    public float[] toRGB(float[] colorvalue) {
        float[] xyz = toCIEXYZ(colorvalue);
        return CIEXYZ.toRGB(xyz);
    }

    CIELab() {
        super(ColorSpace.TYPE_Lab, 3);
    }

    private static double f(double x) {
        if (x > 216.0 / 24389.0) {
            return Math.cbrt(x);
        } else {
            return (841.0 / 108.0) * x + N;
        }
    }

    private static double fInv(double x) {
        if (x > 6.0 / 29.0) {
            return x * x * x;
        } else {
            return (108.0 / 841.0) * (x - N);
        }
    }

    private Object readResolve() {
        return getInstance();
    }

    private static class Holder {
        static final CIELab INSTANCE = new CIELab();
    }

    private static final long serialVersionUID = 5027741380892134289L;

    private static final ColorSpace CIEXYZ =
        ColorSpace.getInstance(ColorSpace.CS_CIEXYZ);

    private static final double N = 4.0 / 29.0;

}

