Versions Compared

Key

  • This line was added.
  • This line was removed.
  • Formatting was changed.

Auf dieser Seite werden ein paar einfache Computer-Vision-Algorithmen gezeigt und erklärt.

Background Subtraction

Wollen wir beispielsweise Bewegung auf einem Bild erkennen, so ist es hilfreich, eine Art “Green-Screen” zu haben, welcher dann nur die Bereiche zeichnet, welche sich verändern. Dies erreichen wir mit einem sog. Background-Removal oder einer Background-Subtraction. Dazu vergleichen wir ein statisches Referenzbild mit einem Live-Bewegtbild. Wenn die Differenz zwischen beiden Bildern (und damit die Differenz der Farbwerte an derselben Stelle) grösser ist als ein bestimmter Schwellenwert, können wir davon ausgehen, dass an diesem Punkt Bewegung stattfindet. Leider muss man als Abstrich sagen, dass sich die automatische Helligkeitskorrektur der iSight-Kamera schlecht auf diese Methode auswirkt.

Code Block
titleBeispiel
collapsetrue
import processing.video.*;
Capture video;
 
PImage backgroundImage;
float threshold = 20;
 
void setup() {
  size(640, 480);
  
  // start video captire
  video = new Capture(this, width, height, 30);
  video.start();
  
  // prepare image to save background
  backgroundImage = createImage(video.width, video.height, RGB);
}
 
void draw() {
  // read camera image if (video.availableavailable
  if (video.available()) {
    video.read();
  }
  
  // active pixel manipulation of canvas
  loadPixels();
  
  // get pixel data from video and background image
  video.loadPixels(); 
  backgroundImage.loadPixels();
   
 for (int x=0; x // loop through video pixel by pixel
  for (int x=0; x < video.width; x++) {
    for (int y=0; y < video.height; y++) {
      // get pixel array location
      int loc = x + y * video.width;
      
color fgColor = video.pixels[loc];   // get foreground color  (video)
      color bgColorfgColor = backgroundImagevideo.pixels[loc];
      
      // get background color (image)
      color bgColor = backgroundImage.pixels[loc];
       
      // get individual colors
      float r1 = red(fgColor);
      float g1 = green(fgColor);
      float b1 = blue(fgColor);
      float r2 = red(bgColor);
      float g2 = green(bgColor);
      float b2 = blue(bgColor);
      float diff
      // calculate spacial distance between the two colors
      float dist = dist(r1, g1, b1, r2, g2, b2);
       
      // check if distance (diffis >above threshold)
{      if (dist  pixels[loc] = fgColor;> threshold) {
       } else { // write foreground pixel
        pixels[loc] = color(0)fgColor;
      } }else {
     }   }// set pixel  updatePixels();
}to black
   void mousePressed() {   pixels[loc] = backgroundImage.copy(video, 0, 0, video.width, video.height, 0, 0, video.width, video.height);
  backgroundImage.updatePixels();
}

Hellster Punkt

Image Removed

Für die direkte Steuerung eines Interfaces kann es hilfreich sein, zu wissen, wo sich der hellste Punkt in einem Bild befindet. Das Beispiel “P04_6_Video_Kamerabild_Hellster_Punkt” zeigt, wie es geht. Dazu wird ein PVector erstellt und eine Variable, welche den jeweils hellsten Wert für das aktuelle Frame beinhaltet. Durch das Vergleichen der Helligkeitswerte im ganzen Frame kann sehr schnell der hellste Punkt bestimmt werden.

Code Block
titleBeispiel
collapsetrue
import processing.video.*;
Capture video;

// location of the brightest pixel found in the current frame
PVector brightestPoint = new PVector(0, 0);

void setup() {
  size(640, 480);

  // start video capture
  video = new Capture(this, width, height, 30);
  video.start();
}

void draw() {
  // read new video frame if available
  if (video.available()) {
    video.read();
  }

  // reset brightness record for this frame
  float brightness = 0;

  // go through the frame pixel by pixel
  for (int x=0; x < width; x++) {
    for (int y=0; y < height; y++) {
      // get pixel array location
      int loc = x + y * width;

      // get color of pixel
      color c = video.pixels[loc];

      // remember the brightest pixel seen so far
      if (brightness(c) > brightness) {
        brightness = brightness(c);
        brightestPoint.x = x;
        brightestPoint.y = y;
      }
    }
  }

  // draw video and mark the brightest point
  image(video, 0, 0);
  ellipse(brightestPoint.x, brightestPoint.y, 20, 20);
}

Hellster Punkt

Image Added

Für die direkte Steuerung eines Interfaces kann es hilfreich sein, zu wissen, wo sich der hellste Punkt in einem Bild befindet. Dazu wird ein PVector erstellt und eine Variable, welche den jeweils hellsten Wert für das aktuelle Frame beinhaltet. Durch das Vergleichen der Helligkeitswerte im ganzen Frame kann sehr schnell der hellste Punkt bestimmt werden.

Code Block
languagejava
titleBeispiel
collapsetrue
import processing.video.*;
Capture video;

void setup() {
  size(640, 480);

  // start video capture
  video = new Capture(this, width, height, 30);
  video.start();
}

void draw() {
  // read new video frame if available
  if (video.available()) {
    video.read();
  }

  // initially set brightness to zero
  float brightness = 0;

  // initially set point to center
  PVector point = new PVector(width/2, height/2);

  // go through video pixel by pixel
  for (int x=0; x < width; x++) {
    for (int y=0; y < height; y++) {
      // get pixel location
      int loc = x + y * width;

      // get color of pixel
      color c = video.pixels[loc];

      // check if brightness is higher than current value
      if (brightness(c) > brightness) {
        // set new brightness
        brightness = brightness(c);

        // save location of brighter point
        point.x = x;
        point.y = y;
      }
    }
  }

  // draw video
  image(video, 0, 0);

  // draw circle at the brightest point
  ellipse(point.x, point.y, 20, 20);
}

Farbtracking

...

}

Farbtracking

Image Added

Das Farbtracking ist eine sehr einfache Methode, um ein farbiges Objekt in einem Bild zu finden. Dazu wird einfach der Punkt im Bild gesucht, der der festgelegten Farbe am ähnlichsten ist.

Code Block
titleBeispiel
collapsetrue
import processing.video.*;
Capture video;

color trackColor; 

void setup() {
  size(640, 480);
  
  video = new Capture(this, width, height,
15);   video.start();

  trackColor = color(255, 0, 0);
  smooth();
}

void draw() {
  if (video.available()) {
  size(640, 480);

  // start video capture
  video = new Capture(this, width, height, 15);
  video.readstart();

  }// initialize track  video.loadPixels();
  image(videocolor to red
  trackColor = color(255, 0, 0);
}

void floatdraw() worldRecord{
= 500; // read video frame intif closestX =available
0;   int closestY = 0;
  
  PVector closestPoint = new PVector();
 if (video.available()) {
    video.read();
  }

  for// (int x=0; x <load pixels
  video.widthloadPixels();

x++) { // draw video
 for image(intvideo, y=0; y < video.height; y++) {
      int loc = x + y * video.width;
      color currentColor = video.pixels[loc], 0);

  // initialize record to number greater than the diagonal of the screen
  float record = width+height;

  // initialize variable to store closest  point
  PVector currColorVecclosestPoint = new PVector(red(currentColor), green(currentColor), blue(currentColor));
   ;
  
  // get track color as vector
  PVector trackColorVec = new PVector(red(trackColor), green(trackColor), blue(trackColor));

  // go through floatimage diffpixel = currColorVec.dist(trackColorVec);by pixel
      
      iffor (diff < worldRecord) {
        worldRecord = diff;
 int x=0; x < video.width; x++) {
    for  closestPoint.x = x(int y=0; y < video.height; y++) {
   closestPoint.y = y; // get pixel location
  }    int }loc = x }+ y   if (worldRecord < 10) {* video.width;
      
 fill(trackColor);     strokeWeight(4.0);
    stroke(0);// get pixel color
      ellipse(closestPoint.x, closestPoint.y, 50, 50)color currentColor = video.pixels[loc];

 } }  void mousePressed() {// get current intcolor locas =vector
mouseX + mouseY * video.width;  PVector trackColorcurrColorVec = video.pixels[loc];
}

Blob Detection

Image Removed

Code Block
titleBeispiel
collapsetrue
import processing.video.*;
Capture video;

BlobDetector blobDetector;
color blobColor; 

void setup() {
  size(640, 480);

  video = new Capture(this, width, height, 15);
  video.start();
  
  blobDetector = new BlobDetector();
  blobColor = color(255, 0, 0);

  smooth();
}

void draw() {
  if (video.available()new PVector(red(currentColor), green(currentColor), blue(currentColor)); 
      
      // calculate distance between current color and track color
      float dist = currColorVec.dist(trackColorVec);

      // save point if closer than previous
      if (dist < record) {
    video.read();    record }= dist;
    video.loadPixels();    closestPoint.x = //x;
for black and white image   // video.filter(THRESHOLD,0.1);closestPoint.y = y;
     image(video, 0, 0); }
    }
 blobDetector.findBlob(blobColor, 20); }

  blobDetector.drawBlob();
  blobDetector.drawBoundingBox();
  blobDetector.drawCenterOfMass();
}

void mousePressed() {
  int loc = mouseX + mouseY*video.width;
  blobColor = video.pixels[loc];
}

class BlobDetector {
  int blobPoints[][];
  int blobWeight = 0;
  PVector centerOfMass;

  BlobDetector// draw point if we found a one that is less than 10 apart
  if (record < 10) {
    fill(trackColor);
    strokeWeight(4.0);
    stroke(0);
    ellipse(closestPoint.x, closestPoint.y, 50, 50);
  }
}

void mousePressed() {
  // save color blobPointsof current =pixel new int[width][height];
under the mouse
  int centerOfMassloc = new PVector(0, 0);
  }

  void findBlob(color blobColor, int threshold) {
    blobWeight = 0;

    for (int x = 0; x < width; x ++ ) {
      for (int y = 0; y < height; y ++ ) {
        int loc = x + y*width;
        color currentColor = video.pixels[loc];

        PVector currColorVec = new PVector(red(currentColor), green(currentColor), blue(currentColor));
        PVector trackColorVec = new PVector(red(blobColor), green(blobColor), blue(blobColor));
        float diff = currColorVec.dist(trackColorVec);

        if (diff < thresholdmouseX + mouseY * video.width;
  trackColor = video.pixels[loc];
}

Blob Detection

Image Added

Die Blob Detection ist schon eine komplexere Art von Algorithmus, bei der versucht wird, ein gesamtes Objekt (Blob) zu erkennen.

Code Block
titleBeispiel
collapsetrue
import processing.video.*;

Capture video;

// the color to track
color trackColor;

// a two-dimensional array to store marked pixels
boolean marks[][];

// the total marked pixels
int total = 0;

// the most top left pixel
PVector topLeft;

// the most bottom right pixel
PVector bottomRight;

void setup() {
  size(640, 480);

  // start video capture
  video = new Capture(this, width, height, 15);
  video.start();

  // set initial track color to red
  trackColor = color(255, 0, 0);

  // initialize marks array
  marks = new boolean[width][height];
}

void draw() {
  // read video frame if available
  if (video.available()) {
    video.read();
  }

  // draw video image
  image(video, 0, 0);

  // find track color with threshold
  findBlob(20);

  // load canvas pixels
  loadPixels();

  // draw blob
  for (int x = 0; x < width; x ++ ) {
    for (int y = 0; y < height; y ++ ) {
      // get pixel location
      int loc = x + y*width;

      // make pixel red if marked
      if (marks[x][y]) {
        pixels[loc] = color(255, 0, 0);
      }
    }
  }

  // set canvas pixels
  updatePixels();

  // draw bounding box
  stroke(255, 0, 0);
  noFill();
  rect(topLeft.x, topLeft.y, bottomRight.x-topLeft.x, bottomRight.y-topLeft.y);
}

void mousePressed() {
  // save current pixel under mouse as track color
  int loc = mouseX + mouseY*video.width;
  trackColor = video.pixels[loc];
}

// mark every pixel whose color is within threshold of trackColor
// and track the extremes for the bounding box
void findBlob(int threshold) {
  // reset total
  total = 0;

  // prepare point trackers
  int lowestX = width;
  int lowestY = height;
  int highestX = 0;
  int highestY = 0;

  // prepare track color vector
  PVector trackColorVec = new PVector(red(trackColor), green(trackColor), blue(trackColor));

  // go through image pixel by pixel
  for (int x = 0; x < width; x ++ ) {
    for (int y = 0; y < height; y ++ ) {
      // get pixel location
      int loc = x + y*width;

      // get color of pixel
      color currentColor = video.pixels[loc];

      // get vector of pixel color
      PVector currColorVec = new PVector(red(currentColor), green(currentColor), blue(currentColor));

      // get distance to track color
      float dist = currColorVec.dist(trackColorVec);

      // reset mark
      marks[x][y] = false;

      // check if distance is below threshold
      if (dist < threshold) {
        // mark pixel
        marks[x][y] = true;
        total++;

        // update point trackers
        if (x < lowestX) lowestX = x;
        if (x > highestX) highestX = x;
        if (y < lowestY) lowestY = y;
        if (y > highestY) highestY = y;
      }
    }
  }

  // save locations
  topLeft = new PVector(lowestX, lowestY);
  bottomRight = new PVector(highestX, highestY);
}

Weitere Informationen