...

For directly controlling an interface it can be helpful to know where the brightest point in an image is located. To do this, a PVector is created along with a variable that holds the brightest value found so far in the current frame. By comparing the brightness values across the whole frame, the brightest point can be determined very quickly.

Code Block
languagejava
titleExample
collapsetrue
import processing.video.*;
Capture video;

void setup() {
  size(640, 480);

  // start video capture
  video = new Capture(this, width, height, 30);
  video.start();
}

void draw() {
  // read new video frame if available
  if (video.available()) {
    video.read();
  }

  // load video pixels
  video.loadPixels();

  // initially set brightness to zero
  float brightness = 0;

  // initially set point to center
  PVector point = new PVector(width/2, height/2);

  // go through video pixel by pixel
  for (int x=0; x < width; x++) {
    for (int y=0; y < height; y++) {
      // get pixel location
      int loc = x + y * width;

      // get color of pixel
      color c = video.pixels[loc];

      // check if brightness is higher than current value
      if (brightness(c) > brightness) {
        // set new brightness
        brightness = brightness(c);

        // save location of brighter point
        point.x = x;
        point.y = y;
      }
    }
  }

  // draw video
  image(video, 0, 0);

  // draw circle
  ellipse(point.x, point.y, 20, 20);
}
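
The detected point can be used directly as an input value, as mentioned in the introduction above. Below is a minimal sketch of that idea; mouseX and mouseY stand in for the detected point so the sketch runs without a camera, and mapping the x-position to the background brightness is purely an illustrative choice, not part of the original example.

Code Block
languagejava
titleExample (sketch)
collapsetrue
void setup() {
  size(640, 480);
}

void draw() {
  // the mouse stands in for the detected brightest point
  float pointX = mouseX;
  float pointY = mouseY;

  // map the horizontal position of the point to a value between 0 and 255
  float value = map(pointX, 0, width, 0, 255);

  // use the value to control something, here the background brightness
  background(value);

  // draw the point itself, as in the example above
  fill(255, 0, 0);
  noStroke();
  ellipse(pointX, pointY, 20, 20);
}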

Color Tracking

...

Code Block
titleExample
collapsetrue
import processing.video.*;
Capture video;

color trackColor;


void setup() {
  size(640, 480);

  // start video capture
  video = new Capture(this, width, height, 15);
  video.start();

  // initialize track color to red
  trackColor = color(255, 0, 0);
}

void draw() {
  // read video frame if available
  if (video.available()) {
    video.read();
  }

  // load pixels
  video.loadPixels();

  // draw video
  image(video, 0, 0);

  // initialize record to number greater than the diagonal of the screen
  float record = width+height;

  // initialize variable to store closest point
  PVector closestPoint = new PVector();

  // get track color as vector
  PVector trackColorVec = new PVector(red(trackColor), green(trackColor), blue(trackColor));

  // go through image pixel by pixel
  for (int x=0; x < video.width; x++) {
    for (int y=0; y < video.height; y++) {
      // get pixel location
      int loc = x + y * video.width;

      // get pixel color
      color currentColor = video.pixels[loc];

      // get current color as vector
      PVector currColorVec = new PVector(red(currentColor), green(currentColor), blue(currentColor));

      // calculate distance between current color and track color
      float dist = currColorVec.dist(trackColorVec);

      // save point if closer than previous
      if (dist < record) {
        record = dist;
        closestPoint.x = x;
        closestPoint.y = y;
      }
    }
  }

  // draw point if we found one that is less than 10 apart
  if (record < 10) {
    fill(trackColor);
    strokeWeight(4.0);
    stroke(0);
    ellipse(closestPoint.x, closestPoint.y, 50, 50);
  }
}

void mousePressed() {
  // save color of current pixel under the mouse
  int loc = mouseX + mouseY * video.width;
  trackColor = video.pixels[loc];
}
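
Because only the single closest pixel is used, the tracked point can jump around from frame to frame. A common remedy is to interpolate towards each new position with lerp() instead of using it directly. The sketch below is a minimal illustration of that smoothing step; the mouse stands in for closestPoint so it runs without a camera, and the factor 0.1 is an arbitrary choice.

Code Block
languagejava
titleExample (sketch)
collapsetrue
// the smoothed position that is actually drawn
PVector smoothed = new PVector(0, 0);

void setup() {
  size(640, 480);
}

void draw() {
  background(0);

  // the mouse stands in for the tracked point from the example above
  float targetX = mouseX;
  float targetY = mouseY;

  // move only a fraction of the way towards the new target each frame
  smoothed.x = lerp(smoothed.x, targetX, 0.1);
  smoothed.y = lerp(smoothed.y, targetY, 0.1);

  // draw the smoothed point
  fill(255, 0, 0);
  noStroke();
  ellipse(smoothed.x, smoothed.y, 50, 50);
}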

Blob Detection

Image

Blob detection is a more complex kind of algorithm that tries to recognize an entire object (blob) rather than a single pixel.

Code Block
titleExample
collapsetrue
import processing.video.*;

Capture video;

// the color to track
color trackColor;

// a two-dimensional array to store marked pixels
boolean marks[][];

// the total marked pixels
int total = 0;

// the top-left corner of the blob's bounding box
PVector topLeft;

// the bottom-right corner of the blob's bounding box
PVector bottomRight;

void setup() {
  size(640, 480);

  // start video capture
  video = new Capture(this, width, height, 15);
  video.start();

  // set initial track color to red
  trackColor = color(255, 0, 0);

  // initialize marks array
  marks = new boolean[width][height];
}

void draw() {
  // read video frame if available
  if (video.available()) {
    video.read();
  }

  // draw video image
  image(video, 0, 0);

  // load video pixels
  video.loadPixels();

  // find track color with threshold
  findBlob(20);

  // load canvas pixels
  loadPixels();

  // draw blob
  for (int x = 0; x < width; x ++ ) {
    for (int y = 0; y < height; y ++ ) {
      // get pixel location
      int loc = x + y*width;

      // make pixel red if marked
      if (marks[x][y]) {
        pixels[loc] = color(255, 0, 0);
      }
    }
  }

  // set canvas pixels
  updatePixels();

  // draw bounding box
  stroke(255, 0, 0);
   
  noFill();
  rect(topLeft.x, topLeft.y, bottomRight.x-topLeft.x, bottomRight.y-topLeft.y);
}

void mousePressed() {
  // save current pixel under mouse as track color
  int loc = mouseX + mouseY*video.width;
  trackColor = video.pixels[loc];
}



void findBlob(int threshold) {
  // reset total
  total = 0;

  // prepare point trackers
  int lowestX = width;
  int lowestY = height;
  int highestX = 0;
  int highestY = 0;

  // prepare track color vector
  PVector trackColorVec = new PVector(red(trackColor), green(trackColor), blue(trackColor));

  // go through image pixel by pixel
  for (int x = 0; x < width; x ++ ) {
    for (int y = 0; y < height; y ++ ) {
      // get pixel location
      int loc = x + y*width;

      // get color of pixel
      color currentColor = video.pixels[loc];

      // get vector of pixel color
      PVector currColorVec = new PVector(red(currentColor), green(currentColor), blue(currentColor));

      // get distance to track color
      float dist = currColorVec.dist(trackColorVec);

      // reset mark
      marks[x][y] = false;

      // check if distance is below threshold
      if (dist < threshold) {
        // mark pixel
        marks[x][y] = true;
        total++;

        // update point trackers
        if (x < lowestX) lowestX = x;
        if (x > highestX) highestX = x;
        if (y < lowestY) lowestY = y;
        if (y > highestY) highestY = y;
      }
    }
  }

  // save locations
  topLeft = new PVector(lowestX, lowestY);
  bottomRight = new PVector(highestX, highestY);
}
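
The example above only tracks the bounding box of the marked pixels. If a single control point is needed, for example to drive a cursor, the blob's center of mass can be computed from the same data. The helper below is a sketch of that idea, meant to be added to the example above; it assumes the marks array and the total counter from that sketch and could be called from draw() after findBlob().

Code Block
languagejava
titleExample (sketch)
collapsetrue
PVector centerOfMass() {
  float sumX = 0;
  float sumY = 0;

  // sum up the coordinates of all marked pixels
  for (int x = 0; x < width; x ++ ) {
    for (int y = 0; y < height; y ++ ) {
      if (marks[x][y]) {
        sumX += x;
        sumY += y;
      }
    }
  }

  // fall back to the screen center if nothing was marked
  if (total == 0) {
    return new PVector(width/2, height/2);
  }

  // the center of mass is the average of all marked coordinates
  return new PVector(sumX / total, sumY / total);
}

Drawing an ellipse at the returned position in draw() then marks the center of the detected blob.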

Further Information