Java motion detection using a webcam
I'm in a group that is trying to detect motion from people walking through a space and then, for each separate person, display a fish in an "aquarium". Right now we're using Processing and OpenCV to do face detection, and for each face we create a fish.
The problem is that the system runs at a really low frame rate. We need to raise the frame rate by a large factor, which is why we're trying to figure out how to use motion detection.
Our teacher suggested that we use threading in Processing and check for people less often, but we're really confused about how to do the threading.
Any suggestions about how we should handle this would be really appreciated. Thank you.
Here is the code:
import hypermedia.video.*; // Imports the OpenCV library
import java.awt.Rectangle;
import ddf.minim.analysis.*;
import ddf.minim.*;
OpenCV opencv; // Creates a new OpenCV object
Minim minim;
AudioInput in;
FFT fft;
int w;
PImage fade;
PImage movementImg; // Creates a new PImage to hold the movement image
ArrayList bubbles; // Creates an ArrayList to hold the Bubble objects
PImage bubblePNG; // Creates a PImage that will hold the image of the bubble
int randPos;
PImage fishImg;
PImage fishImg2;
PImage fishImg3;
PImage fishImg4;
PImage fishImg5;
PImage sharkImg;
PImage clockImg;
PImage backImg;
int sharkX=480;
int sharkY; // assigned in setup(): height is 0 until size() has run
int sharkMoves = 480;
int sharkSpeed=40;
int flagForShark=0;
int flagForNotification=0;
ArrayList psystems;
int NotificationX = 10;
int NotificationY = 10;
//clock
int cx, cy;
float secondsRadius;
float minutesRadius;
float hoursRadius;
float clockDiameter;
void setup(){
size ( 640, 480 ); // Window size of 640 x 480
sharkY = height/2; // height is only valid after size(), so set sharkY here
opencv = new OpenCV( this ); // Initialises the OpenCV library
opencv.capture( 640, 480 ); // Sets the capture size to 640 x 480
opencv.cascade( OpenCV.CASCADE_FRONTALFACE_ALT ); // Load the frontal-face cascade description file
movementImg = new PImage(640, 480 ); // Initialises the PImage that holds the movement image
bubbles = new ArrayList(); // Initialises the ArrayList
bubblePNG = loadImage("bubble.png"); // Load the bubble image into memory
smooth();
fishImg = loadImage("purpleFish.png");
fishImg2 = loadImage("fish2.png");
fishImg3 = loadImage("fish3.png");
fishImg4 = loadImage("fish4.png");
fishImg5 = loadImage("fish5.png");
sharkImg = loadImage("shark.png");
clockImg = loadImage("clock.png");
backImg = loadImage("bg01.png");
fill(61,36,9);
int radius = min(100, 100) / 2;
secondsRadius = radius * 0.72;
minutesRadius = radius * 0.60;
hoursRadius = radius * 0.50;
clockDiameter = radius * 1.8;
cx = 50;
cy = 50;
//Sound stuff
minim = new Minim(this);
in = minim.getLineIn(Minim.STEREO, 512);
fft = new FFT(in.bufferSize(),in.sampleRate());
fft.logAverages(60,7);
}
void youareloud(){
fft.forward(in.mix);
for(int i=0; i<fft.avgSize();i++){
if(fft.getAvg(i) > 3){
randPos = 160*(int)random(0, 5);
bubbles.add(new Bubble( randPos+(int)random(-10, 10), 480, (int)random(10,25), (int)random(10,25))); // Adds a new bubble to the array with a random x position
}
}
for ( int i = 0; i < bubbles.size(); i++ ){ // For every bubble in the bubbles array
Bubble _bubble = (Bubble) bubbles.get(i); // Copies the current bubble into a temporary object
if(_bubble.update() == 1){ // If the bubble's update function returns '1'
bubbles.remove(i); // then remove the bubble from the array
_bubble = null; // and make the temporary bubble object null
i--; // since we've removed a bubble from the array, we need to subtract 1 from i, or we'll skip the next bubble
}else{ // If the bubble's update function doesn't return '1'
bubbles.set(i, _bubble); // Copies the updated temporary bubble object back into the array
_bubble = null; // Makes the temporary bubble object null.
}
}
}
void draw(){
opencv.read(); // Captures a frame from the camera
opencv.flip(OpenCV.FLIP_HORIZONTAL); // Flips the image horizontally
// background(loadImage("data/underwater_640x480_stretched.jpg")); // draw the detected environment
background(backImg);
faces();
youareloud();
extras();
}
class Bubble{
int bubbleX, bubbleY, bubbleWidth, bubbleHeight; //Some variables to hold information about the bubble
int randSize = (int)random(10, 20);
Bubble ( int bX, int bY, int bW, int bH ){ //The class constructor- sets the values when a new bubble object is made
bubbleX = bX;
bubbleY = bY;
bubbleWidth = bW;
bubbleHeight = bH;
}
int update(){ //The Bubble update function
int movementAmount; //Create and set a variable to hold the amount of white pixels detected in the area where the bubble is
movementAmount = 0;
for( int y = bubbleY; y < (bubbleY + (bubbleHeight-1)); y++ ){
//For loop that cycles through all of the pixels in the area the bubble occupies
for( int x = bubbleX; x < (bubbleX + (bubbleWidth-1)); x++ ){
if ( x < width && x > 0 && y < height && y > 0 ){
//If the current pixel is within the screen boundaries
if (brightness(movementImg.pixels[x + (y * width)]) > 127){
//and if the brightness is above 127 (in this case, if it is white)
movementAmount++;
//Add 1 to the movementAmount variable.
}
}
}
}
if (movementAmount > 5){ // If more than 5 pixels of movement are detected in the bubble area
//poppedBubbles++; // Add 1 to the variable that holds the number of popped bubbles
return 1; // Return 1 so that the bubble object is destroyed
} else { // If less than 5 pixels of movement are detected,
//bubbleY += 10; // (original downward motion, disabled)
bubbleY -= 10; // decrease the y position of the bubble so that it rises
if (bubbleY < 0){ // If the bubble has floated off the top of the screen
return 1; // Return '1' so that the bubble object is destroyed
}
image(bubblePNG, bubbleX, bubbleY,randSize,randSize); // Draws the bubble to the screen
return 0; // Returns '0' so that the bubble isn't destroyed
}
}
}
void faces(){
Rectangle[] faces = opencv.detect();
noFill();
stroke(255,0,0);
opencv.absDiff(); // Creates a difference image
opencv.convert(OpenCV.GRAY); // Converts to greyscale
opencv.blur(OpenCV.BLUR, 3); // Blur to remove camera noise
opencv.threshold(20); // Thresholds to convert to black and white
movementImg = opencv.image(); // Puts the OpenCV buffer into an image object
movementImg.loadPixels(); // Make sure the pixels[] array is valid before the bubbles read it
opencv.remember(OpenCV.SOURCE, OpenCV.FLIP_HORIZONTAL); // Remembers the current camera image so we can take a difference against it next frame
for( int i=0; i<faces.length; i++ ) {
//image( opencv.image(), faces[i].x, faces[i].y, faces[i].width, faces[i].height ); // display the image in memory on the right
// opencv.loadImage( "/Users/sang/Desktop/home.png", ); // load image from file
// opencv.convert( GRAY );
// opencv.ROI( faces[i].x, faces[i].y, faces[i].width, faces[i].height );
// opencv.brightness( 80 );
// opencv.contrast( 90 );
if (i == 0) { image( fishImg, faces[i].x, faces[i].y, faces[i].width, faces[i].height); }
else if (i == 1) { image( fishImg2, faces[i].x, faces[i].y, faces[i].width, faces[i].height); }
else if (i == 2) { image( fishImg3, faces[i].x, faces[i].y, faces[i].width, faces[i].height); }
else if (i == 3) { image( fishImg4, faces[i].x, faces[i].y, faces[i].width, faces[i].height); }
else if (i == 4) { image( fishImg5, faces[i].x, faces[i].y, faces[i].width, faces[i].height); }
}
}
void extras(){
if(keyPressed){
if (key == 's' || key == 'S'){
flagForShark=1;
} else if(key=='n' || key =='N'){
flagForNotification=1;
} else if(key=='x' || key=='X'){
flagForNotification=0;
}
}
if(flagForShark==1){
// fill(255, 204, 255);
// stroke(128, 0, 128);
image( sharkImg,sharkMoves,sharkY);
//ellipse(candyX,candyY+candyMoves, 55, 55);
//image(loadImage("/Users/sang/Desktop/candy.png"),candyX,candyY+candyMoves);
if(sharkMoves>0){
sharkMoves-=sharkSpeed;
} else {
sharkMoves=480;
flagForShark=0;
}
}
if(flagForNotification==1){
image(sharkImg,NotificationX,NotificationY);
}
//Clock
// Draw the clock background
// fill(80);
noStroke();
// ellipse(cx, cy, clockDiameter, clockDiameter);
image(clockImg,5,5,clockDiameter,clockDiameter);
// Angles for sin() and cos() start at 3 o'clock;
// subtract HALF_PI to make them start at the top
float s = map(second(), 0, 60, 0, TWO_PI) - HALF_PI;
float m = map(minute() + norm(second(), 0, 60), 0, 60, 0, TWO_PI) - HALF_PI;
float h = map(hour() + norm(minute(), 0, 60), 0, 24, 0, TWO_PI * 2) - HALF_PI;
// Draw the hands of the clock
stroke(61,36,9);
strokeWeight(1);
line(cx, cy, cx + cos(s) * secondsRadius, cy + sin(s) * secondsRadius);
strokeWeight(2);
line(cx, cy, cx + cos(m) * minutesRadius, cy + sin(m) * minutesRadius);
strokeWeight(4);
line(cx, cy, cx + cos(h) * hoursRadius, cy + sin(h) * hoursRadius);
// Draw the minute ticks (disabled; endShape() must stay commented out
// as long as the matching beginShape() below is commented out)
// strokeWeight(2);
// beginShape(POINTS);
// for (int a = 0; a < 360; a+=6) {
// float x = cx + cos(radians(a)) * secondsRadius;
// float y = cy + sin(radians(a)) * secondsRadius;
// vertex(x, y);
// }
// endShape();
//end of clock
}
The problem I see with your sketch is that your analysis and your rendering run at the same frame rate. To troubleshoot the latency, I'd propose a divide-and-conquer approach: build two sketches linked by OSC. OSC running on a single machine is quick enough that it won't introduce additional latency problems. Here's how I'd build the two sketches:
A Processing sketch that takes image data in and does the face tracking. This sketch fires an OSC event every n seconds; each event is a list of locations at which to draw fish. Set it to 4 frames/sec to start.
A Processing sketch that listens for the OSC events from sketch 1 and renders the aquarium every n seconds. Set it to 15 frames/sec to start.
I've used this approach on a similar problem (tracking the brightest point within a frame and triggering sound events when the point hits certain coordinates), and it paid off to tune the analysis rate separately from the rendering rate.
Hope that helps - fire back if you have trouble with OSC.
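A minimal sketch of the OSC wiring described above, assuming the oscP5 library (and its netP5 companion) for the transport; the /fish address pattern and ports 9000/12000 are arbitrary choices for this example, and the face-tracking side is reduced to the calls already used in the question's code.

// Sketch 1 - analysis: track faces, fire one OSC event per pass (run slow, e.g. 4 fps)
import hypermedia.video.*;
import java.awt.Rectangle;
import oscP5.*;
import netP5.*;

OpenCV opencv;
OscP5 oscP5;
NetAddress aquarium;

void setup() {
  size(640, 480);
  frameRate(4); // analysis rate, tuned independently of rendering
  opencv = new OpenCV(this);
  opencv.capture(640, 480);
  opencv.cascade(OpenCV.CASCADE_FRONTALFACE_ALT);
  oscP5 = new OscP5(this, 9000);                 // local listening port (unused by this sender)
  aquarium = new NetAddress("127.0.0.1", 12000); // where sketch 2 listens
}

void draw() {
  opencv.read();
  OscMessage msg = new OscMessage("/fish"); // one message carrying the whole list of locations
  for (Rectangle f : opencv.detect()) {
    msg.add(f.x); msg.add(f.y); msg.add(f.width); msg.add(f.height);
  }
  oscP5.send(msg, aquarium);
}

// Sketch 2 - rendering: listen for OSC events, draw the aquarium (e.g. 15 fps)
import oscP5.*;
import netP5.*;
import java.util.ArrayList;

OscP5 oscP5;
ArrayList<int[]> fish = new ArrayList<int[]>(); // latest batch of fish rectangles

void setup() {
  size(640, 480);
  frameRate(15); // render rate, tuned independently of analysis
  oscP5 = new OscP5(this, 12000); // listen on the port sketch 1 sends to
}

void oscEvent(OscMessage msg) { // called by oscP5 for each incoming message
  if (msg.checkAddrPattern("/fish")) {
    ArrayList<int[]> next = new ArrayList<int[]>();
    for (int i = 0; i + 3 < msg.typetag().length(); i += 4) { // 4 ints per face
      next.add(new int[] { msg.get(i).intValue(), msg.get(i+1).intValue(),
                           msg.get(i+2).intValue(), msg.get(i+3).intValue() });
    }
    fish = next; // swap in the whole batch so draw() never sees a half-built list
  }
}

void draw() {
  background(0);
  for (int[] f : fish) {
    ellipse(f[0] + f[2]/2, f[1] + f[3]/2, f[2], f[3]); // placeholder fish; swap in the fish PImages here
  }
}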