从相机预览获取的位图中读取像素
我正在创建一个 Android 应用程序,它要求我创建相机预览的位图,以便我可以平均预览中心的像素,但是无论相机指向什么,应用程序始终返回 0, 0, 0,作为从屏幕中心的 9x9 正方形的平均值读取的 RGB 值。 Preview.java 和主 java 文件都在那里。这是 android 的问题(如果是的话有办法解决吗),还是我的代码有问题?谢谢! (抱歉,如果代码有点混乱)
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import android.app.Activity;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.hardware.Camera;
import android.hardware.Camera.PictureCallback;
import android.hardware.Camera.ShutterCallback;
import android.os.Bundle;
import android.util.Log;
import android.view.Display;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.TextView;
import android.widget.FrameLayout;
/**
 * Activity that hosts a live camera {@link Preview} and, on a button press,
 * samples a small square of pixels at the screen center and displays the
 * averaged RGB value.
 *
 * NOTE(review): drawing a SurfaceView into an offscreen bitmap does NOT
 * capture the camera preview — the preview frames are composited in a
 * separate hardware layer that View.draw() never sees, so the bitmap stays
 * black and the average comes out 0,0,0. To sample real preview pixels,
 * convert the YUV byte[] delivered to Camera.PreviewCallback.onPreviewFrame
 * into RGB and average that instead.
 */
public class CameraTest2Activity extends Activity {
    private static final String TAG = "ZTG";

    /** Edge length (in pixels) of the square sampled from the screen center. */
    private static final int SAMPLE_SIZE = 9;

    Camera camera;
    Preview preview;

    /** Called when the activity is first created. */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.main);
        preview = new Preview(this);
        ((FrameLayout) findViewById(R.id.preview)).addView(preview);
        final Button button = (Button) findViewById(R.id.bFire);
        button.setOnClickListener(new View.OnClickListener() {
            public void onClick(View v) {
                long timeAtStart = System.currentTimeMillis();
                TextView rgbDisplay = (TextView) findViewById(R.id.RGBText);
                Display display = getWindowManager().getDefaultDisplay();
                int width = display.getWidth();
                int height = display.getHeight();
                Log.d(TAG, "Width and Height Retrieved As: " + width + ", " + height);

                // Render the view hierarchy into an offscreen bitmap.
                // (See class note: the SurfaceView's camera frames are NOT
                // included — only regular View drawing such as Preview.draw().)
                Bitmap b = Bitmap.createBitmap(width, height, Bitmap.Config.RGB_565);
                Canvas c = new Canvas(b);
                Preview view = (Preview) ((ViewGroup) findViewById(R.id.preview)).getChildAt(0);
                view.draw(c);

                int centerX = width / 2;
                int centerY = height / 2;
                int sampleCount = SAMPLE_SIZE * SAMPLE_SIZE;
                int[] pixels = new int[sampleCount];
                // Read a SAMPLE_SIZE x SAMPLE_SIZE square centered on the screen.
                // stride == SAMPLE_SIZE packs the rows contiguously into `pixels`.
                b.getPixels(pixels, 0, SAMPLE_SIZE,
                        centerX - SAMPLE_SIZE / 2, centerY - SAMPLE_SIZE / 2,
                        SAMPLE_SIZE, SAMPLE_SIZE);

                // Sum each channel, then divide once at the end (integer average).
                int red = 0;
                int green = 0;
                int blue = 0;
                for (int pixel : pixels) {
                    red += Color.red(pixel);
                    green += Color.green(pixel);
                    blue += Color.blue(pixel);
                }
                Log.v("lookingFor", blue + " " + red + " " + green);
                red /= sampleCount;
                green /= sampleCount;
                blue /= sampleCount;

                Log.d(TAG, "RGB COLOR! R:" + red + " G:" + green + " B:" + blue);
                long totalTime = System.currentTimeMillis() - timeAtStart;
                Log.d(TAG, "Fetching the color took " + totalTime + " milliseconds");
                rgbDisplay.setText("R:" + red + " G:" + green + " B:" + blue);
            }
        });
        Log.d(TAG, "onCreate'd");
    }

    /** Logs the moment the shutter fires (no other behavior). */
    ShutterCallback shutterCallback = new ShutterCallback() {
        public void onShutter() {
            Log.d(TAG, "onShutter'd");
        }
    };

    /** Handles data for raw picture (logging only). */
    PictureCallback rawCallback = new PictureCallback() {
        public void onPictureTaken(byte[] data, Camera camera) {
            Log.d(TAG, "onPictureTaken - raw");
        }
    };

    /** Handles data for jpeg picture: writes the JPEG bytes to the sdcard. */
    PictureCallback jpegCallback = new PictureCallback() {
        public void onPictureTaken(byte[] data, Camera camera) {
            FileOutputStream outStream = null;
            try {
                // Write to sdcard, named by the current timestamp.
                outStream = new FileOutputStream(String.format(
                        "/sdcard/%d.jpg", System.currentTimeMillis()));
                outStream.write(data);
                Log.d(TAG, "onPictureTaken - wrote bytes: " + data.length);
            } catch (IOException e) {
                // FileNotFoundException is an IOException, so one catch covers both.
                e.printStackTrace();
            } finally {
                // Always close the stream, even if the write failed (was leaked before).
                if (outStream != null) {
                    try {
                        outStream.close();
                    } catch (IOException ignored) {
                        // Best-effort close; nothing useful to recover here.
                    }
                }
            }
            Log.d(TAG, "onPictureTaken - jpeg");
        }
    };
}
(这是主java文件)
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
/**
 * SurfaceView that displays the live camera preview.
 *
 * Owns the {@link Camera} for the lifetime of its surface: opened in
 * {@link #surfaceCreated}, configured/started in {@link #surfaceChanged},
 * and fully released in {@link #surfaceDestroyed}.
 */
class Preview extends SurfaceView implements SurfaceHolder.Callback {
    private static final String TAG = "Preview";

    SurfaceHolder mHolder;
    public Camera camera;

    Preview(Context context) {
        super(context);
        // Install a SurfaceHolder.Callback so we get notified when the
        // underlying surface is created and destroyed.
        mHolder = getHolder();
        mHolder.addCallback(this);
        mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
    }

    public void surfaceCreated(SurfaceHolder holder) {
        // The Surface has been created: acquire the camera and tell it where to draw.
        camera = Camera.open();
        try {
            camera.setPreviewDisplay(holder);
            camera.setPreviewCallback(new PreviewCallback() {
                public void onPreviewFrame(byte[] data, Camera arg1) {
                    // `data` holds the raw preview frame (YUV format by default);
                    // this is where preview pixels would actually be sampled.
                    Preview.this.invalidate();
                }
            });
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public void surfaceDestroyed(SurfaceHolder holder) {
        // Surface will be destroyed when we return, so stop the preview and
        // release the camera. The Camera is an exclusive system resource:
        // failing to call release() (as the original code did) keeps it locked
        // until the process dies.
        if (camera != null) {
            camera.setPreviewCallback(null); // prevent callbacks into a dead surface
            camera.stopPreview();
            camera.release();
            camera = null;
        }
    }

    public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
        // Now that the size is known, set up the camera parameters and begin
        // the preview.
        // NOTE(review): the surface size is not guaranteed to be a supported
        // preview size; a robust version should pick the closest match from
        // parameters.getSupportedPreviewSizes().
        Camera.Parameters parameters = camera.getParameters();
        parameters.setPreviewSize(w, h);
        camera.setParameters(parameters);
        camera.startPreview();
    }

    @Override
    public void draw(Canvas canvas) {
        super.draw(canvas);
        // Bug fix: Paint(int) takes FLAG bits, not a color — the original
        // new Paint(Color.RED) set nonsense flags and left the color default.
        Paint p = new Paint();
        p.setColor(Color.RED);
        Log.d(TAG, "draw");
        canvas.drawText("PREVIEW", canvas.getWidth() / 2,
                canvas.getHeight() / 2, p);
    }
}
I'm creating an Android application that requires me to create a bitmap of the camera preview so that I can average the pixels in the center of the preview; however, no matter what the camera is pointed at, the app always returns 0, 0, 0 as the RGB value read from an average of a 9x9 square in the center of the screen. Both Preview.java and the main Java file are included below. Is this an issue with Android (and if so, is there a way around it), or is it an issue with my code? Thanks! (Sorry if the code is slightly messy.)
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import android.app.Activity;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.hardware.Camera;
import android.hardware.Camera.PictureCallback;
import android.hardware.Camera.ShutterCallback;
import android.os.Bundle;
import android.util.Log;
import android.view.Display;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.TextView;
import android.widget.FrameLayout;
/**
 * Activity that hosts a live camera {@link Preview} and, on a button press,
 * samples a small square of pixels at the screen center and displays the
 * averaged RGB value.
 *
 * NOTE(review): drawing a SurfaceView into an offscreen bitmap does NOT
 * capture the camera preview — the preview frames are composited in a
 * separate hardware layer that View.draw() never sees, so the bitmap stays
 * black and the average comes out 0,0,0. To sample real preview pixels,
 * convert the YUV byte[] delivered to Camera.PreviewCallback.onPreviewFrame
 * into RGB and average that instead.
 */
public class CameraTest2Activity extends Activity {
    private static final String TAG = "ZTG";

    /** Edge length (in pixels) of the square sampled from the screen center. */
    private static final int SAMPLE_SIZE = 9;

    Camera camera;
    Preview preview;

    /** Called when the activity is first created. */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.main);
        preview = new Preview(this);
        ((FrameLayout) findViewById(R.id.preview)).addView(preview);
        final Button button = (Button) findViewById(R.id.bFire);
        button.setOnClickListener(new View.OnClickListener() {
            public void onClick(View v) {
                long timeAtStart = System.currentTimeMillis();
                TextView rgbDisplay = (TextView) findViewById(R.id.RGBText);
                Display display = getWindowManager().getDefaultDisplay();
                int width = display.getWidth();
                int height = display.getHeight();
                Log.d(TAG, "Width and Height Retrieved As: " + width + ", " + height);

                // Render the view hierarchy into an offscreen bitmap.
                // (See class note: the SurfaceView's camera frames are NOT
                // included — only regular View drawing such as Preview.draw().)
                Bitmap b = Bitmap.createBitmap(width, height, Bitmap.Config.RGB_565);
                Canvas c = new Canvas(b);
                Preview view = (Preview) ((ViewGroup) findViewById(R.id.preview)).getChildAt(0);
                view.draw(c);

                int centerX = width / 2;
                int centerY = height / 2;
                int sampleCount = SAMPLE_SIZE * SAMPLE_SIZE;
                int[] pixels = new int[sampleCount];
                // Read a SAMPLE_SIZE x SAMPLE_SIZE square centered on the screen.
                // stride == SAMPLE_SIZE packs the rows contiguously into `pixels`.
                b.getPixels(pixels, 0, SAMPLE_SIZE,
                        centerX - SAMPLE_SIZE / 2, centerY - SAMPLE_SIZE / 2,
                        SAMPLE_SIZE, SAMPLE_SIZE);

                // Sum each channel, then divide once at the end (integer average).
                int red = 0;
                int green = 0;
                int blue = 0;
                for (int pixel : pixels) {
                    red += Color.red(pixel);
                    green += Color.green(pixel);
                    blue += Color.blue(pixel);
                }
                Log.v("lookingFor", blue + " " + red + " " + green);
                red /= sampleCount;
                green /= sampleCount;
                blue /= sampleCount;

                Log.d(TAG, "RGB COLOR! R:" + red + " G:" + green + " B:" + blue);
                long totalTime = System.currentTimeMillis() - timeAtStart;
                Log.d(TAG, "Fetching the color took " + totalTime + " milliseconds");
                rgbDisplay.setText("R:" + red + " G:" + green + " B:" + blue);
            }
        });
        Log.d(TAG, "onCreate'd");
    }

    /** Logs the moment the shutter fires (no other behavior). */
    ShutterCallback shutterCallback = new ShutterCallback() {
        public void onShutter() {
            Log.d(TAG, "onShutter'd");
        }
    };

    /** Handles data for raw picture (logging only). */
    PictureCallback rawCallback = new PictureCallback() {
        public void onPictureTaken(byte[] data, Camera camera) {
            Log.d(TAG, "onPictureTaken - raw");
        }
    };

    /** Handles data for jpeg picture: writes the JPEG bytes to the sdcard. */
    PictureCallback jpegCallback = new PictureCallback() {
        public void onPictureTaken(byte[] data, Camera camera) {
            FileOutputStream outStream = null;
            try {
                // Write to sdcard, named by the current timestamp.
                outStream = new FileOutputStream(String.format(
                        "/sdcard/%d.jpg", System.currentTimeMillis()));
                outStream.write(data);
                Log.d(TAG, "onPictureTaken - wrote bytes: " + data.length);
            } catch (IOException e) {
                // FileNotFoundException is an IOException, so one catch covers both.
                e.printStackTrace();
            } finally {
                // Always close the stream, even if the write failed (was leaked before).
                if (outStream != null) {
                    try {
                        outStream.close();
                    } catch (IOException ignored) {
                        // Best-effort close; nothing useful to recover here.
                    }
                }
            }
            Log.d(TAG, "onPictureTaken - jpeg");
        }
    };
}
(thats the main java file)
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
/**
 * SurfaceView that displays the live camera preview.
 *
 * Owns the {@link Camera} for the lifetime of its surface: opened in
 * {@link #surfaceCreated}, configured/started in {@link #surfaceChanged},
 * and fully released in {@link #surfaceDestroyed}.
 */
class Preview extends SurfaceView implements SurfaceHolder.Callback {
    private static final String TAG = "Preview";

    SurfaceHolder mHolder;
    public Camera camera;

    Preview(Context context) {
        super(context);
        // Install a SurfaceHolder.Callback so we get notified when the
        // underlying surface is created and destroyed.
        mHolder = getHolder();
        mHolder.addCallback(this);
        mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
    }

    public void surfaceCreated(SurfaceHolder holder) {
        // The Surface has been created: acquire the camera and tell it where to draw.
        camera = Camera.open();
        try {
            camera.setPreviewDisplay(holder);
            camera.setPreviewCallback(new PreviewCallback() {
                public void onPreviewFrame(byte[] data, Camera arg1) {
                    // `data` holds the raw preview frame (YUV format by default);
                    // this is where preview pixels would actually be sampled.
                    Preview.this.invalidate();
                }
            });
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public void surfaceDestroyed(SurfaceHolder holder) {
        // Surface will be destroyed when we return, so stop the preview and
        // release the camera. The Camera is an exclusive system resource:
        // failing to call release() (as the original code did) keeps it locked
        // until the process dies.
        if (camera != null) {
            camera.setPreviewCallback(null); // prevent callbacks into a dead surface
            camera.stopPreview();
            camera.release();
            camera = null;
        }
    }

    public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
        // Now that the size is known, set up the camera parameters and begin
        // the preview.
        // NOTE(review): the surface size is not guaranteed to be a supported
        // preview size; a robust version should pick the closest match from
        // parameters.getSupportedPreviewSizes().
        Camera.Parameters parameters = camera.getParameters();
        parameters.setPreviewSize(w, h);
        camera.setParameters(parameters);
        camera.startPreview();
    }

    @Override
    public void draw(Canvas canvas) {
        super.draw(canvas);
        // Bug fix: Paint(int) takes FLAG bits, not a color — the original
        // new Paint(Color.RED) set nonsense flags and left the color default.
        Paint p = new Paint();
        p.setColor(Color.RED);
        Log.d(TAG, "draw");
        canvas.drawText("PREVIEW", canvas.getWidth() / 2,
                canvas.getHeight() / 2, p);
    }
}
如果你对这篇内容有疑问,欢迎到本站社区发帖提问 参与讨论,获取更多帮助,或者扫码二维码加入 Web 技术交流群。

绑定邮箱获取回复消息
由于您还没有绑定你的真实邮箱,如果其他用户或者作者回复了您的评论,将不能在第一时间通知您!
发布评论
评论(1)
您的代码对您自己创建的新位图
b
中的像素值进行平均,而不是相机预览或相机照片返回的像素。我认为您需要把传递给 onPreviewFrame 的字节数组从预览格式(很可能是 YUV422 或 YUV420)转换为您喜欢的 RGB 格式,然后再对转换后的 RGB 颜色进行平均计算。
Your code averages the pixel values in your new bitmap
b
that you created, and not the pixels returned by the camera preview or the camera picture. I think you'll need to take the byte array passed to onPreviewFrame, convert it from the preview format (most likely YUV422 or YUV420) to your favorite RGB format, and then do your math on the RGB colors.