I am trying to use the setOneShotPreviewCallback() callback correctly, together with onPreviewFrame(byte[] data, Camera camera), to capture an image exactly as it is shown in the preview. I have implemented all the code and it runs, but the resulting image is of no use. Below is my code; please help me figure out what I am doing wrong.
public class MainActivity extends ActionBarActivity {
private static final String TAG = "CamTestActivity";
Preview preview;
Button buttonClick;
Camera camera;
String fileName;
Activity act;
Context ctx;
int frontCamera;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
ctx = this;
act = this;
//requestWindowFeature(Window.FEATURE_NO_TITLE);
//getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
setContentView(R.layout.activity_main);
preview = new Preview(this, (SurfaceView)findViewById(R.id.surfaceView));
preview.setLayoutParams(new LayoutParams(LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT));
((FrameLayout) findViewById(R.id.preview)).addView(preview);
preview.setKeepScreenOn(true);
frontCamera = findFrontFacingCamera();
buttonClick = (Button) findViewById(R.id.button_capture);
//camera.setOneShotPreviewCallback(cameraPreviewCallback);
buttonClick.setOnClickListener(new OnClickListener() {
public void onClick(View v) {
//camera.setOneShotPreviewCallback(cameraPreviewCallback);
//camera.takePicture(shutterCallback, rawCallback, jpegCallback);
camera.setOneShotPreviewCallback(new Camera.PreviewCallback() {
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
// TODO Auto-generated method stub
Camera.Parameters parameters = camera.getParameters();
int format = parameters.getPreviewFormat();
//YUV formats require more conversion
if (format == ImageFormat.NV21 || format == ImageFormat.YUY2 || format == ImageFormat.NV16) {
int w = parameters.getPreviewSize().width;
int h = parameters.getPreviewSize().height;
Log.d("imageFormat",Integer.toString(format));
Log.d("imageNV21",Integer.toString(ImageFormat.NV21));
Log.d("imageYUY2",Integer.toString(ImageFormat.YUY2));
Log.d("imageNV16",Integer.toString(ImageFormat.NV16));
// Get the YUV image
YuvImage yuv_image = new YuvImage(data, format, w, h, null);
// Convert YUV to JPEG
Rect rect = new Rect(0, 0, w, h);
ByteArrayOutputStream output_stream = new ByteArrayOutputStream();
yuv_image.compressToJpeg(rect, 100, output_stream);
byte[] byt = output_stream.toByteArray();
FileOutputStream outStream = null;
try {
// Write to SD Card
File folder = new File(Environment.getExternalStorageDirectory().toString()+"/SilentCamera/Images");
if(folder.exists()){
Log.d("creating folder","Folder already exists");
//Save the path as a string value
String extStorageDirectory = folder.toString();
Log.d("DirPath",extStorageDirectory);
fileName = String.format(extStorageDirectory+"/%d.jpg", System.currentTimeMillis());
Log.d("ImagePath",fileName);
outStream = new FileOutputStream(fileName);
//Uri uriSavedImage = Uri.fromFile(file);
outStream.write(byt);
outStream.flush();
outStream.close();
Log.d("SilentCam","Frame Received");
}
else{
folder.mkdirs();
String extStorageDirectory = folder.toString();
Log.d("DirPath",extStorageDirectory);
fileName = String.format(extStorageDirectory+"/%d.jpg", System.currentTimeMillis());
Log.d("ImagePath",fileName);
outStream = new FileOutputStream(fileName);
outStream.write(byt);
outStream.close();
Log.d("PassMainCamera", "onPictureTaken - wrote bytes: " + data.length);
Log.d("ImageUrl","Image Url added to database");
}
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
}
}
}
});
//camera.startPreview();
}
});
}
@Override
protected void onResume() {
super.onResume();
camera = Camera.open(frontCamera);
camera.startPreview();
preview.setCamera(camera);
resetCam();
}
@Override
protected void onPause() {
if(camera != null) {
camera.stopPreview();
preview.setCamera(null);
camera.release();
camera = null;
}
super.onPause();
}
private void resetCam() {
camera.startPreview();
preview.setCamera(camera);
}
public static void setCameraDisplayOrientation(Context context,
int cameraId, android.hardware.Camera camera)
{
int result = MainActivity.getCameraDisplayOrientation(context, cameraId, camera);
if (android.os.Build.VERSION.SDK_INT <= 14)
{
camera.stopPreview();
camera.setDisplayOrientation(result);
camera.startPreview();
Log.i("Version<=14","inSettingOrirntation");
} else
{
camera.setDisplayOrientation(90);
Log.i("Version>14","inSettingOrirntation");
}
}// end setCameraDisplayOrientation
public static int getCameraDisplayOrientation(Context context,
int cameraId, android.hardware.Camera camera)
{
android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
android.hardware.Camera.getCameraInfo(cameraId, info);
int rotation = ((Activity) context).getWindowManager().getDefaultDisplay()
.getRotation();
int degrees = 0;
switch (rotation)
{
case Surface.ROTATION_0:
degrees = 0;
break;
case Surface.ROTATION_90:
degrees = 90;
break;
case Surface.ROTATION_180:
degrees = 180;
break;
case Surface.ROTATION_270:
degrees = 270;
break;
}
int result;
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT)
{
result = (info.orientation + degrees) % 360;
result = (360 - result) % 360; // compensate the mirror
} else
{ // back-facing
result = (info.orientation - degrees + 360) % 360;
}
return result;
}
// finding the id of the front-facing camera
private int findFrontFacingCamera() {
int foundId = -1;
int numCams = Camera.getNumberOfCameras();
for (int camId = 0; camId < numCams; camId++) {
CameraInfo info = new CameraInfo();
Camera.getCameraInfo(camId, info);
if (info.facing == CameraInfo.CAMERA_FACING_FRONT) {
foundId = camId;
break;
}
}
return foundId;
}
}
And this is my SurfaceHolder implementation:
class Preview extends ViewGroup implements SurfaceHolder.Callback {
private final String TAG = "Preview";
SurfaceView mSurfaceView;
SurfaceHolder mHolder;
Size mPreviewSize;
List<Size> mSupportedPreviewSizes;
Camera mCamera;
private Activity activity;
Context context;
Preview(Context _context, SurfaceView sv) {
super(_context);
context = _context;
mSurfaceView = sv;
mHolder = mSurfaceView.getHolder();
mHolder.addCallback(this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
public void setCamera(Camera camera) {
mCamera = camera;
if (mCamera != null) {
mSupportedPreviewSizes = mCamera.getParameters().getSupportedPreviewSizes();
requestLayout();
Camera.Parameters params = mCamera.getParameters();
List<String> focusModes = params.getSupportedFocusModes();
if (focusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
params.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
mCamera.setParameters(params);
}
}
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
final int width = resolveSize(getSuggestedMinimumWidth(), widthMeasureSpec);
final int height = resolveSize(getSuggestedMinimumHeight(), heightMeasureSpec);
setMeasuredDimension(width, height);
if (mSupportedPreviewSizes != null) {
mPreviewSize = getOptimalPreviewSize(mSupportedPreviewSizes, width, height);
}
}
@Override
protected void onLayout(boolean changed, int l, int t, int r, int b) {
if (changed && getChildCount() > 0) {
final View child = getChildAt(0);
final int width = r - l;
final int height = b - t;
int previewWidth = width;
int previewHeight = height;
if (mPreviewSize != null) {
previewWidth = mPreviewSize.width;
previewHeight = mPreviewSize.height;
}
if (width * previewHeight > height * previewWidth) {
final int scaledChildWidth = previewWidth * height / previewHeight;
child.layout((width - scaledChildWidth) / 2, 0,
(width + scaledChildWidth) / 2, height);
} else {
final int scaledChildHeight = previewHeight * width / previewWidth;
child.layout(0, (height - scaledChildHeight) / 2,
width, (height + scaledChildHeight) / 2);
}
}
}
public void surfaceCreated(SurfaceHolder holder) {
try {
if (mCamera != null) {
mCamera.setPreviewDisplay(holder);
// mCamera.startPreview();
}
} catch (IOException exception) {
Log.e(TAG, "IOException caused by setPreviewDisplay()", exception);
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
if (mCamera != null) {
// mCamera.stopPreview();
}
}
private Size getOptimalPreviewSize(List<Size> sizes, int w, int h) {
final double ASPECT_TOLERANCE = 0.1;
double targetRatio = (double) w / h;
if (sizes == null) return null;
Size optimalSize = null;
double minDiff = Double.MAX_VALUE;
int targetHeight = h;
for (Size size : sizes) {
double ratio = (double) size.width / size.height;
if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE) continue;
if (Math.abs(size.height - targetHeight) < minDiff) {
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
if (optimalSize == null) {
minDiff = Double.MAX_VALUE;
for (Size size : sizes) {
if (Math.abs(size.height - targetHeight) < minDiff) {
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
}
return optimalSize;
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
if(mCamera != null) {
Camera.Parameters parameters = mCamera.getParameters();
String sceneMode = "SCENE_MODE_PORTRAIT";
parameters.setPreviewSize(mPreviewSize.width, mPreviewSize.height);
parameters.setPictureSize(mPreviewSize.width, mPreviewSize.height);
Log.d("Preview Width",Integer.toString(mPreviewSize.width));
Log.d("Preview Height",Integer.toString(mPreviewSize.height));
Log.d("Preview Zoom",Integer.toString(parameters.getZoom()));
//parameters.setSceneMode(sceneMode);
//parameters.se
requestLayout();
//MainActivity.setCameraDisplayOrientation(context,Camera.CameraInfo.CAMERA_FACING_FRONT, mCamera);
mCamera.setParameters(parameters);
//mCamera.startPreview();
//mCamera.setDisplayOrientation(90);
}
}
}
This is all my code. It's been 3 days and I am still stuck on this problem; everyone says it works, but not for me. I am testing on a Samsung Galaxy S running Android 2.3.6 Gingerbread. Please help me. Thanks.
1 solution
#1
This is because the data provided as a parameter is not in JPEG format; it is a raw preview frame (typically NV21). You need to convert it first:
Camera.Parameters parameters = camera.getParameters();
final int format = parameters.getPreviewFormat();
// width and height of the preview frame
final int width = parameters.getPreviewSize().width;
final int height = parameters.getPreviewSize().height;
YuvImage im = new YuvImage(data, format, width, height, null);
ByteArrayOutputStream out = new ByteArrayOutputStream();
final Rect rect = new Rect(0, 0, width, height);
im.compressToJpeg(rect, 100, out);
byte[] jpeg = out.toByteArray();
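One thing to check after the conversion: on many front-facing cameras the preview frame arrives in the sensor's native orientation, so the saved JPEG can look rotated or mirrored even when the on-screen preview looks correct. If that is the issue, below is a minimal sketch of a post-processing step; it is not part of the original answer. It assumes jpeg is the byte array produced by compressToJpeg() above and that rotationDegrees is obtained from something like the question's getCameraDisplayOrientation(); both names are placeholders.
// Sketch only: rotate the compressed frame so the saved file matches the preview.
// "jpeg" is assumed to be the byte[] from compressToJpeg(); "rotationDegrees"
// is assumed to come from getCameraDisplayOrientation() or similar.
private static byte[] rotateJpeg(byte[] jpeg, int rotationDegrees) {
    Bitmap source = BitmapFactory.decodeByteArray(jpeg, 0, jpeg.length);
    Matrix matrix = new Matrix();
    matrix.postRotate(rotationDegrees);
    Bitmap rotated = Bitmap.createBitmap(source, 0, 0,
            source.getWidth(), source.getHeight(), matrix, true);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    rotated.compress(Bitmap.CompressFormat.JPEG, 100, out);
    return out.toByteArray();
}
The returned bytes can then be written with the same FileOutputStream code the question already uses. Whether any rotation is needed, and by how many degrees, varies per device, so treat the angle as something to log and verify rather than a constant.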