Android camera - app passed NULL surface

Every time I run cam_thread I get an "app passed NULL surface" error. This code works on the HTC Incredible 1; I restructured it slightly to run on a Droid X, but I still get the same error.
public class Android_Activity extends Activity
{
    Main_thread simulator;
    ToggleButton togglebutton;
    EditText ip_text;
    SensorManager sm = null;
    SurfaceView view;
    Sensors_thread the_sensors = null;
    String IP_address;
    Android_Activity the_app;

    @Override
    protected void onCreate(Bundle savedInstanceState)
    {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.main);
        view = new SurfaceView(this);
        sm = (SensorManager) getSystemService(SENSOR_SERVICE);
        ip_text = (EditText) findViewById(R.id.IP_edit_txt);
        togglebutton = (ToggleButton) findViewById(R.id.CameraButton);
        togglebutton.setOnClickListener(new btn_listener());
        the_app = this;
    }

    @Override
    protected void onResume()
    {
        super.onResume();
    }

    protected void onStop()
    {
        super.onStop();
        simulator.stop_simu();
        this.finish();
    }

    private class btn_listener implements OnClickListener
    {
        public void onClick(View v)
        {
            // Perform action on clicks
            if (togglebutton.isChecked())
            {
                IP_address = ip_text.getText().toString();
                simulator = new Main_thread(the_app, view, sm, IP_address);
                the_sensors = simulator.the_sensors;
                sm.registerListener(the_sensors,
                        SensorManager.SENSOR_ORIENTATION | SensorManager.SENSOR_ACCELEROMETER,
                        SensorManager.SENSOR_DELAY_UI);
                simulator.start();
                Toast.makeText(Android_Activity.this, "Start streaming" + IP_address, Toast.LENGTH_SHORT).show();
            } else
            {
                simulator.stop_simu();
                sm.unregisterListener(the_sensors);
                Toast.makeText(Android_Activity.this, "Stop streaming", Toast.LENGTH_SHORT).show();
            }
        }
    }
}
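One detail that may matter here: `view` is created with `new SurfaceView(this)` but never added to the content view, so its surface may never be created before the camera thread calls `setPreviewDisplay()`. The following is only a minimal sketch of how the view could be attached and the camera start deferred until `surfaceCreated()` fires; the class and field names are illustrative assumptions, not part of the original project.

import android.app.Activity;
import android.os.Bundle;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.ViewGroup;

// Sketch only (not the original code): host the SurfaceView in the window so
// Android actually creates a surface for it, and wait for surfaceCreated().
public class PreviewHostActivity extends Activity implements SurfaceHolder.Callback {
    private SurfaceView previewView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        previewView = new SurfaceView(this);
        // Attach the view to the window; an unattached SurfaceView has no surface.
        setContentView(previewView, new ViewGroup.LayoutParams(
                ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT));
        SurfaceHolder holder = previewView.getHolder();
        holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS); // required on pre-3.0 devices
        holder.addCallback(this);
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        // The surface is valid from this point on; this is where it would be safe
        // to hand previewView (or its holder) to the camera/streaming thread.
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) { }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        // The camera preview must be stopped before the surface goes away.
    }
}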
Manifest
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="carl.IOIO_car"
android:versionCode="1"
android:versionName="1.0">
<uses-sdk android:minSdkVersion="8" />
<application android:icon="@drawable/icon" android:debuggable="true" android:label="@string/app_name">
<activity android:name="carl.IOIO_car.Android_Activity"
android:label="@string/app_name"
android:theme="@android:style/Theme.NoTitleBar.Fullscreen"
android:screenOrientation="landscape">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
<uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.WAKE_LOCK" />
<uses-permission android:name="android.permission.ACCESS_WIFI_STATE"></uses-permission>
<uses-permission android:name="android.permission.UPDATE_DEVICE_STATS"></uses-permission>
<uses-permission android:name="android.permission.CHANGE_WIFI_STATE"></uses-permission>
<uses-permission android:name="android.permission.CHANGE_WIFI_MULTICAST_STATE"></uses-permission>
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-feature android:name="android.hardware.camera" />
<uses-feature android:name="android.hardware.camera.autofocus" />
</manifest>
package carl.IOIO_car;
import java.io.ByteArrayOutputStream;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.util.List;
import android.graphics.Bitmap;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.util.Log;
import android.view.SurfaceView;
public class Cam_thread
{
Camera mCamera;
public static int HEADER_SIZE = 5;
public static int DATAGRAM_MAX_SIZE = 1450 - HEADER_SIZE;
int frame_nb = 0;
int size_packet_sent = 0;
InetAddress serverAddr;
DatagramSocket socket;
Bitmap mBitmap;
int[] mRGBData;
int width_ima, height_ima;
private static final String TAG = "IP_cam";
SurfaceView parent_context;
private boolean STOP_THREAD;
String ip_address;
public Cam_thread(SurfaceView context, String ip)
{
parent_context = context;
ip_address = ip;
}
private void init()
{
try
{
serverAddr = InetAddress.getByName(ip_address);
socket = new DatagramSocket();
if (mCamera!=null){
Log.e(TAG, "Nulling camera");
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mCamera.release();
mCamera=null;
}
if(mCamera == null){
mCamera = Camera.open();
Log.e(TAG, "Setting up camera");
Camera.Parameters parameters = mCamera.getParameters();
//get a list of supported preview sizes and assign one
List<Camera.Size> previewSizes = parameters.getSupportedPreviewSizes();
Camera.Size previewSize = previewSizes.get(0);
parameters.setPreviewSize(previewSize.width, previewSize.height);
//Set Frame rate
parameters.setPreviewFrameRate(30);
//Set Scene
List<String> modes = parameters.getSupportedSceneModes();
parameters.setSceneMode(modes.get(0));
//Set focus mode
List<String> focus = parameters.getSupportedFocusModes();
parameters.setFocusMode(focus.get(0));
//Apply parameters to camera object
mCamera.setParameters(parameters);
//Provide a surface
if(parent_context.getHolder()==null)
Log.e(TAG, "Its a null holder");
Log.e("Debug", "Before");
mCamera.setPreviewDisplay(parent_context.getHolder());
Log.e("Debug", "After");
//Sets a call when preview data is available
mCamera.setPreviewCallback(new cam_PreviewCallback());
Log.e(TAG, "Camera configured");
//Start the preview
Log.e(TAG, "Starting preview");
mCamera.startPreview();
/*
parameters.setPreviewSize(320, 240);
parameters.setPreviewFrameRate(30);
parameters.setSceneMode(Camera.Parameters.SCENE_MODE_SPORTS);
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
parameters.setColorEffect(Camera.Parameters.EFFECT_NONE);
mCamera.setParameters(parameters);
mCamera.setPreviewDisplay(parent_context.getHolder());
mCamera.setPreviewCallback(new cam_PreviewCallback());
Log.e(TAG, "Starting preview");
mCamera.startPreview();
*/
}
}
catch (Exception exception)
{
Log.e(TAG, "Error: ", exception);
}
}
public void start_thread()
{
Log.e("Cam", "Started the Cam thread");
init();
}
public void stop_thread()
{
STOP_THREAD = true;
if (mCamera!=null){
mCamera.stopPreview();
mCamera.release();
mCamera=null;
}
socket.close();
}
public void send_data_UDP()
{
Log.e(TAG, "Started sending cam data");
if(mBitmap != null)
{
int size_p=0,i;
ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
mBitmap.compress(Bitmap.CompressFormat.JPEG, 50, byteStream); // !!!!!!! change compression rate to change packets size
byte data[] = byteStream.toByteArray();
Log.e(TAG, "SIZE: " + data.length);
int nb_packets = (int) Math.ceil(data.length/(float)DATAGRAM_MAX_SIZE);
int size = DATAGRAM_MAX_SIZE;
/* Loop through slices */
for(i = 0; i < nb_packets; i++)
{
if(i >0 && i == nb_packets-1) size = data.length - i * DATAGRAM_MAX_SIZE;
/* Set additional header */
byte[] data2 = new byte[HEADER_SIZE + size];
data2[0] = (byte)frame_nb;
data2[1] = (byte)nb_packets;
data2[2] = (byte)i;
data2[3] = (byte)(size >> 8);
data2[4] = (byte)size;
/* Copy current slice to byte array */
System.arraycopy(data, i * DATAGRAM_MAX_SIZE, data2, HEADER_SIZE, size);
try
{
size_p = data2.length;
DatagramPacket packet = new DatagramPacket(data2, size_p, serverAddr, 9000);
socket.send(packet);
Log.e(TAG, "Sent a cam frame!");
} catch (Exception e) { Log.e(TAG, "Error: ", e);}
}
frame_nb++;
if(frame_nb == 128) frame_nb=0;
}
}
/* function converting image to RGB format taken from project: ViewfinderEE368
* http://www.stanford.edu/class/ee368/Android/ViewfinderEE368/
*
* Copyright (C) 2007 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
static public void decodeYUV420SP(int[] rgb, byte[] yuv420sp, int width, int height)
{
final int frameSize = width * height;
for (int j = 0, yp = 0; j < height; j++) {
int uvp = frameSize + (j >> 1) * width, u = 0, v = 0;
for (int i = 0; i < width; i++, yp++) {
int y = (0xff & ((int) yuv420sp[yp])) - 16;
if (y < 0) y = 0;
if ((i & 1) == 0) {
v = (0xff & yuv420sp[uvp++]) - 128;
u = (0xff & yuv420sp[uvp++]) - 128;
}
int y1192 = 1192 * y;
int r = (y1192 + 1634 * v);
int g = (y1192 - 833 * v - 400 * u);
int b = (y1192 + 2066 * u);
if (r < 0) r = 0; else if (r > 262143) r = 262143;
if (g < 0) g = 0; else if (g > 262143) g = 262143;
if (b < 0) b = 0; else if (b > 262143) b = 262143;
rgb[yp] = 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
}
}
}
// Preview callback used whenever new frame is available...send image via UDP !!!
private class cam_PreviewCallback implements PreviewCallback
{
@Override
public void onPreviewFrame(byte[] data, Camera camera)
{
if(STOP_THREAD == true)
{
mCamera.setPreviewCallback(null);
mCamera.stopPreview();
mCamera.release();
mCamera = null;
return;
}
if (mBitmap == null) //create Bitmap image first time
{
Camera.Parameters params = camera.getParameters();
width_ima = params.getPreviewSize().width;
height_ima = params.getPreviewSize().height;
mBitmap = Bitmap.createBitmap(width_ima, height_ima, Bitmap.Config.RGB_565);
mRGBData = new int[width_ima * height_ima];
}
decodeYUV420SP(mRGBData, data, width_ima, height_ima);
mBitmap.setPixels(mRGBData, 0, width_ima, 0, 0, width_ima, height_ima);
send_data_UDP();
}
}
}
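For reference, send_data_UDP() above slices each JPEG-compressed frame into datagrams with a 5-byte header: frame number, total packet count, packet index, and a 16-bit big-endian payload size. Below is a minimal sketch of a receiver that parses that header. The port (9000) mirrors the sender above; everything else (class name, printing instead of reassembling) is an illustrative assumption.

import java.net.DatagramPacket;
import java.net.DatagramSocket;

// Sketch of a receiver matching the 5-byte header written by send_data_UDP().
public class FrameReceiver {
    public static void main(String[] args) throws Exception {
        DatagramSocket socket = new DatagramSocket(9000);
        byte[] buf = new byte[1500];
        while (true) { // runs until killed; fine for a throwaway test tool
            DatagramPacket packet = new DatagramPacket(buf, buf.length);
            socket.receive(packet);
            byte[] d = packet.getData();
            int frameNb   = d[0] & 0xff;               // byte 0: frame number
            int nbPackets = d[1] & 0xff;               // byte 1: packets in this frame
            int index     = d[2] & 0xff;               // byte 2: packet index
            int size      = ((d[3] & 0xff) << 8) | (d[4] & 0xff); // bytes 3-4: payload size
            // The JPEG slice is d[5 .. 5+size); slices sharing a frame number are
            // concatenated in index order to rebuild one compressed frame.
            System.out.printf("frame %d, packet %d/%d, %d bytes%n",
                    frameNb, index + 1, nbPackets, size);
        }
    }
}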
Main thread
package carl.IOIO_car;

import android.hardware.SensorManager;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;

public class Main_thread extends Thread
{
    SurfaceView parent_context;
    SensorManager mSensorManager = null;
    Cam_thread the_cam;
    Sensors_thread the_sensors;
    IOIO_Thread ioio_thread_;
    String ip_address;
    Android_Activity the_app;

    public Main_thread(Android_Activity app, SurfaceView v, SensorManager m, String ip)
    {
        super();
        parent_context = v;
        mSensorManager = m;
        ip_address = ip;
        the_app = app;
        Log.e("Debug Main", "IP is " + ip_address);
        the_cam = new Cam_thread(parent_context, ip_address);
        the_sensors = new Sensors_thread(mSensorManager, ip_address);
        ioio_thread_ = new IOIO_Thread(the_app, ip_address);
    }

    public void run()
    {
        //ioio_thread_.start();
        the_cam.start_thread();
    }

    public void stop_simu()
    {
        the_cam.stop_thread();
        the_sensors.stop_thread();
        //ioio_thread_.abort();
    }
}
The original Cam_thread code was written by Dr. Oros: http://www.cogsci.uci.edu/~noros/android_car.html
I solved this. The only reason this code doesn't work is that the Nexus 7 and Droid X don't support passing a dummy surface view to the camera object. Your response that every camera must be created in the activity is false; as you can see, I was able to instantiate the camera inside the thread just fine by passing the view. – EDPittore
Hi @EDPittore, I have the same problem but in a different situation (a camera app with a live wallpaper as the background). If you have dealt with this, please help; my question is on Stack Overflow: http://stackoverflow.com/questions/25801536/android-camera-app –
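For readers hitting the same wall: the comment above only says that these devices reject a dummy, off-screen SurfaceView. One commonly used alternative on API 11+ is to hand the camera a dummy SurfaceTexture instead of a SurfaceView. The sketch below shows that workaround; it is not EDPittore's actual fix, and the class/method names and the texture id are illustrative assumptions.

import android.graphics.SurfaceTexture;
import android.hardware.Camera;

// Sketch only: receive preview callbacks without showing anything on screen by
// using a dummy SurfaceTexture as the preview target (API 11+).
public class DummyTexturePreview {
    public static Camera openWithDummyTexture() throws java.io.IOException {
        Camera camera = Camera.open();
        SurfaceTexture dummyTexture = new SurfaceTexture(10); // arbitrary GL texture name
        camera.setPreviewTexture(dummyTexture);               // API 11+ only
        camera.setPreviewCallback(new Camera.PreviewCallback() {
            @Override
            public void onPreviewFrame(byte[] data, Camera cam) {
                // Preview frames still arrive here even though nothing is drawn;
                // this is where the UDP streaming code would hook in.
            }
        });
        camera.startPreview();
        return camera;
    }
}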