diff --git a/camera/manifests/android/AndroidManifest.xml b/camera/manifests/android/AndroidManifest.xml
new file mode 100644
index 0000000..314b774
--- /dev/null
+++ b/camera/manifests/android/AndroidManifest.xml
@@ -0,0 +1,6 @@
+<?xml version="1.0" encoding="utf-8"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+          package="com.defold.android.camera">
+
+    <uses-permission android:name="android.permission.CAMERA" />
+</manifest>
diff --git a/camera/manifests/android/build.gradle b/camera/manifests/android/build.gradle
new file mode 100644
index 0000000..7947fa2
--- /dev/null
+++ b/camera/manifests/android/build.gradle
@@ -0,0 +1,3 @@
+dependencies {
+    compile 'com.android.support:support-compat:28.0.0'
+}
diff --git a/camera/src/android/AndroidCamera.java b/camera/src/android/AndroidCamera.java
new file mode 100644
index 0000000..9b88c18
--- /dev/null
+++ b/camera/src/android/AndroidCamera.java
@@ -0,0 +1,262 @@
+package com.defold.android.camera;
+
+import android.content.Context;
+import android.app.Activity;
+import android.app.Fragment;
+import android.app.FragmentManager;
+import android.os.Build;
+import android.os.Bundle;
+import android.Manifest;
+import android.support.v4.content.ContextCompat;
+import android.content.pm.PackageManager;
+import android.util.Log;
+
+import android.hardware.Camera;
+import android.hardware.Camera.CameraInfo;
+import android.hardware.Camera.PreviewCallback;
+
+import android.graphics.PixelFormat;
+import android.graphics.SurfaceTexture; // API 11
+
+import android.view.Surface;
+
+import java.io.IOException;
+
+class AndroidCamera
+{
+    private static final String TAG = AndroidCamera.class.getSimpleName();
+    private static final String PERMISSION_FRAGMENT_TAG = PermissionsFragment.class.getSimpleName();
+
+    public static class PermissionsFragment extends Fragment {
+        private AndroidCamera camera;
+
+        public PermissionsFragment(final Activity activity, final AndroidCamera camera) {
+            this.camera = camera;
+            final FragmentManager fragmentManager = activity.getFragmentManager();
+            if (fragmentManager.findFragmentByTag(PERMISSION_FRAGMENT_TAG) == null) {
+                fragmentManager.beginTransaction().add(this, PERMISSION_FRAGMENT_TAG).commit();
+            }
+        }
+
+        @Override
+        public void onCreate(Bundle savedInstanceState) {
+            super.onCreate(savedInstanceState);
+            setRetainInstance(true);
+        }
+
+        @Override
+        public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
+            Log.d(TAG, "onRequestPermissionsResult " + requestCode + " " + permissions[0] + " " + grantResults[0]);
+            camera.onRequestPermissionsResult(grantResults[0]);
+        }
+    }
+
+    enum CameraMessage
+    {
+        CAMERA_STARTED(0),
+        CAMERA_STOPPED(1),
+        CAMERA_NOT_PERMITTED(2),
+        CAMERA_ERROR(3),
+        CAMERA_SHOW_PERMISSION_RATIONALE(4);
+
+        private final int value;
+
+        private CameraMessage(int value) {
+            this.value = value;
+        }
+
+        public int getValue() {
+            return value;
+        }
+    }
+
+    private PermissionsFragment permissionFragment;
+    private Camera camera;
+    private SurfaceTexture surface;
+    private boolean newFrame;
+
+    private static Context context;
+
+    static native void frameUpdate(int[] data);
+    static native void queueMessage(int message);
+
+
+    public static AndroidCamera getCamera(Context context)
+    {
+        return new AndroidCamera(context);
+    }
+
+    private AndroidCamera(final Context context)
+    {
+        this.context = context;
+        permissionFragment = new PermissionsFragment((Activity)context, this);
+    }
+
+    private void requestPermission() {
+        Log.d(TAG, "requestPermission");
+        final Activity activity = (Activity)context;
+        if (Build.VERSION.SDK_INT < 23)
+        {
+            Log.d(TAG, "requestPermission SDK_INT < 23");
+            final int grantResult = ContextCompat.checkSelfPermission(activity, Manifest.permission.CAMERA);
+            onRequestPermissionsResult(grantResult);
+        }
+        else
+        {
+            Log.d(TAG, "requestPermission fragment");
+            final FragmentManager fragmentManager = activity.getFragmentManager();
+            final Fragment fragment = fragmentManager.findFragmentByTag(PERMISSION_FRAGMENT_TAG);
+            final String[] permissions = new String[] { Manifest.permission.CAMERA };
+            fragment.requestPermissions(permissions, 100);
+        }
+    }
+
+    public synchronized void onRequestPermissionsResult(int grantResult) {
+        Log.d(TAG, "onRequestPermissionsResult " + grantResult);
+        if (grantResult == PackageManager.PERMISSION_GRANTED)
+        {
+            Log.d(TAG, "onRequestPermissionsResult startPreviewAuthorized");
+            startPreviewAuthorized();
+        }
+        else
+        {
+            Log.d(TAG, "onRequestPermissionsResult ERROR");
+            queueMessage(CameraMessage.CAMERA_ERROR.getValue());
+        }
+    }
+
+    private void startPreviewAuthorized()
+    {
+        CameraInfo info = new CameraInfo();
+        int cameraId = -1;
+        int numberOfCameras = Camera.getNumberOfCameras();
+
+        for(int i = 0; i < numberOfCameras; i++)
+        {
+            Camera.getCameraInfo(i, info);
+            if(info.facing == CameraInfo.CAMERA_FACING_BACK)
+            {
+                cameraId = i;
+                break;
+            }
+        }
+
+        if(cameraId == -1)
+        {
+            queueMessage(CameraMessage.CAMERA_ERROR.getValue());
+            return;
+        }
+
+        surface = new SurfaceTexture(0);
+        camera = Camera.open(cameraId);
+
+        Camera.Parameters params = camera.getParameters();
+        params.setPreviewSize(640, 480);
+        params.setPictureSize(640, 480);
+        params.setPictureFormat(PixelFormat.JPEG);
+        params.setJpegQuality(90);
+        params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
+        camera.setParameters(params);
+
+        final Activity activity = (Activity)context;
+        int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
+
+        camera.setPreviewCallback(new PreviewCallback() {
+            public void onPreviewFrame(byte[] data, Camera arg1) {
+
+                boolean flip = false;
+                int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
+
+                if(rotation == Surface.ROTATION_180 || rotation == Surface.ROTATION_270)
+                    flip = true;
+
+                int[] pixels = convertYUV420_NV21toARGB8888(data, 640, 480, flip);
+                frameUpdate(pixels);
+            }
+        });
+
+        try
+        {
+            camera.setPreviewTexture(surface);
+        }
+        catch(IOException ioe)
+        { // setPreviewTexture failed; ignored
+        }
+        Log.d(TAG, "startPreviewAuthorized starting camera");
+        camera.startPreview();
+        queueMessage(CameraMessage.CAMERA_STARTED.getValue());
+    }
+
+    public void startPreview()
+    {
+        Log.d(TAG, "startPreview");
+        if(camera != null)
+        {
+            queueMessage(CameraMessage.CAMERA_STARTED.getValue());
+            return;
+        }
+
+        requestPermission();
+    }
+
+    public void stopPreview()
+    {
+        if(camera != null)
+        {
+            camera.stopPreview();
+            camera.release();
+            camera = null;
+            queueMessage(CameraMessage.CAMERA_STOPPED.getValue());
+        }
+    }
+
+    private static int[] convertYUV420_NV21toARGB8888(byte [] data, int width, int height, boolean flip) {
+        int size = width*height;
+        int offset = size;
+        int[] pixels = new int[size];
+        int u, v, y1, y2, y3, y4;
+
+        int startPos = 0;
+        int helperIdx = -1;
+        if(flip)
+        {
+            startPos = size - 1;
+            helperIdx = 1;
+        }
+
+        // i along Y and the final pixels
+        // k along pixels U and V
+        for(int i=0, k=0; i < size; i+=2, k+=2) {
+            y1 = data[i  ]&0xff;
+            y2 = data[i+1]&0xff;
+            y3 = data[width+i  ]&0xff;
+            y4 = data[width+i+1]&0xff;
+
+            v = data[offset+k  ]&0xff;
+            u = data[offset+k+1]&0xff;
+            v = v-128;
+            u = u-128;
+
+            pixels[startPos - helperIdx*i        ] = convertYUVtoARGB(y1, u, v);
+            pixels[startPos - helperIdx*(i+1)    ] = convertYUVtoARGB(y2, u, v);
+            pixels[startPos - helperIdx*(width+i)  ] = convertYUVtoARGB(y3, u, v);
+            pixels[startPos - helperIdx*(width+i+1)] = convertYUVtoARGB(y4, u, v);
+
+            if (i!=0 && (i+2)%width==0)
+                i += width;
+        }
+
+        return pixels;
+    }
+
+    // Alt: https://github.com/Jaa-c/android-camera-demo/blob/master/src/com/jaa/camera/CameraSurfaceView.java
+    private static int convertYUVtoARGB(int y, int u, int v) {
+        int r = y + (int)(1.772f*v);
+        int g = y - (int)(0.344f*v + 0.714f*u);
+        int b = y + (int)(1.402f*u);
+        r = r>255? 255 : r<0 ? 0 : r;
+        g = g>255? 255 : g<0 ? 0 : g;
+        b = b>255? 255 : b<0 ? 0 : b;
+        return 0xff000000 | r | (g<<8) | (b<<16);
+    }
+};
diff --git a/camera/src/camera.cpp b/camera/src/camera.cpp
index bd1c3b2..fa60d3b 100644
--- a/camera/src/camera.cpp
+++ b/camera/src/camera.cpp
@@ -10,7 +10,7 @@
 #define DLIB_LOG_DOMAIN LIB_NAME
 #include
 
-#if defined(DM_PLATFORM_IOS) || defined(DM_PLATFORM_OSX)
+#if defined(DM_PLATFORM_IOS) || defined(DM_PLATFORM_OSX) || defined(DM_PLATFORM_ANDROID)
 
 #include
 #include "camera_private.h"
@@ -27,22 +27,23 @@ struct DefoldCamera
     // Information about the currently set camera
     CameraInfo m_Params;
 
-    dmArray<CameraStatus> m_MessageQueue;
+    dmArray<CameraMessage> m_MessageQueue;
 
     dmScript::LuaCallbackInfo* m_Callback;
     dmMutex::HMutex m_Mutex;
 };
 
 DefoldCamera g_DefoldCamera;
 
-void Camera_QueueMessage(CameraStatus status)
+void Camera_QueueMessage(CameraMessage message)
 {
+    dmLogInfo("Camera_QueueMessage %d", message);
     DM_MUTEX_SCOPED_LOCK(g_DefoldCamera.m_Mutex);
 
     if (g_DefoldCamera.m_MessageQueue.Full())
     {
         g_DefoldCamera.m_MessageQueue.OffsetCapacity(1);
     }
-    g_DefoldCamera.m_MessageQueue.Push(status);
+    g_DefoldCamera.m_MessageQueue.Push(message);
 }
 
 static void Camera_ProcessQueue()
@@ -56,22 +57,28 @@ static void Camera_ProcessQueue()
         {
             break;
         }
-        CameraStatus status = g_DefoldCamera.m_MessageQueue[i];
+        CameraMessage message = g_DefoldCamera.m_MessageQueue[i];
 
-        if (status == STATUS_STARTED)
+        if (message == CAMERA_STARTED)
         {
+            dmLogInfo("Camera_ProcessQueue CAMERA_STARTED");
            // Increase ref count
            dmScript::LuaHBuffer luabuffer = {g_DefoldCamera.m_VideoBuffer, false};
            dmScript::PushBuffer(L, luabuffer);
            g_DefoldCamera.m_VideoBufferLuaRef = dmScript::Ref(L, LUA_REGISTRYINDEX);
         }
-        else if (status == STATUS_STOPPED)
+        else if (message == CAMERA_STOPPED)
         {
+            dmLogInfo("Camera_ProcessQueue CAMERA_STOPPED");
            dmScript::Unref(L, LUA_REGISTRYINDEX, g_DefoldCamera.m_VideoBufferLuaRef); // We want it destroyed by the GC
            g_DefoldCamera.m_VideoBufferLuaRef = 0;
         }
+        else
+        {
+            dmLogInfo("Camera_ProcessQueue SOMETHING ELSE");
+        }
 
-        lua_pushnumber(L, (lua_Number)status);
+        lua_pushnumber(L, (lua_Number)message);
         int ret = lua_pcall(L, 2, 0, 0);
         if (ret != 0)
         {
@@ -176,10 +183,10 @@
     SETCONSTANT(CAPTURE_QUALITY_MEDIUM)
     SETCONSTANT(CAPTURE_QUALITY_HIGH)
 
-    SETCONSTANT(STATUS_STARTED)
-    SETCONSTANT(STATUS_STOPPED)
-    SETCONSTANT(STATUS_NOT_PERMITTED)
-    SETCONSTANT(STATUS_ERROR)
+    SETCONSTANT(CAMERA_STARTED)
+    SETCONSTANT(CAMERA_STOPPED)
+    SETCONSTANT(CAMERA_NOT_PERMITTED)
+    SETCONSTANT(CAMERA_ERROR)
 
 #undef SETCONSTANT
@@ -197,11 +204,13 @@ dmExtension::Result InitializeCamera(dmExtension::Params* params)
 {
     LuaInit(params->m_L);
     g_DefoldCamera.m_Mutex = dmMutex::New();
+    CameraPlatform_Initialize();
     return dmExtension::RESULT_OK;
 }
 
 static dmExtension::Result UpdateCamera(dmExtension::Params* params)
 {
+    CameraPlatform_UpdateCapture();
     Camera_ProcessQueue();
     return dmExtension::RESULT_OK;
 }
diff --git a/camera/src/camera_android.cpp b/camera/src/camera_android.cpp
new file mode 100644
index 0000000..1ca7522
--- /dev/null
+++ b/camera/src/camera_android.cpp
@@ -0,0 +1,204 @@
+#include <dmsdk/sdk.h>
+
+#if defined(DM_PLATFORM_ANDROID)
+
+#include "camera_private.h"
+
+static const uint32_t CAMERA_WIDTH = 640;
+static const uint32_t CAMERA_HEIGHT = 480;
+
+static jclass g_cameraClass = 0;
+static jmethodID g_initMethodId = 0;
+static jmethodID g_startPreviewMethodId = 0;
+static jmethodID g_stopPreviewMethodId = 0;
+static jmethodID g_getCameraMethodId = 0;
+static jmethodID g_setCallbackDataMethodId = 0;
+static jobject g_cameraObject = 0;
+
+static jint g_data[CAMERA_WIDTH * CAMERA_HEIGHT];
+static bool g_frameLock = false;
+
+static dmBuffer::HBuffer g_VideoBuffer = 0;
+
+
+
+static JNIEnv* Attach()
+{
+    JNIEnv* env;
+    JavaVM* vm = dmGraphics::GetNativeAndroidJavaVM();
+    vm->AttachCurrentThread(&env, NULL);
+    return env;
+}
+
+static bool Detach(JNIEnv* env)
+{
+    bool exception = (bool) env->ExceptionCheck();
+    env->ExceptionClear();
+    JavaVM* vm = dmGraphics::GetNativeAndroidJavaVM();
+    vm->DetachCurrentThread();
+    return !exception;
+}
+
+static jclass GetClass(JNIEnv* env, const char* classname)
+{
+    jclass activity_class = env->FindClass("android/app/NativeActivity");
+    jmethodID get_class_loader = env->GetMethodID(activity_class,"getClassLoader", "()Ljava/lang/ClassLoader;");
+    jobject cls = env->CallObjectMethod(dmGraphics::GetNativeAndroidActivity(), get_class_loader);
+    jclass class_loader = env->FindClass("java/lang/ClassLoader");
+    jmethodID find_class = env->GetMethodID(class_loader, "loadClass", "(Ljava/lang/String;)Ljava/lang/Class;");
+
+    jstring str_class_name = env->NewStringUTF(classname);
+    jclass outcls = (jclass)env->CallObjectMethod(cls, find_class, str_class_name);
+    env->DeleteLocalRef(str_class_name);
+    return outcls;
+}
+
+
+extern "C"
+{
+    JNIEXPORT void JNICALL Java_com_defold_android_camera_AndroidCamera_frameUpdate(JNIEnv * env, jobject jobj, jintArray data);
+    JNIEXPORT void JNICALL Java_com_defold_android_camera_AndroidCamera_queueMessage(JNIEnv * env, jobject jobj, jint message);
+}
+
+JNIEXPORT void JNICALL Java_com_defold_android_camera_AndroidCamera_frameUpdate(JNIEnv * env, jobject jobj, jintArray data)
+{
+    if(!g_frameLock)
+    {
+        env->GetIntArrayRegion(data, 0, CAMERA_WIDTH*CAMERA_HEIGHT, g_data);
+        g_frameLock = true;
+    }
+}
+
+JNIEXPORT void JNICALL Java_com_defold_android_camera_AndroidCamera_queueMessage(JNIEnv * env, jobject jobj, jint message)
+{
+    dmLogInfo("Java_com_defold_android_camera_AndroidCamera_queueMessage %d", (int)message);
+    Camera_QueueMessage((CameraMessage)message);
+}
+
+
+int CameraPlatform_Initialize()
+{
+    JNIEnv* env = Attach();
+    if(!env)
+    {
+        return false;
+    }
+
+    // get the AndroidCamera class
+    if(!g_cameraClass)
+    {
+        jclass tmp = GetClass(env, "com.defold.android.camera/AndroidCamera");
+        g_cameraClass = (jclass)env->NewGlobalRef(tmp);
+        if(!g_cameraClass)
+        {
+            dmLogError("Could not find class 'com.defold.android.camera/AndroidCamera'.");
+            Detach(env);
+            return false;
+        }
+    }
+
+    // get an instance of the AndroidCamera class using the getCamera() method
+    if(!g_getCameraMethodId)
+    {
+        g_getCameraMethodId = env->GetStaticMethodID(g_cameraClass, "getCamera", "(Landroid/content/Context;)Lcom/defold/android/camera/AndroidCamera;");
+        if(!g_getCameraMethodId)
+        {
+            dmLogError("Could not get static method 'getCamera'.");
+            Detach(env);
+            return false;
+        }
+    }
+    if(!g_cameraObject)
+    {
+        jobject tmp1 = env->CallStaticObjectMethod(g_cameraClass, g_getCameraMethodId, dmGraphics::GetNativeAndroidActivity());
+        g_cameraObject = (jobject)env->NewGlobalRef(tmp1);
+    }
+
+    // get reference to startPreview() and stopPreview() methods
+    if(!g_startPreviewMethodId)
+    {
+        g_startPreviewMethodId = env->GetMethodID(g_cameraClass, "startPreview", "()V");
+        assert(g_startPreviewMethodId);
+    }
+    if(!g_stopPreviewMethodId)
+    {
+        g_stopPreviewMethodId = env->GetMethodID(g_cameraClass, "stopPreview", "()V");
+        assert(g_stopPreviewMethodId);
+    }
+
+    Detach(env);
+    return true;
+}
+
+void CameraPlatform_StartCapture(dmBuffer::HBuffer* buffer, CameraType type, CaptureQuality quality, CameraInfo& outparams)
+{
+    dmLogInfo("CameraPlatform_StartCapture");
+    outparams.m_Width = (uint32_t)CAMERA_WIDTH;
+    outparams.m_Height = (uint32_t)CAMERA_HEIGHT;
+
+    // As default behavior, we want portrait mode
+    if (outparams.m_Width > outparams.m_Height) {
+        uint32_t tmp = outparams.m_Width;
+        outparams.m_Width = outparams.m_Height;
+        outparams.m_Height = tmp;
+    }
+
+    uint32_t size = outparams.m_Width * outparams.m_Height;
+    dmBuffer::StreamDeclaration streams_decl[] = {
+        {dmHashString64("rgb"), dmBuffer::VALUE_TYPE_UINT8, 3}
+    };
+    dmBuffer::Create(size, streams_decl, 1, buffer);
+
+    g_VideoBuffer = *buffer;
+
+    if (g_cameraObject)
+    {
+        dmLogInfo("CameraPlatform_StartCapture JNI");
+        JNIEnv* env = Attach();
+        env->CallVoidMethod(g_cameraObject, g_startPreviewMethodId);
+        Detach(env);
+    }
+}
+
+void CameraPlatform_StopCapture()
+{
+    if (g_cameraObject)
+    {
+        JNIEnv* env = Attach();
+        env->CallVoidMethod(g_cameraObject, g_stopPreviewMethodId);
+        Detach(env);
+    }
+}
+
+void CameraPlatform_UpdateCapture()
+{
+    if(g_frameLock)
+    {
+        int width = CAMERA_WIDTH;
+        int height = CAMERA_HEIGHT;
+        int numChannels = 4;
+        uint8_t* out;
+        uint32_t outsize;
+        dmBuffer::GetBytes(g_VideoBuffer, (void**)&out, &outsize);
+
+        uint32_t* data = (uint32_t*)g_data;
+        for( int y = 0; y < height; ++y)
+        {
+            for( int x = 0; x < width; ++x)
+            {
+                // We get the image in landscape mode, so we flip it to portrait mode
+                int index = (width-x-1)*height*3 + (height-y-1)*3;
+
+                // RGB <- ARGB
+                uint32_t argb = data[y*width+x];
+
+                out[index+0] = (argb>>0)&0xFF;  // R
+                out[index+1] = (argb>>8)&0xFF;  // G
+                out[index+2] = (argb>>16)&0xFF; // B
+            }
+        }
+        g_frameLock = false;
+    }
+}
+
+#endif // DM_PLATFORM_ANDROID
diff --git a/camera/src/camera.mm b/camera/src/camera_darwin.mm
similarity index 97%
rename from camera/src/camera.mm
rename to camera/src/camera_darwin.mm
index dfc4065..2b08da8 100644
--- a/camera/src/camera.mm
+++ b/camera/src/camera_darwin.mm
@@ -351,6 +351,11 @@ static CMVideoDimensions FlipCoords(AVCaptureVideoDataOutput* output, const CMVi
 
 @end
 
+int CameraPlatform_Initialize()
+{
+    return 1;
+}
+
 void CameraPlatform_StartCaptureAuthorized(dmBuffer::HBuffer* buffer, CameraType type, CaptureQuality quality, CameraInfo& outparams)
 {
     if(g_Camera.m_Delegate == 0)
@@ -382,11 +387,11 @@ void CameraPlatform_StartCaptureAuthorized(dmBuffer::HBuffer* buffer, CameraType
 
     if (started)
     {
-        Camera_QueueMessage(STATUS_STARTED);
+        Camera_QueueMessage(CAMERA_STARTED);
     }
     else
     {
-        Camera_QueueMessage(STATUS_ERROR);
+        Camera_QueueMessage(CAMERA_ERROR);
     }
 }
 
@@ -415,7 +420,7 @@ void CameraPlatform_StartCapture(dmBuffer::HBuffer* buffer, CameraType type, Cap
             else
             {
                 dmLogInfo("AVAuthorizationStatusNotDetermined - not granted!");
-                Camera_QueueMessage(STATUS_NOT_PERMITTED);
+                Camera_QueueMessage(CAMERA_NOT_PERMITTED);
             }
         }];
     }
@@ -423,13 +428,13 @@ void CameraPlatform_StartCapture(dmBuffer::HBuffer* buffer, CameraType type, Cap
     {
         // The user has previously denied access.
         dmLogInfo("AVAuthorizationStatusDenied");
-        Camera_QueueMessage(STATUS_NOT_PERMITTED);
+        Camera_QueueMessage(CAMERA_NOT_PERMITTED);
     }
     else if (status == AVAuthorizationStatusRestricted)
     {
         // The user can't grant access due to restrictions.
         dmLogInfo("AVAuthorizationStatusRestricted");
-        Camera_QueueMessage(STATUS_NOT_PERMITTED);
+        Camera_QueueMessage(CAMERA_NOT_PERMITTED);
     }
 }
 else
@@ -451,4 +456,6 @@
     }
 }
 
+void CameraPlatform_UpdateCapture() {}
+
 #endif // DM_PLATFORM_IOS/DM_PLATFORM_OSX
diff --git a/camera/src/camera_private.h b/camera/src/camera_private.h
index 206a178..45a0e54 100644
--- a/camera/src/camera_private.h
+++ b/camera/src/camera_private.h
@@ -22,15 +22,18 @@ struct CameraInfo
     CameraType m_Type;
 };
 
-enum CameraStatus
+enum CameraMessage
 {
-    STATUS_STARTED,
-    STATUS_STOPPED,
-    STATUS_NOT_PERMITTED,
-    STATUS_ERROR
+    CAMERA_STARTED,
+    CAMERA_STOPPED,
+    CAMERA_NOT_PERMITTED,
+    CAMERA_ERROR,
+    CAMERA_SHOW_PERMISSION_RATIONALE
 };
 
+extern int CameraPlatform_Initialize();
 extern void CameraPlatform_StartCapture(dmBuffer::HBuffer* buffer, CameraType type, CaptureQuality quality, CameraInfo& outparams);
+extern void CameraPlatform_UpdateCapture();
 extern void CameraPlatform_StopCapture();
 
-void Camera_QueueMessage(CameraStatus message);
+void Camera_QueueMessage(CameraMessage message);
diff --git a/main/main.script b/main/main.script
index 51dad22..af725bb 100644
--- a/main/main.script
+++ b/main/main.script
@@ -26,7 +26,9 @@ local function start_capture(self)
     end
 
     camera.start_capture(type, quality, function(self, status)
-        if status == camera.STATUS_STARTED then
+        print("camera.start_capture", status)
+        pprint(camera)
+        if status == camera.CAMERA_STARTED then
             self.cameraframe = camera.get_frame()
             self.camerainfo = camera.get_info()
             self.cameratextureheader = {
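For reference, a minimal Lua sketch (not part of the patch) of how a script could handle each of the renamed message constants, following the pattern in main/main.script; `type` and `quality` are assumed to be chosen the same way that script already does:

local function start_capture(self, type, quality)
    camera.start_capture(type, quality, function(self, message)
        if message == camera.CAMERA_STARTED then
            -- frame data and camera info only become valid once started
            self.cameraframe = camera.get_frame()
            self.camerainfo = camera.get_info()
        elseif message == camera.CAMERA_STOPPED then
            self.cameraframe = nil
        elseif message == camera.CAMERA_NOT_PERMITTED then
            print("Camera permission was denied")
        elseif message == camera.CAMERA_ERROR then
            print("Camera failed to start")
        end
    end)
end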