diff --git a/README.md b/README.md
index 3d5c9d5..c06b590 100644
--- a/README.md
+++ b/README.md
@@ -14,7 +14,7 @@ Or point to the ZIP file of [a specific release](https://github.com/defold/exten
# Supported platforms
-The currently supported platforms are: OSX + iOS
+The currently supported platforms are macOS, iOS and Android
# FAQ
@@ -52,10 +52,10 @@ camera.CAPTURE_QUALITY_LOW
## Status constants
```lua
-camera.STATUS_STARTED
-camera.STATUS_STOPPED
-camera.STATUS_NOT_PERMITTED
-camera.STATUS_ERROR
+camera.CAMERA_STARTED
+camera.CAMERA_STOPPED
+camera.CAMERA_NOT_PERMITTED
+camera.CAMERA_ERROR
```
@@ -64,8 +64,8 @@ camera.STATUS_ERROR
Start camera capture using the specified camera (front/back) and capture quality. This may trigger a camera usage permission popup. When the popup has been dismissed, the callback will be invoked with the camera start status.
```lua
-camera.start_capture(camera.CAMERA_TYPE_BACK, camera.CAPTURE_QUALITY_HIGH, function(self, status)
- if status == camera.STATUS_STARTED then
+camera.start_capture(camera.CAMERA_TYPE_BACK, camera.CAPTURE_QUALITY_HIGH, function(self, message)
+ if message == camera.CAMERA_STARTED then
-- do stuff
end
end)
diff --git a/camera/manifests/android/AndroidManifest.xml b/camera/manifests/android/AndroidManifest.xml
new file mode 100644
index 0000000..314b774
--- /dev/null
+++ b/camera/manifests/android/AndroidManifest.xml
@@ -0,0 +1,6 @@
+<?xml version="1.0" encoding="utf-8"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+        package="com.defold.android.camera">
+
+    <uses-permission android:name="android.permission.CAMERA" />
+</manifest>
diff --git a/camera/manifests/android/build.gradle b/camera/manifests/android/build.gradle
new file mode 100644
index 0000000..7947fa2
--- /dev/null
+++ b/camera/manifests/android/build.gradle
@@ -0,0 +1,3 @@
+dependencies {
+ compile 'com.android.support:support-compat:28.0.0'
+}
diff --git a/camera/src/android/AndroidCamera.java b/camera/src/android/AndroidCamera.java
new file mode 100644
index 0000000..f049e02
--- /dev/null
+++ b/camera/src/android/AndroidCamera.java
@@ -0,0 +1,260 @@
+package com.defold.android.camera;
+
+import android.content.Context;
+import android.app.Activity;
+import android.app.Fragment;
+import android.app.FragmentManager;
+import android.os.Build;
+import android.os.Bundle;
+import android.Manifest;
+import android.support.v4.content.ContextCompat;
+import android.content.pm.PackageManager;
+import android.util.Log;
+
+import android.hardware.Camera;
+import android.hardware.Camera.CameraInfo;
+import android.hardware.Camera.PreviewCallback;
+
+import android.graphics.PixelFormat;
+import android.graphics.SurfaceTexture; // API 11
+
+import android.view.Surface;
+
+import java.util.List;
+
+import java.io.IOException;
+
+public class AndroidCamera extends Fragment
+{
+ private static final String TAG = AndroidCamera.class.getSimpleName();
+
+ private static final int CAMERA_STARTED = 0;
+ private static final int CAMERA_STOPPED = 1;
+ private static final int CAMERA_NOT_PERMITTED = 2;
+ private static final int CAMERA_ERROR = 3;
+
+ private static final int CAPTURE_QUALITY_LOW = 0;
+ private static final int CAPTURE_QUALITY_MEDIUM = 1;
+ private static final int CAPTURE_QUALITY_HIGH = 2;
+
+ private static final int CAMERA_TYPE_FRONT = 0;
+ private static final int CAMERA_TYPE_BACK = 1;
+
+ private Camera camera;
+ private SurfaceTexture surface;
+ private boolean newFrame;
+ private int position;
+ private int quality;
+ private Camera.Size size;
+
+ private static Context context;
+
+ static native void frameUpdate(int[] data);
+ static native void queueMessage(int message);
+ static native void captureStarted(int width, int height);
+
+
+ public static AndroidCamera getCamera(Context context)
+ {
+ return new AndroidCamera(context);
+ }
+
+ private AndroidCamera(final Context context)
+ {
+ this.context = context;
+
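+ // attach this fragment to the activity so it can call requestPermissions() and receive onRequestPermissionsResult()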
+ final FragmentManager fragmentManager = ((Activity)context).getFragmentManager();
+ if (fragmentManager.findFragmentByTag(TAG) == null) {
+ fragmentManager.beginTransaction().add(this, TAG).commit();
+ }
+ }
+
+ @Override
+ public void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+ setRetainInstance(true);
+ }
+
+ @Override
+ public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
+ int grantResult = grantResults[0];
+ if (grantResult == PackageManager.PERMISSION_GRANTED)
+ {
+ startPreviewAuthorized();
+ }
+ else
+ {
+ queueMessage(CAMERA_NOT_PERMITTED);
+ }
+ }
+
+ private void requestPermission() {
+ final Activity activity = (Activity)context;
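+ // before Android 6.0 (API 23) permissions are granted at install time, so check and report the result immediately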
+ if (Build.VERSION.SDK_INT < 23)
+ {
+ final int grantResult = ContextCompat.checkSelfPermission(activity, Manifest.permission.CAMERA);
+ // onRequestPermissionsResult(grantResult);
+ onRequestPermissionsResult(0, new String[] { Manifest.permission.CAMERA }, new int[] { grantResult });
+ }
+ else
+ {
+ final FragmentManager fragmentManager = activity.getFragmentManager();
+ final Fragment fragment = fragmentManager.findFragmentByTag(TAG);
+ final String[] permissions = new String[] { Manifest.permission.CAMERA };
+ fragment.requestPermissions(permissions, 100);
+ }
+ }
+
+ private void startPreviewAuthorized()
+ {
+ CameraInfo info = new CameraInfo();
+ int cameraId = -1;
+ int numberOfCameras = Camera.getNumberOfCameras();
+
+ for(int i = 0; i < numberOfCameras; i++)
+ {
+ Camera.getCameraInfo(i, info);
+ if(info.facing == CameraInfo.CAMERA_FACING_BACK)
+ {
+ cameraId = i;
+ break;
+ }
+ }
+
+ if(cameraId == -1)
+ {
+ queueMessage(CAMERA_ERROR);
+ return;
+ }
+
+ surface = new SurfaceTexture(0);
+ camera = Camera.open(cameraId);
+
+ Camera.Parameters params = camera.getParameters();
+
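+ // pick a preview size based on the requested quality (assumes the supported sizes are ordered from smallest to largest)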
+ List<Camera.Size> sizes = params.getSupportedPreviewSizes();
+ switch(this.quality)
+ {
+ case CAPTURE_QUALITY_HIGH:
+ this.size = sizes.get(sizes.size() - 1);
+ break;
+ case CAPTURE_QUALITY_LOW:
+ this.size = sizes.get(0);
+ break;
+ case CAPTURE_QUALITY_MEDIUM:
+ default:
+ this.size = sizes.get((int)Math.ceil(sizes.size() / 2));
+ break;
+ }
+
+ params.setPreviewSize(this.size.width, this.size.height);
+ params.setPictureSize(this.size.width, this.size.height);
+ params.setPictureFormat(PixelFormat.JPEG);
+ params.setJpegQuality(90);
+ params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
+ camera.setParameters(params);
+
+ final Activity activity = (Activity)context;
+ int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
+
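+ // every preview frame arrives as NV21; convert it to ARGB and push it to the native extension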
+ camera.setPreviewCallback(new PreviewCallback() {
+ public void onPreviewFrame(byte[] data, Camera arg1) {
+
+ boolean flip = false;
+ int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
+
+ if(rotation == Surface.ROTATION_180 || rotation == Surface.ROTATION_270)
+ flip = true;
+
+ int[] pixels = convertYUV420_NV21toARGB8888(data, AndroidCamera.this.size.width, AndroidCamera.this.size.height, flip);
+ frameUpdate(pixels);
+ }
+ });
+
+ try
+ {
+ camera.setPreviewTexture(surface);
+ }
+ catch(IOException ioe)
+ {
+ }
+
+ captureStarted(this.size.width, this.size.height);
+ camera.startPreview();
+ queueMessage(CAMERA_STARTED);
+ }
+
+ public void startPreview(int position, int quality)
+ {
+ if(camera != null)
+ {
+ queueMessage(CAMERA_STARTED);
+ return;
+ }
+
+ this.position = position;
+ this.quality = quality;
+ requestPermission();
+ }
+
+ public void stopPreview()
+ {
+ if(camera != null)
+ {
+ camera.stopPreview();
+ camera.release();
+ camera = null;
+ queueMessage(CAMERA_STOPPED);
+ }
+ }
+
+ private static int[] convertYUV420_NV21toARGB8888(byte [] data, int width, int height, boolean flip) {
+ int size = width*height;
+ int offset = size;
+ int[] pixels = new int[size];
+ int u, v, y1, y2, y3, y4;
+
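+ // NV21 layout: a full-resolution Y plane followed by interleaved V/U chroma samples subsampled 2x2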
+ int startPos = 0;
+ int helperIdx = -1;
+ if(flip)
+ {
+ startPos = size - 1;
+ helperIdx = 1;
+ }
+
+ // i along Y and the final pixels
+ // k along pixels U and V
+ for(int i=0, k=0; i < size; i+=2, k+=2) {
+ y1 = data[i ]&0xff;
+ y2 = data[i+1]&0xff;
+ y3 = data[width+i ]&0xff;
+ y4 = data[width+i+1]&0xff;
+
+ v = data[offset+k ]&0xff;
+ u = data[offset+k+1]&0xff;
+ v = v-128;
+ u = u-128;
+
+ pixels[startPos - helperIdx*i ] = convertYUVtoARGB(y1, u, v);
+ pixels[startPos - helperIdx*(i+1)] = convertYUVtoARGB(y2, u, v);
+ pixels[startPos - helperIdx*(width+i) ] = convertYUVtoARGB(y3, u, v);
+ pixels[startPos - helperIdx*(width+i+1)] = convertYUVtoARGB(y4, u, v);
+
+ if (i!=0 && (i+2)%width==0)
+ i += width;
+ }
+
+ return pixels;
+ }
+
+ // Alt: https://github.com/Jaa-c/android-camera-demo/blob/master/src/com/jaa/camera/CameraSurfaceView.java
+ private static int convertYUVtoARGB(int y, int u, int v) {
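+ // note: the result is packed with R in the lowest byte (0xAABBGGRR), matching how CameraPlatform_UpdateCapture reads it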
+ int r = y + (int)(1.772f*v);
+ int g = y - (int)(0.344f*v + 0.714f*u);
+ int b = y + (int)(1.402f*u);
+ r = r>255? 255 : r<0 ? 0 : r;
+ g = g>255? 255 : g<0 ? 0 : g;
+ b = b>255? 255 : b<0 ? 0 : b;
+ return 0xff000000 | r | (g<<8) | (b<<16);
+ }
+}
diff --git a/camera/src/camera.cpp b/camera/src/camera.cpp
index bd1c3b2..e026b78 100644
--- a/camera/src/camera.cpp
+++ b/camera/src/camera.cpp
@@ -10,7 +10,7 @@
#define DLIB_LOG_DOMAIN LIB_NAME
#include <dmsdk/sdk.h>
-#if defined(DM_PLATFORM_IOS) || defined(DM_PLATFORM_OSX)
+#if defined(DM_PLATFORM_IOS) || defined(DM_PLATFORM_OSX) || defined(DM_PLATFORM_ANDROID)
#include
#include "camera_private.h"
@@ -27,22 +27,23 @@ struct DefoldCamera
// Information about the currently set camera
CameraInfo m_Params;
- dmArray<CameraStatus> m_MessageQueue;
+ dmArray<CameraMessage> m_MessageQueue;
dmScript::LuaCallbackInfo* m_Callback;
dmMutex::HMutex m_Mutex;
};
DefoldCamera g_DefoldCamera;
-void Camera_QueueMessage(CameraStatus status)
+void Camera_QueueMessage(CameraMessage message)
{
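+ // may be called from the camera/JNI thread; messages are drained on the engine thread in UpdateCamera()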
+ dmLogInfo("Camera_QueueMessage %d", message);
DM_MUTEX_SCOPED_LOCK(g_DefoldCamera.m_Mutex);
if (g_DefoldCamera.m_MessageQueue.Full())
{
g_DefoldCamera.m_MessageQueue.OffsetCapacity(1);
}
- g_DefoldCamera.m_MessageQueue.Push(status);
+ g_DefoldCamera.m_MessageQueue.Push(message);
}
static void Camera_ProcessQueue()
@@ -56,22 +57,22 @@ static void Camera_ProcessQueue()
{
break;
}
- CameraStatus status = g_DefoldCamera.m_MessageQueue[i];
+ CameraMessage message = g_DefoldCamera.m_MessageQueue[i];
- if (status == STATUS_STARTED)
+ if (message == CAMERA_STARTED)
{
// Increase ref count
dmScript::LuaHBuffer luabuffer = {g_DefoldCamera.m_VideoBuffer, false};
dmScript::PushBuffer(L, luabuffer);
g_DefoldCamera.m_VideoBufferLuaRef = dmScript::Ref(L, LUA_REGISTRYINDEX);
}
- else if (status == STATUS_STOPPED)
+ else if (message == CAMERA_STOPPED)
{
dmScript::Unref(L, LUA_REGISTRYINDEX, g_DefoldCamera.m_VideoBufferLuaRef); // We want it destroyed by the GC
g_DefoldCamera.m_VideoBufferLuaRef = 0;
}
- lua_pushnumber(L, (lua_Number)status);
+ lua_pushnumber(L, (lua_Number)message);
int ret = lua_pcall(L, 2, 0, 0);
if (ret != 0)
{
@@ -101,7 +102,7 @@ static int StartCapture(lua_State* L)
Camera_DestroyCallback();
g_DefoldCamera.m_Callback = dmScript::CreateCallback(L, 3);
- CameraPlatform_StartCapture(&g_DefoldCamera.m_VideoBuffer, type, quality, g_DefoldCamera.m_Params);
+ CameraPlatform_StartCapture(&g_DefoldCamera.m_VideoBuffer, type, quality);
return 1;
}
@@ -119,6 +120,7 @@ static int GetInfo(lua_State* L)
{
DM_LUA_STACK_CHECK(L, 1);
+ CameraPlatform_GetCameraInfo(g_DefoldCamera.m_Params);
lua_newtable(L);
lua_pushstring(L, "width");
lua_pushnumber(L, g_DefoldCamera.m_Params.m_Width);
@@ -176,10 +178,10 @@ static void LuaInit(lua_State* L)
SETCONSTANT(CAPTURE_QUALITY_MEDIUM)
SETCONSTANT(CAPTURE_QUALITY_HIGH)
- SETCONSTANT(STATUS_STARTED)
- SETCONSTANT(STATUS_STOPPED)
- SETCONSTANT(STATUS_NOT_PERMITTED)
- SETCONSTANT(STATUS_ERROR)
+ SETCONSTANT(CAMERA_STARTED)
+ SETCONSTANT(CAMERA_STOPPED)
+ SETCONSTANT(CAMERA_NOT_PERMITTED)
+ SETCONSTANT(CAMERA_ERROR)
#undef SETCONSTANT
@@ -197,11 +199,13 @@ dmExtension::Result InitializeCamera(dmExtension::Params* params)
{
LuaInit(params->m_L);
g_DefoldCamera.m_Mutex = dmMutex::New();
+ CameraPlatform_Initialize();
return dmExtension::RESULT_OK;
}
static dmExtension::Result UpdateCamera(dmExtension::Params* params)
{
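+ // let the platform copy any pending camera frame into the video buffer before dispatching queued callback messages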
+ CameraPlatform_UpdateCapture();
Camera_ProcessQueue();
return dmExtension::RESULT_OK;
}
diff --git a/camera/src/camera_android.cpp b/camera/src/camera_android.cpp
new file mode 100644
index 0000000..55bf776
--- /dev/null
+++ b/camera/src/camera_android.cpp
@@ -0,0 +1,216 @@
+#include <dmsdk/sdk.h>
+
+#if defined(DM_PLATFORM_ANDROID)
+
+#include "camera_private.h"
+
+static jclass g_CameraClass = 0;
+static jobject g_CameraObject = 0;
+static jmethodID g_GetCameraMethodId = 0;
+static jmethodID g_StartPreviewMethodId = 0;
+static jmethodID g_StopPreviewMethodId = 0;
+
+static jint *g_Data = 0;
+static bool g_FrameLock = false;
+
+static uint32_t g_Width = 0;
+static uint32_t g_Height = 0;
+static CameraType g_Type;
+static CaptureQuality g_Quality;
+
+static dmBuffer::HBuffer* g_Buffer = 0;
+
+
+static JNIEnv* Attach()
+{
+ JNIEnv* env;
+ JavaVM* vm = dmGraphics::GetNativeAndroidJavaVM();
+ vm->AttachCurrentThread(&env, NULL);
+ return env;
+}
+
+static bool Detach(JNIEnv* env)
+{
+ bool exception = (bool) env->ExceptionCheck();
+ env->ExceptionClear();
+ JavaVM* vm = dmGraphics::GetNativeAndroidJavaVM();
+ vm->DetachCurrentThread();
+ return !exception;
+}
+
+static jclass GetClass(JNIEnv* env, const char* classname)
+{
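+ // FindClass on a native thread cannot see application classes, so resolve the class through the activity's class loader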
+ jclass activity_class = env->FindClass("android/app/NativeActivity");
+ jmethodID get_class_loader = env->GetMethodID(activity_class,"getClassLoader", "()Ljava/lang/ClassLoader;");
+ jobject cls = env->CallObjectMethod(dmGraphics::GetNativeAndroidActivity(), get_class_loader);
+ jclass class_loader = env->FindClass("java/lang/ClassLoader");
+ jmethodID find_class = env->GetMethodID(class_loader, "loadClass", "(Ljava/lang/String;)Ljava/lang/Class;");
+
+ jstring str_class_name = env->NewStringUTF(classname);
+ jclass outcls = (jclass)env->CallObjectMethod(cls, find_class, str_class_name);
+ env->DeleteLocalRef(str_class_name);
+ return outcls;
+}
+
+
+extern "C"
+{
+ JNIEXPORT void JNICALL Java_com_defold_android_camera_AndroidCamera_frameUpdate(JNIEnv * env, jobject jobj, jintArray data);
+ JNIEXPORT void JNICALL Java_com_defold_android_camera_AndroidCamera_queueMessage(JNIEnv * env, jobject jobj, jint message);
+ JNIEXPORT void JNICALL Java_com_defold_android_camera_AndroidCamera_captureStarted(JNIEnv * env, jobject jobj, jint width, jint height);
+}
+
+JNIEXPORT void JNICALL Java_com_defold_android_camera_AndroidCamera_frameUpdate(JNIEnv * env, jobject jobj, jintArray data)
+{
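+ // copy the frame only if the engine has consumed the previous one; g_FrameLock is cleared in CameraPlatform_UpdateCapture()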
+ if(!g_FrameLock)
+ {
+ env->GetIntArrayRegion(data, 0, g_Width * g_Height, g_Data);
+ g_FrameLock = true;
+ }
+}
+
+JNIEXPORT void JNICALL Java_com_defold_android_camera_AndroidCamera_queueMessage(JNIEnv * env, jobject jobj, jint message)
+{
+ Camera_QueueMessage((CameraMessage)message);
+}
+
+JNIEXPORT void JNICALL Java_com_defold_android_camera_AndroidCamera_captureStarted(JNIEnv * env, jobject jobj, jint width, jint height)
+{
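+ // (re)allocate the pixel scratch buffer and create a Defold buffer with a 3-channel uint8 "rgb" stream for the preview size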
+ g_Width = (uint32_t)width;
+ g_Height = (uint32_t)height;
+
+ uint32_t size = g_Width * g_Height;
+ delete[] g_Data;
+ g_Data = new jint[size];
+ dmBuffer::StreamDeclaration streams_decl[] = {
+ {dmHashString64("rgb"), dmBuffer::VALUE_TYPE_UINT8, 3}
+ };
+ dmBuffer::Create(size, streams_decl, 1, g_Buffer);
+}
+
+
+void CameraPlatform_GetCameraInfo(CameraInfo& outparams)
+{
+ outparams.m_Width = (g_Width > g_Height) ? g_Height : g_Width;
+ outparams.m_Height = (g_Width > g_Height) ? g_Width : g_Height;
+ outparams.m_Type = g_Type;
+}
+
+int CameraPlatform_Initialize()
+{
+ JNIEnv* env = Attach();
+ if(!env)
+ {
+ return false;
+ }
+
+ // get the AndroidCamera class
+ jclass tmp = GetClass(env, "com.defold.android.camera/AndroidCamera");
+ g_CameraClass = (jclass)env->NewGlobalRef(tmp);
+ if(!g_CameraClass)
+ {
+ dmLogError("Could not find class 'com.defold.android.camera/AndroidCamera'.");
+ Detach(env);
+ return false;
+ }
+
+ // get an instance of the AndroidCamera class using the getCamera() method
+ g_GetCameraMethodId = env->GetStaticMethodID(g_CameraClass, "getCamera", "(Landroid/content/Context;)Lcom/defold/android/camera/AndroidCamera;");
+ if(!g_GetCameraMethodId)
+ {
+ dmLogError("Could not get static method 'getCamera'.");
+ Detach(env);
+ return false;
+ }
+
+ jobject tmp1 = env->CallStaticObjectMethod(g_CameraClass, g_GetCameraMethodId, dmGraphics::GetNativeAndroidActivity());
+ g_CameraObject = (jobject)env->NewGlobalRef(tmp1);
+ if(!g_CameraObject)
+ {
+ dmLogError("Could not create instance.");
+ Detach(env);
+ return false;
+ }
+
+ // get reference to startPreview() and stopPreview() methods
+ g_StartPreviewMethodId = env->GetMethodID(g_CameraClass, "startPreview", "(II)V");
+ if(!g_StartPreviewMethodId)
+ {
+ dmLogError("Could not get startPreview() method.");
+ Detach(env);
+ return false;
+ }
+ g_StopPreviewMethodId = env->GetMethodID(g_CameraClass, "stopPreview", "()V");
+ if(!g_StopPreviewMethodId)
+ {
+ dmLogError("Could not get stopPreview() method.");
+ Detach(env);
+ return false;
+ }
+
+ Detach(env);
+ return true;
+}
+
+void CameraPlatform_StartCapture(dmBuffer::HBuffer* buffer, CameraType type, CaptureQuality quality)
+{
+ if (!g_CameraObject)
+ {
+ Camera_QueueMessage(CAMERA_ERROR);
+ return;
+ }
+
+ g_Buffer = buffer;
+ g_Type = type;
+ g_Quality = quality;
+
+ JNIEnv* env = Attach();
+ env->CallVoidMethod(g_CameraObject, g_StartPreviewMethodId, (jint)g_Type, (jint)g_Quality);
+ Detach(env);
+}
+
+void CameraPlatform_StopCapture()
+{
+ if (!g_CameraObject)
+ {
+ Camera_QueueMessage(CAMERA_ERROR);
+ return;
+ }
+
+ JNIEnv* env = Attach();
+ env->CallVoidMethod(g_CameraObject, g_StopPreviewMethodId);
+ Detach(env);
+}
+
+void CameraPlatform_UpdateCapture()
+{
+ if(g_FrameLock)
+ {
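+ // a new ARGB frame was delivered from Java; rotate it to portrait and write it into the RGB stream of the video buffer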
+ int width = g_Width;
+ int height = g_Height;
+ int numChannels = 4;
+ uint8_t* out;
+ uint32_t outsize;
+ dmBuffer::GetBytes(*g_Buffer, (void**)&out, &outsize);
+
+ uint32_t* data = (uint32_t*)g_Data;
+ for( int y = 0; y < height; ++y)
+ {
+ for( int x = 0; x < width; ++x)
+ {
+ // We get the image in landscape mode, so we flip it to portrait mode
+ int index = (width-x-1)*height*3 + (height-y-1)*3;
+
+ // RGB <- ARGB
+ uint32_t argb = data[y*width+x];
+
+ out[index+0] = (argb>>0)&0xFF; // R
+ out[index+1] = (argb>>8)&0xFF; // G
+ out[index+2] = (argb>>16)&0xFF; // B
+ }
+ }
+ g_FrameLock = false;
+ }
+}
+
+#endif // DM_PLATFORM_ANDROID
diff --git a/camera/src/camera.mm b/camera/src/camera_darwin.mm
similarity index 94%
rename from camera/src/camera.mm
rename to camera/src/camera_darwin.mm
index dfc4065..f92d377 100644
--- a/camera/src/camera.mm
+++ b/camera/src/camera_darwin.mm
@@ -30,6 +30,8 @@ struct IOSCamera
{
CameraCaptureDelegate* m_Delegate;
dmBuffer::HBuffer m_VideoBuffer;
+ uint32_t m_Width;
+ uint32_t m_Height;
// TODO: Support audio buffers
IOSCamera() : m_Delegate(0), m_VideoBuffer(0)
@@ -351,7 +353,18 @@ static CMVideoDimensions FlipCoords(AVCaptureVideoDataOutput* output, const CMVi
@end
-void CameraPlatform_StartCaptureAuthorized(dmBuffer::HBuffer* buffer, CameraType type, CaptureQuality quality, CameraInfo& outparams)
+void CameraPlatform_GetCameraInfo(CameraInfo& outparams)
+{
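+ // width/height are cached when capture starts in CameraPlatform_StartCaptureAuthorized()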
+ outparams.m_Width = g_Camera.m_Width;
+ outparams.m_Height = g_Camera.m_Height;
+}
+
+int CameraPlatform_Initialize()
+{
+ return 1;
+}
+
+void CameraPlatform_StartCaptureAuthorized(dmBuffer::HBuffer* buffer, CameraType type, CaptureQuality quality)
{
if(g_Camera.m_Delegate == 0)
{
@@ -368,10 +381,10 @@ void CameraPlatform_StartCaptureAuthorized(dmBuffer::HBuffer* buffer, CameraType
BOOL started = [g_Camera.m_Delegate startCamera: cameraposition quality: quality];
- outparams.m_Width = (uint32_t)g_Camera.m_Delegate->m_Size.width;
- outparams.m_Height = (uint32_t)g_Camera.m_Delegate->m_Size.height;
+ g_Camera.m_Width = (uint32_t)g_Camera.m_Delegate->m_Size.width;
+ g_Camera.m_Height = (uint32_t)g_Camera.m_Delegate->m_Size.height;
- uint32_t size = outparams.m_Width * outparams.m_Height;
+ uint32_t size = g_Camera.m_Width * g_Camera.m_Height;
dmBuffer::StreamDeclaration streams_decl[] = {
{dmHashString64("rgb"), dmBuffer::VALUE_TYPE_UINT8, 3}
};
@@ -382,15 +395,15 @@ void CameraPlatform_StartCaptureAuthorized(dmBuffer::HBuffer* buffer, CameraType
if (started)
{
- Camera_QueueMessage(STATUS_STARTED);
+ Camera_QueueMessage(CAMERA_STARTED);
}
else
{
- Camera_QueueMessage(STATUS_ERROR);
+ Camera_QueueMessage(CAMERA_ERROR);
}
}
-void CameraPlatform_StartCapture(dmBuffer::HBuffer* buffer, CameraType type, CaptureQuality quality, CameraInfo& outparams)
+void CameraPlatform_StartCapture(dmBuffer::HBuffer* buffer, CameraType type, CaptureQuality quality)
{
// Only check for permission on iOS 7+ and macOS 10.14+
if ([AVCaptureDevice respondsToSelector:@selector(authorizationStatusForMediaType:)])
@@ -401,7 +414,7 @@ void CameraPlatform_StartCapture(dmBuffer::HBuffer* buffer, CameraType type, Cap
{
// The user has previously granted access to the camera.
dmLogInfo("AVAuthorizationStatusAuthorized");
- CameraPlatform_StartCaptureAuthorized(buffer, type, quality, outparams);
+ CameraPlatform_StartCaptureAuthorized(buffer, type, quality);
}
else if (status == AVAuthorizationStatusNotDetermined)
{
@@ -410,12 +423,12 @@ void CameraPlatform_StartCapture(dmBuffer::HBuffer* buffer, CameraType type, Cap
[AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
if (granted) {
dmLogInfo("AVAuthorizationStatusNotDetermined - granted!");
- CameraPlatform_StartCaptureAuthorized(buffer, type, quality, outparams);
+ CameraPlatform_StartCaptureAuthorized(buffer, type, quality);
}
else
{
dmLogInfo("AVAuthorizationStatusNotDetermined - not granted!");
- Camera_QueueMessage(STATUS_NOT_PERMITTED);
+ Camera_QueueMessage(CAMERA_NOT_PERMITTED);
}
}];
}
@@ -423,18 +436,18 @@ void CameraPlatform_StartCapture(dmBuffer::HBuffer* buffer, CameraType type, Cap
{
// The user has previously denied access.
dmLogInfo("AVAuthorizationStatusDenied");
- Camera_QueueMessage(STATUS_NOT_PERMITTED);
+ Camera_QueueMessage(CAMERA_NOT_PERMITTED);
}
else if (status == AVAuthorizationStatusRestricted)
{
// The user can't grant access due to restrictions.
dmLogInfo("AVAuthorizationStatusRestricted");
- Camera_QueueMessage(STATUS_NOT_PERMITTED);
+ Camera_QueueMessage(CAMERA_NOT_PERMITTED);
}
}
else
{
- CameraPlatform_StartCaptureAuthorized(buffer, type, quality, outparams);
+ CameraPlatform_StartCaptureAuthorized(buffer, type, quality);
}
}
@@ -451,4 +464,6 @@ void CameraPlatform_StopCapture()
}
}
+void CameraPlatform_UpdateCapture() {}
+
#endif // DM_PLATFORM_IOS/DM_PLATFORM_OSX
diff --git a/camera/src/camera_private.h b/camera/src/camera_private.h
index 206a178..8757fc8 100644
--- a/camera/src/camera_private.h
+++ b/camera/src/camera_private.h
@@ -22,15 +22,18 @@ struct CameraInfo
CameraType m_Type;
};
-enum CameraStatus
+enum CameraMessage
{
- STATUS_STARTED,
- STATUS_STOPPED,
- STATUS_NOT_PERMITTED,
- STATUS_ERROR
+ CAMERA_STARTED,
+ CAMERA_STOPPED,
+ CAMERA_NOT_PERMITTED,
+ CAMERA_ERROR
};
-extern void CameraPlatform_StartCapture(dmBuffer::HBuffer* buffer, CameraType type, CaptureQuality quality, CameraInfo& outparams);
+extern int CameraPlatform_Initialize();
+extern void CameraPlatform_StartCapture(dmBuffer::HBuffer* buffer, CameraType type, CaptureQuality quality);
+extern void CameraPlatform_UpdateCapture();
extern void CameraPlatform_StopCapture();
+extern void CameraPlatform_GetCameraInfo(CameraInfo& outparams);
-void Camera_QueueMessage(CameraStatus message);
+void Camera_QueueMessage(CameraMessage message);
diff --git a/main/main.script b/main/main.script
index 51dad22..fd50525 100644
--- a/main/main.script
+++ b/main/main.script
@@ -26,7 +26,7 @@ local function start_capture(self)
end
camera.start_capture(type, quality, function(self, status)
- if status == camera.STATUS_STARTED then
+ if status == camera.CAMERA_STARTED then
self.cameraframe = camera.get_frame()
self.camerainfo = camera.get_info()
self.cameratextureheader = {
@@ -37,7 +37,9 @@ local function start_capture(self)
num_mip_maps=1
}
label.set_text("logo#status", "Capture Status: ON")
- else
+ elseif status == camera.CAMERA_STOPPED then
+ label.set_text("logo#status", "Capture Status: OFF")
+ elseif status == camera.CAMERA_ERROR then
label.set_text("logo#status", "Capture Status: ERROR")
end
end)