Android support for different capture qualities

Björn Ritzl 2020-07-16 11:27:36 +02:00
parent 599f899f07
commit 0954735da0
6 changed files with 145 additions and 92 deletions

View File

@@ -14,7 +14,7 @@ Or point to the ZIP file of [a specific release](https://github.com/defold/exten
 # Supported platforms
-The currently supported platforms are: OSX + iOS
+The currently supported platforms are macOS, iOS and Android

 # FAQ

@@ -52,10 +52,10 @@ camera.CAPTURE_QUALITY_LOW
 ## Status constants
 ```lua
-camera.STATUS_STARTED
-camera.STATUS_STOPPED
-camera.STATUS_NOT_PERMITTED
-camera.STATUS_ERROR
+camera.CAMERA_STARTED
+camera.CAMERA_STOPPED
+camera.CAMERA_NOT_PERMITTED
+camera.CAMERA_ERROR
 ```

@@ -64,8 +64,8 @@ camera.STATUS_ERROR
 Start camera capture using the specified camera (front/back) and capture quality. This may trigger a camera usage permission popup. When the popup has been dismissed the callback will be invoked with camera start status.
 ```lua
-camera.start_capture(camera.CAMERA_TYPE_BACK, camera.CAPTURE_QUALITY_HIGH, function(self, status)
-    if status == camera.STATUS_STARTED then
+camera.start_capture(camera.CAMERA_TYPE_BACK, camera.CAPTURE_QUALITY_HIGH, function(self, message)
+    if message == camera.CAMERA_STARTED then
         -- do stuff
     end
 end)

View File

@@ -20,38 +20,38 @@ import android.graphics.SurfaceTexture; // API 11
 import android.view.Surface;
+import java.util.List;
 import java.io.IOException;

 public class AndroidCamera extends Fragment
 {
     private static final String TAG = AndroidCamera.class.getSimpleName();

-    enum CameraMessage
-    {
-        CAMERA_STARTED(0),
-        CAMERA_STOPPED(1),
-        CAMERA_NOT_PERMITTED(2),
-        CAMERA_ERROR(3);
-
-        private final int value;
-
-        private CameraMessage(int value) {
-            this.value = value;
-        }
-
-        public int getValue() {
-            return value;
-        }
-    }
+    private static final int CAMERA_STARTED = 0;
+    private static final int CAMERA_STOPPED = 1;
+    private static final int CAMERA_NOT_PERMITTED = 2;
+    private static final int CAMERA_ERROR = 3;
+
+    private static final int CAPTURE_QUALITY_LOW = 0;
+    private static final int CAPTURE_QUALITY_MEDIUM = 1;
+    private static final int CAPTURE_QUALITY_HIGH = 2;
+
+    private static final int CAMERA_TYPE_FRONT = 0;
+    private static final int CAMERA_TYPE_BACK = 1;

     private Camera camera;
     private SurfaceTexture surface;
     private boolean newFrame;
+    private int position;
+    private int quality;
+    private Camera.Size size;

     private static Context context;

     static native void frameUpdate(int[] data);
     static native void queueMessage(int message);
+    static native void captureStarted(int width, int height);

     public static AndroidCamera getCamera(Context context)

@@ -84,7 +84,7 @@ public class AndroidCamera extends Fragment
         }
         else
         {
-            queueMessage(CameraMessage.CAMERA_ERROR.getValue());
+            queueMessage(CAMERA_ERROR);
         }
     }

@@ -123,7 +123,7 @@ public class AndroidCamera extends Fragment
         if(cameraId == -1)
         {
-            queueMessage(CameraMessage.CAMERA_ERROR.getValue());
+            queueMessage(CAMERA_ERROR);
             return;
         }

@@ -131,8 +131,24 @@ public class AndroidCamera extends Fragment
         camera = Camera.open(cameraId);
         Camera.Parameters params = camera.getParameters();
-        params.setPreviewSize(640, 480);
-        params.setPictureSize(640, 480);
+
+        List<Camera.Size> sizes = params.getSupportedPreviewSizes();
+        switch(this.quality)
+        {
+            case CAPTURE_QUALITY_HIGH:
+                this.size = sizes.get(sizes.size() - 1);
+                break;
+            case CAPTURE_QUALITY_LOW:
+                this.size = sizes.get(0);
+                break;
+            case CAPTURE_QUALITY_MEDIUM:
+            default:
+                this.size = sizes.get((int)Math.ceil(sizes.size() / 2));
+                break;
+        }
+
+        params.setPreviewSize(this.size.width, this.size.height);
+        params.setPictureSize(this.size.width, this.size.height);
         params.setPictureFormat(PixelFormat.JPEG);
         params.setJpegQuality(90);
         params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);

@@ -150,7 +166,7 @@ public class AndroidCamera extends Fragment
                 if(rotation == Surface.ROTATION_180 || rotation == Surface.ROTATION_270)
                     flip = true;

-                int[] pixels = convertYUV420_NV21toARGB8888(data, 640, 480, flip);
+                int[] pixels = convertYUV420_NV21toARGB8888(data, AndroidCamera.this.size.width, AndroidCamera.this.size.height, flip);
                 frameUpdate(pixels);
             }
         });

@@ -163,18 +179,21 @@ public class AndroidCamera extends Fragment
         {
         }

+        captureStarted(this.size.width, this.size.height);
         camera.startPreview();
-        queueMessage(CameraMessage.CAMERA_STARTED.getValue());
+        queueMessage(CAMERA_STARTED);
     }

-    public void startPreview()
+    public void startPreview(int position, int quality)
     {
         if(camera != null)
         {
-            queueMessage(CameraMessage.CAMERA_STARTED.getValue());
+            queueMessage(CAMERA_STARTED);
             return;
         }
+        this.position = position;
+        this.quality = quality;
         requestPermission();
     }

@@ -185,7 +204,7 @@ public class AndroidCamera extends Fragment
         camera.stopPreview();
         camera.release();
         camera = null;
-        queueMessage(CameraMessage.CAMERA_STOPPED.getValue());
+        queueMessage(CAMERA_STOPPED);
     }
 }
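
The quality handling added above reduces to picking an index into the list returned by `getSupportedPreviewSizes()`, which the commit treats as ordered from smallest to largest resolution. A small standalone sketch of that index selection, written in C++ for clarity; the function and constant names below are illustrative, not part of the extension:

```cpp
#include <cstdio>

// Illustrative constants mirroring the Java-side capture quality values.
enum CaptureQuality { CAPTURE_QUALITY_LOW = 0, CAPTURE_QUALITY_MEDIUM = 1, CAPTURE_QUALITY_HIGH = 2 };

// Returns the index into the supported preview size list for a given quality.
// Note: sizes.size() / 2 in the Java code is integer division, so the Math.ceil
// wrapper has no effect; medium quality simply picks the middle entry.
static int IndexForQuality(CaptureQuality quality, int count)
{
    switch (quality)
    {
        case CAPTURE_QUALITY_HIGH:   return count - 1; // largest supported size
        case CAPTURE_QUALITY_LOW:    return 0;         // smallest supported size
        case CAPTURE_QUALITY_MEDIUM:
        default:                     return count / 2; // middle of the list
    }
}

int main()
{
    const int counts[] = { 1, 5, 8 };
    for (int count : counts)
    {
        printf("count=%d low=%d medium=%d high=%d\n", count,
               IndexForQuality(CAPTURE_QUALITY_LOW, count),
               IndexForQuality(CAPTURE_QUALITY_MEDIUM, count),
               IndexForQuality(CAPTURE_QUALITY_HIGH, count));
    }
    return 0;
}
```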

View File

@@ -102,7 +102,7 @@ static int StartCapture(lua_State* L)
     Camera_DestroyCallback();
     g_DefoldCamera.m_Callback = dmScript::CreateCallback(L, 3);

-    CameraPlatform_StartCapture(&g_DefoldCamera.m_VideoBuffer, type, quality, g_DefoldCamera.m_Params);
+    CameraPlatform_StartCapture(&g_DefoldCamera.m_VideoBuffer, type, quality);

     return 1;
 }

@@ -120,6 +120,7 @@ static int GetInfo(lua_State* L)
 {
     DM_LUA_STACK_CHECK(L, 1);
+    CameraPlatform_GetCameraInfo(g_DefoldCamera.m_Params);
     lua_newtable(L);
     lua_pushstring(L, "width");
     lua_pushnumber(L, g_DefoldCamera.m_Params.m_Width);
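
The hunk above ends at the `width` field, so only part of `GetInfo` is visible. A sketch of what the complete binding presumably looks like after this commit; the `lua_settable` calls and the `height` field are assumptions about the surrounding, unchanged code rather than lines from the diff:

```cpp
// Sketch of the full GetInfo binding. Only the lines shown in the hunk above
// are verbatim from the commit.
static int GetInfo(lua_State* L)
{
    DM_LUA_STACK_CHECK(L, 1);

    // New in this commit: ask the platform layer for the dimensions it settled
    // on at capture start, instead of values cached when capture was requested.
    CameraPlatform_GetCameraInfo(g_DefoldCamera.m_Params);

    lua_newtable(L);
    lua_pushstring(L, "width");
    lua_pushnumber(L, g_DefoldCamera.m_Params.m_Width);
    lua_settable(L, -3);
    lua_pushstring(L, "height");
    lua_pushnumber(L, g_DefoldCamera.m_Params.m_Height);
    lua_settable(L, -3);
    return 1;
}
```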

View File

@@ -4,18 +4,21 @@
 #include "camera_private.h"

-static const uint32_t CAMERA_WIDTH = 640;
-static const uint32_t CAMERA_HEIGHT = 480;
-
-static jclass g_cameraClass = 0;
-static jobject g_cameraObject = 0;
-static jmethodID g_getCameraMethodId = 0;
-static jmethodID g_startPreviewMethodId = 0;
-static jmethodID g_stopPreviewMethodId = 0;
-
-static jint g_data[CAMERA_WIDTH * CAMERA_HEIGHT];
-static bool g_frameLock = false;
+static jclass g_CameraClass = 0;
+static jobject g_CameraObject = 0;
+static jmethodID g_GetCameraMethodId = 0;
+static jmethodID g_StartPreviewMethodId = 0;
+static jmethodID g_StopPreviewMethodId = 0;
+
+static jint *g_Data = 0;
+static bool g_FrameLock = false;
+
+static uint32_t g_Width = 0;
+static uint32_t g_Height = 0;
+static CameraType g_Type;
+static CaptureQuality g_Quality;
+static dmBuffer::HBuffer* g_Buffer = 0;

 static dmBuffer::HBuffer g_VideoBuffer = 0;

@@ -55,14 +58,15 @@ extern "C"
 {
     JNIEXPORT void JNICALL Java_com_defold_android_camera_AndroidCamera_frameUpdate(JNIEnv * env, jobject jobj, jintArray data);
     JNIEXPORT void JNICALL Java_com_defold_android_camera_AndroidCamera_queueMessage(JNIEnv * env, jobject jobj, jint message);
+    JNIEXPORT void JNICALL Java_com_defold_android_camera_AndroidCamera_captureStarted(JNIEnv * env, jobject jobj, jint width, jint height);
 }

 JNIEXPORT void JNICALL Java_com_defold_android_camera_AndroidCamera_frameUpdate(JNIEnv * env, jobject jobj, jintArray data)
 {
-    if(!g_frameLock)
+    if(!g_FrameLock)
     {
-        env->GetIntArrayRegion(data, 0, CAMERA_WIDTH*CAMERA_HEIGHT, g_data);
-        g_frameLock = true;
+        env->GetIntArrayRegion(data, 0, g_Width * g_Height, g_Data);
+        g_FrameLock = true;
     }
 }

@@ -71,6 +75,39 @@ JNIEXPORT void JNICALL Java_com_defold_android_camera_AndroidCamera_queueMessage
     Camera_QueueMessage((CameraMessage)message);
 }

+JNIEXPORT void JNICALL Java_com_defold_android_camera_AndroidCamera_captureStarted(JNIEnv * env, jobject jobj, jint width, jint height)
+{
+    // As default behavior, we want portrait mode
+    if (width > height) {
+        uint32_t tmp = width;
+        width = height;
+        height = tmp;
+    }
+
+    g_Width = (uint32_t)width;
+    g_Height = (uint32_t)height;
+
+    if (g_Data)
+    {
+        delete g_Data;
+    }
+    uint32_t size = g_Width * g_Height;
+    g_Data = new jint[size];
+
+    dmBuffer::StreamDeclaration streams_decl[] = {
+        {dmHashString64("rgb"), dmBuffer::VALUE_TYPE_UINT8, 3}
+    };
+    dmBuffer::Create(size, streams_decl, 1, g_Buffer);
+    g_VideoBuffer = *g_Buffer;
+}
+
+void CameraPlatform_GetCameraInfo(CameraInfo& outparams)
+{
+    outparams.m_Width = g_Width;
+    outparams.m_Height = g_Height;
+    outparams.m_Type = g_Type;
+}

 int CameraPlatform_Initialize()
 {

@@ -82,8 +119,8 @@ int CameraPlatform_Initialize()
     // get the AndroidCamera class
     jclass tmp = GetClass(env, "com.defold.android.camera/AndroidCamera");
-    g_cameraClass = (jclass)env->NewGlobalRef(tmp);
-    if(!g_cameraClass)
+    g_CameraClass = (jclass)env->NewGlobalRef(tmp);
+    if(!g_CameraClass)
     {
         dmLogError("Could not find class 'com.defold.android.camera/AndroidCamera'.");
         Detach(env);

@@ -91,17 +128,17 @@ int CameraPlatform_Initialize()
     }

     // get an instance of the AndroidCamera class using the getCamera() method
-    g_getCameraMethodId = env->GetStaticMethodID(g_cameraClass, "getCamera", "(Landroid/content/Context;)Lcom/defold/android/camera/AndroidCamera;");
-    if(!g_getCameraMethodId)
+    g_GetCameraMethodId = env->GetStaticMethodID(g_CameraClass, "getCamera", "(Landroid/content/Context;)Lcom/defold/android/camera/AndroidCamera;");
+    if(!g_GetCameraMethodId)
     {
         dmLogError("Could not get static method 'getCamera'.");
         Detach(env);
         return false;
     }

-    jobject tmp1 = env->CallStaticObjectMethod(g_cameraClass, g_getCameraMethodId, dmGraphics::GetNativeAndroidActivity());
-    g_cameraObject = (jobject)env->NewGlobalRef(tmp1);
-    if(!g_cameraObject)
+    jobject tmp1 = env->CallStaticObjectMethod(g_CameraClass, g_GetCameraMethodId, dmGraphics::GetNativeAndroidActivity());
+    g_CameraObject = (jobject)env->NewGlobalRef(tmp1);
+    if(!g_CameraObject)
     {
         dmLogError("Could not create instance.");
         Detach(env);

@@ -109,15 +146,15 @@ int CameraPlatform_Initialize()
     }

     // get reference to startPreview() and stopPreview() methods
-    g_startPreviewMethodId = env->GetMethodID(g_cameraClass, "startPreview", "()V");
-    if(!g_startPreviewMethodId)
+    g_StartPreviewMethodId = env->GetMethodID(g_CameraClass, "startPreview", "(II)V");
+    if(!g_StartPreviewMethodId)
     {
         dmLogError("Could not get startPreview() method.");
         Detach(env);
         return false;
     }

-    g_stopPreviewMethodId = env->GetMethodID(g_cameraClass, "stopPreview", "()V");
-    if(!g_stopPreviewMethodId)
+    g_StopPreviewMethodId = env->GetMethodID(g_CameraClass, "stopPreview", "()V");
+    if(!g_StopPreviewMethodId)
     {
         dmLogError("Could not get stopPreview() method.");
         Detach(env);

@@ -128,62 +165,49 @@ int CameraPlatform_Initialize()
     return true;
 }

-void CameraPlatform_StartCapture(dmBuffer::HBuffer* buffer, CameraType type, CaptureQuality quality, CameraInfo& outparams)
+void CameraPlatform_StartCapture(dmBuffer::HBuffer* buffer, CameraType type, CaptureQuality quality)
 {
-    if (!g_cameraObject)
+    if (!g_CameraObject)
     {
         Camera_QueueMessage(CAMERA_ERROR);
         return;
     }

-    outparams.m_Width = (uint32_t)CAMERA_WIDTH;
-    outparams.m_Height = (uint32_t)CAMERA_HEIGHT;
-
-    // As default behavior, we want portrait mode
-    if (outparams.m_Width > outparams.m_Height) {
-        uint32_t tmp = outparams.m_Width;
-        outparams.m_Width = outparams.m_Height;
-        outparams.m_Height = tmp;
-    }
-
-    uint32_t size = outparams.m_Width * outparams.m_Height;
-    dmBuffer::StreamDeclaration streams_decl[] = {
-        {dmHashString64("rgb"), dmBuffer::VALUE_TYPE_UINT8, 3}
-    };
-    dmBuffer::Create(size, streams_decl, 1, buffer);
-    g_VideoBuffer = *buffer;
+    g_Buffer = buffer;
+    g_Type = type;
+    g_Quality = quality;

     JNIEnv* env = Attach();
-    env->CallVoidMethod(g_cameraObject, g_startPreviewMethodId);
+    env->CallVoidMethod(g_CameraObject, g_StartPreviewMethodId);
     Detach(env);
 }

 void CameraPlatform_StopCapture()
 {
-    if (!g_cameraObject)
+    if (!g_CameraObject)
     {
         Camera_QueueMessage(CAMERA_ERROR);
         return;
     }

     JNIEnv* env = Attach();
-    env->CallVoidMethod(g_cameraObject, g_stopPreviewMethodId);
+    env->CallVoidMethod(g_CameraObject, g_StopPreviewMethodId);
     Detach(env);
 }

 void CameraPlatform_UpdateCapture()
 {
-    if(g_frameLock)
+    if(g_FrameLock)
     {
-        int width = CAMERA_WIDTH;
-        int height = CAMERA_HEIGHT;
+        // the video buffer is in landscape mode
+        int width = g_Height;
+        int height = g_Width;
         int numChannels = 4;

         uint8_t* out;
         uint32_t outsize;
         dmBuffer::GetBytes(g_VideoBuffer, (void**)&out, &outsize);

-        uint32_t* data = (uint32_t*)g_data;
+        uint32_t* data = (uint32_t*)g_Data;
         for( int y = 0; y < height; ++y)
         {
             for( int x = 0; x < width; ++x)

@@ -199,7 +223,7 @@ void CameraPlatform_UpdateCapture()
                 out[index+2] = (argb>>16)&0xFF; // B
             }
         }
-        g_frameLock = false;
+        g_FrameLock = false;
     }
 }
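
One observation on the hunks above: `startPreview` is now looked up with the two-argument signature `"(II)V"`, yet the `CallVoidMethod` invocation in `CameraPlatform_StartCapture` appears without arguments. Presumably the call is meant to forward the requested camera type and quality to the Java layer; a sketch under that assumption, not the code as recorded in this diff:

```cpp
// Sketch only: forwarding the arguments to the Java method registered as
// startPreview(int position, int quality), i.e. signature "(II)V".
void CameraPlatform_StartCapture(dmBuffer::HBuffer* buffer, CameraType type, CaptureQuality quality)
{
    if (!g_CameraObject)
    {
        Camera_QueueMessage(CAMERA_ERROR);
        return;
    }

    g_Buffer = buffer;
    g_Type = type;
    g_Quality = quality;

    JNIEnv* env = Attach();
    // Pass the camera position and capture quality so the Java side can pick
    // a matching preview size before calling back into captureStarted().
    env->CallVoidMethod(g_CameraObject, g_StartPreviewMethodId, (jint)type, (jint)quality);
    Detach(env);
}
```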

View File

@@ -30,6 +30,8 @@ struct IOSCamera
 {
     CameraCaptureDelegate* m_Delegate;
     dmBuffer::HBuffer m_VideoBuffer;
+    uint32_t m_Width;
+    uint32_t m_Height;
     // TODO: Support audio buffers

     IOSCamera() : m_Delegate(0), m_VideoBuffer(0)

@@ -351,12 +353,18 @@ static CMVideoDimensions FlipCoords(AVCaptureVideoDataOutput* output, const CMVi
 @end

-int CameraPlatform_Initialize()
+void CameraPlatform_GetCameraInfo(CameraInfo& outparams)
+{
+    outparams.m_Width = g_Camera.m_Width;
+    outparams.m_Height = g_Camera.m_Height;
+}
+
+int CameraPlatform_Initialize(uint32_t width, uint32_t height)
 {
     return 1;
 }

-void CameraPlatform_StartCaptureAuthorized(dmBuffer::HBuffer* buffer, CameraType type, CaptureQuality quality, CameraInfo& outparams)
+void CameraPlatform_StartCaptureAuthorized(dmBuffer::HBuffer* buffer, CameraType type, CaptureQuality quality)
 {
     if(g_Camera.m_Delegate == 0)
     {

@@ -373,10 +381,10 @@ void CameraPlatform_StartCaptureAuthorized(dmBuffer::HBuffer* buffer, CameraType
     BOOL started = [g_Camera.m_Delegate startCamera: cameraposition quality: quality];

-    outparams.m_Width = (uint32_t)g_Camera.m_Delegate->m_Size.width;
-    outparams.m_Height = (uint32_t)g_Camera.m_Delegate->m_Size.height;
+    g_Camera.m_Width = (uint32_t)g_Camera.m_Delegate->m_Size.width;
+    g_Camera.m_Height = (uint32_t)g_Camera.m_Delegate->m_Size.height;

-    uint32_t size = outparams.m_Width * outparams.m_Height;
+    uint32_t size = g_Camera.m_Width * g_Camera.m_Height;
     dmBuffer::StreamDeclaration streams_decl[] = {
         {dmHashString64("rgb"), dmBuffer::VALUE_TYPE_UINT8, 3}
     };

@@ -395,7 +403,7 @@ void CameraPlatform_StartCaptureAuthorized(dmBuffer::HBuffer* buffer, CameraType
     }
 }

-void CameraPlatform_StartCapture(dmBuffer::HBuffer* buffer, CameraType type, CaptureQuality quality, CameraInfo& outparams)
+void CameraPlatform_StartCapture(dmBuffer::HBuffer* buffer, CameraType type, CaptureQuality quality)
 {
     // Only check for permission on iOS 7+ and macOS 10.14+
     if ([AVCaptureDevice respondsToSelector:@selector(authorizationStatusForMediaType:)])

@@ -406,7 +414,7 @@ void CameraPlatform_StartCapture(dmBuffer::HBuffer* buffer, CameraType type, Cap
     {
         // The user has previously granted access to the camera.
         dmLogInfo("AVAuthorizationStatusAuthorized");
-        CameraPlatform_StartCaptureAuthorized(buffer, type, quality, outparams);
+        CameraPlatform_StartCaptureAuthorized(buffer, type, quality);
     }
     else if (status == AVAuthorizationStatusNotDetermined)
     {

@@ -415,7 +423,7 @@ void CameraPlatform_StartCapture(dmBuffer::HBuffer* buffer, CameraType type, Cap
         [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
             if (granted) {
                 dmLogInfo("AVAuthorizationStatusNotDetermined - granted!");
-                CameraPlatform_StartCaptureAuthorized(buffer, type, quality, outparams);
+                CameraPlatform_StartCaptureAuthorized(buffer, type, quality);
             }
             else
             {

@@ -439,7 +447,7 @@ void CameraPlatform_StartCapture(dmBuffer::HBuffer* buffer, CameraType type, Cap
     }
     else
     {
-        CameraPlatform_StartCaptureAuthorized(buffer, type, quality, outparams);
+        CameraPlatform_StartCaptureAuthorized(buffer, type, quality);
     }
 }

View File

@@ -31,8 +31,9 @@ enum CameraMessage
 };

 extern int CameraPlatform_Initialize();
-extern void CameraPlatform_StartCapture(dmBuffer::HBuffer* buffer, CameraType type, CaptureQuality quality, CameraInfo& outparams);
+extern void CameraPlatform_StartCapture(dmBuffer::HBuffer* buffer, CameraType type, CaptureQuality quality);
 extern void CameraPlatform_UpdateCapture();
 extern void CameraPlatform_StopCapture();
+extern void CameraPlatform_GetCameraInfo(CameraInfo& outparams);

 void Camera_QueueMessage(CameraMessage message);
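
Taken together, the header now describes a two-step contract: start capture with a type and quality, then query the dimensions once the platform has settled on a preview size. A sketch of that call sequence from the caller's side; the `CAMERA_TYPE_*` / `CAPTURE_QUALITY_*` enumerator names are inferred from the Java constants and Lua examples in this commit, and the `CameraInfo` fields from the Android implementation above:

```cpp
// Sketch: expected usage of the reworked platform API (not verbatim from the commit).
static void StartCaptureAndLogSize(dmBuffer::HBuffer* video_buffer)
{
    // 1. Kick off capture; the platform picks a preview size for the requested
    //    quality and reports progress via Camera_QueueMessage().
    CameraPlatform_StartCapture(video_buffer, CAMERA_TYPE_BACK, CAPTURE_QUALITY_HIGH);

    // 2. Once CAMERA_STARTED has been queued, the chosen dimensions can be read back.
    CameraInfo info;
    CameraPlatform_GetCameraInfo(info);
    dmLogInfo("Capturing at %ux%u", info.m_Width, info.m_Height);
}
```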