diff --git a/AndroidManifest.xml b/AndroidManifest.xml
new file mode 100644
index 0000000..e7af694
--- /dev/null
+++ b/AndroidManifest.xml
@@ -0,0 +1,141 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {{#android.push_field_title}}
+
+ {{/android.push_field_title}}
+ {{#android.push_field_text}}
+
+ {{/android.push_field_text}}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/README.md b/README.md
index a496490..0ab366c 100644
--- a/README.md
+++ b/README.md
@@ -44,7 +44,7 @@ Returns true if the capture starts well
if camera.start_capture(camera.CAMERA_TYPE_BACK, camera.CAPTURE_QUALITY_HIGH) then
-- do stuff
end
-
+
## camera.stop_capture()
Stops a previously started capture session
@@ -56,12 +56,15 @@ Gets the info from the current capture session
local info = camera.get_info()
print("width", info.width)
print("height", info.height)
-
+
## camera.get_frame()
Retrieves the camera pixel buffer
This buffer has one stream named "rgb", and is of type buffer.VALUE_TYPE_UINT8 and has the value count of 1
self.cameraframe = camera.get_frame()
-
-
+
+
+# Credits
+
+The Android version is based on the code from https://github.com/necula/native-camera
diff --git a/camera/src/android/Camera.h b/camera/src/android/Camera.h
new file mode 100644
index 0000000..0f3fb5a
--- /dev/null
+++ b/camera/src/android/Camera.h
@@ -0,0 +1,30 @@
+#pragma once
+
+class Camera
+{
+public:
+ typedef void (*FrameUpdateCallback)(void* priv, void* data, int width, int height, int numChannels);
+ typedef void (*PhotoSavedCallback)(void* priv, bool success);
+
+ virtual ~Camera();
+
+ static Camera* GetCamera(void* callbackData, FrameUpdateCallback frameUpdateCB, PhotoSavedCallback photoSavedCB);
+ //static Vector2 GetFrameSize();
+ static int GetFrameWidth();
+ static int GetFrameHeight();
+
+ virtual bool Initialize() = 0;
+ virtual void Deinitialize() = 0;
+ virtual bool Start() = 0;
+ virtual void Stop() = 0;
+ virtual void Update() = 0;
+ //virtual void TakePhoto(const char* path) = 0;
+ //virtual void SetFocusPoint(const Vector2& focusPoint) = 0;
+
+ FrameUpdateCallback m_frameUpdateCB;
+ PhotoSavedCallback m_photoSavedCB;
+ void* m_callbackData;
+
+ //bool m_takePhoto;
+ //std::string m_photoPath;
+};
\ No newline at end of file
diff --git a/camera/src/android/Camera.java b/camera/src/android/Camera.java
new file mode 100644
index 0000000..0808ab1
--- /dev/null
+++ b/camera/src/android/Camera.java
@@ -0,0 +1,224 @@
+package com.defold.android.camera;
+
+import android.content.Context;
+import android.app.Activity;
+import android.util.Log;
+
+import android.hardware.Camera;
+import android.hardware.Camera.CameraInfo;
+import android.hardware.Camera.PreviewCallback;
+
+import android.graphics.PixelFormat;
+import android.graphics.SurfaceTexture; // API 11
+
+import android.view.Surface;
+
+import java.io.IOException;
+
+class AndroidCamera
+{
+ public Camera camera;
+ public SurfaceTexture surface;
+ public boolean newFrame;
+
+ public static String photoPath;
+
+ public static Context context;
+
+ static native void frameUpdate(int[] data);
+ static native void photoSaved(long callbackData, boolean success);
+
+ public long callbackData;
+
+
+ public static AndroidCamera getCamera(Context _context)
+ {
+ context = _context;
+ AndroidCamera mc = new AndroidCamera();
+ return mc;
+ }
+
+ public AndroidCamera()
+ {
+ init(context);
+ }
+
+ public void setCallbackData(long callbackData)
+ {
+ this.callbackData = callbackData;
+ }
+
+ public void init(final Context context)
+ {
+ surface = new SurfaceTexture(0);
+
+ CameraInfo info = new CameraInfo();
+ int cameraId = -1;
+ int numberOfCameras = Camera.getNumberOfCameras();
+
+ for(int i = 0; i < numberOfCameras; i++)
+ {
+ Camera.getCameraInfo(i, info);
+ if(info.facing == CameraInfo.CAMERA_FACING_BACK)
+ {
+ cameraId = i;
+ break;
+ }
+ }
+
+ if(cameraId == -1)
+ return;
+
+ camera = Camera.open(cameraId);
+
+ Camera.Parameters params = camera.getParameters();
+ params.setPreviewSize(640, 480);
+ params.setPictureSize(640, 480);
+ params.setPictureFormat(PixelFormat.JPEG);
+ params.setJpegQuality(90);
+ camera.setParameters(params);
+
+ final Activity activity = (Activity)context;
+ int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
+
+ camera.setPreviewCallback(new PreviewCallback() {
+ public void onPreviewFrame(byte[] data, Camera arg1) {
+
+ boolean flip = false;
+ int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();;
+
+ if(rotation == Surface.ROTATION_180 || rotation == Surface.ROTATION_270)
+ flip = true;
+
+ int[] pixels = convertYUV420_NV21toARGB8888(data, 640, 480, flip);
+ frameUpdate(pixels);
+ }
+ });
+
+ try
+ {
+ camera.setPreviewTexture(surface);
+ }
+ catch(IOException ioe)
+ {
+ }
+ }
+
+ /*public void takePhoto(final String photoPath)
+ {
+ camera.takePicture(null, null, new PictureCallback() {
+ public void onPictureTaken(byte [] rawData, Camera camera) {
+ try {
+ if (rawData != null) {
+ int rawDataLength = rawData.length;
+
+ String dirStr = "";
+ int pos = photoPath.lastIndexOf("/", photoPath.length() - 1);
+ if(pos == -1)
+ return;
+ dirStr = photoPath.substring(0, pos);
+
+ File dir = new File(dirStr);
+ dir.mkdirs();
+
+ File rawoutput = new File(photoPath);
+ rawoutput.createNewFile();
+ FileOutputStream outstream = new FileOutputStream(rawoutput);
+
+ boolean flip = false;
+ int rotation = Utils.getRotation();
+ if(rotation == Surface.ROTATION_180 || rotation == Surface.ROTATION_270)
+ flip = true;
+ if(flip)
+ {
+ Bitmap bitmap = BitmapFactory.decodeByteArray(rawData, 0, rawData.length);
+ ByteArrayOutputStream rotatedStream = new ByteArrayOutputStream();
+
+ // Rotate the Bitmap
+ Matrix matrix = new Matrix();
+ matrix.postRotate(180);
+
+ // We rotate the same Bitmap
+ bitmap = Bitmap.createBitmap(bitmap, 0, 0, 640, 480, matrix, false);
+
+ // We dump the rotated Bitmap to the stream
+ bitmap.compress(CompressFormat.JPEG, 90, rotatedStream);
+
+ rawData = rotatedStream.toByteArray();
+ }
+
+ outstream.write(rawData);
+
+ photoSaved(callbackData, true);
+ }
+ } catch (Exception e) {
+ Log.w("", "[CAMERA] takePhoto error " + e.toString());
+ }
+ }
+ });
+ }*/
+
+ public void startPreview()
+ {
+ if(camera == null)
+ init(context);
+ camera.startPreview();
+ }
+
+ public void stopPreview()
+ {
+ camera.stopPreview();
+ camera.release();
+ camera = null;
+ }
+
+ public static int[] convertYUV420_NV21toARGB8888(byte [] data, int width, int height, boolean flip) {
+ int size = width*height;
+ int offset = size;
+ int[] pixels = new int[size];
+ int u, v, y1, y2, y3, y4;
+
+ int startPos = 0;
+ int helperIdx = -1;
+ if(flip)
+ {
+ startPos = size - 1;
+ helperIdx = 1;
+ }
+
+ // i along Y and the final pixels
+ // k along pixels U and V
+ for(int i=0, k=0; i < size; i+=2, k+=2) {
+ y1 = data[i ]&0xff;
+ y2 = data[i+1]&0xff;
+ y3 = data[width+i ]&0xff;
+ y4 = data[width+i+1]&0xff;
+
+ v = data[offset+k ]&0xff;
+ u = data[offset+k+1]&0xff;
+ v = v-128;
+ u = u-128;
+
+ pixels[startPos - helperIdx*i ] = convertYUVtoARGB(y1, u, v);
+ pixels[startPos - helperIdx*(i+1)] = convertYUVtoARGB(y2, u, v);
+ pixels[startPos - helperIdx*(width+i) ] = convertYUVtoARGB(y3, u, v);
+ pixels[startPos - helperIdx*(width+i+1)] = convertYUVtoARGB(y4, u, v);
+
+ if (i!=0 && (i+2)%width==0)
+ i += width;
+ }
+
+ return pixels;
+ }
+
+ // Alt: https://github.com/Jaa-c/android-camera-demo/blob/master/src/com/jaa/camera/CameraSurfaceView.java
+ private static int convertYUVtoARGB(int y, int u, int v) {
+ int r = y + (int)(1.772f*v);
+ int g = y - (int)(0.344f*v + 0.714f*u);
+ int b = y + (int)(1.402f*u);
+ r = r>255? 255 : r<0 ? 0 : r;
+ g = g>255? 255 : g<0 ? 0 : g;
+ b = b>255? 255 : b<0 ? 0 : b;
+ return 0xff000000 | r | (g<<8) | (b<<16);
+ }
+};
\ No newline at end of file
diff --git a/camera/src/android/CameraAndroid.cpp b/camera/src/android/CameraAndroid.cpp
new file mode 100644
index 0000000..31718b0
--- /dev/null
+++ b/camera/src/android/CameraAndroid.cpp
@@ -0,0 +1,256 @@
+#include "Camera.h"
+#include <dmsdk/sdk.h> // dmGraphics::GetNativeAndroidActivity() — NOTE(review): include target was lost in extraction; verify header
+
+static jclass g_cameraClass = 0;
+static jmethodID g_initMethodId = 0;
+static jmethodID g_startPreviewMethodId = 0;
+static jmethodID g_stopPreviewMethodId = 0;
+static jmethodID g_takePhotoMethodId = 0;
+static jmethodID g_getCameraMethodId = 0;
+static jmethodID g_setCallbackDataMethodId = 0;
+static jobject g_cameraObject = 0;
+
+static jint g_data[640*480];
+static bool g_frameLock = false;
+
+class PCamera : public Camera
+{
+public:
+
+ enum PhotoSavedState
+ {
+ PhotoSaved_Failed,
+ PhotoSaved_OK,
+ PhotoSaved_Waiting
+ };
+
+ PCamera();
+ virtual ~PCamera();
+
+ bool Initialize();
+ void Deinitialize();
+ bool Start();
+ void Stop();
+ void Update();
+ //void TakePhoto(const char* path);
+ //void SetFocusPoint(const Vector2& focusPoint);
+
+ void PhotoSavedCB(bool success);
+
+
+private:
+ bool m_initialized;
+ PhotoSavedState m_photoSavedState;
+};
+
+PCamera::PCamera()
+{
+ m_initialized = false;
+ //m_takePhoto = false;
+ m_photoSavedState = PhotoSaved_Waiting;
+}
+
+PCamera::~PCamera()
+{
+
+}
+
+static JNIEnv* Attach()
+{
+ JNIEnv* env;
+ JavaVM* vm = dmGraphics::GetNativeAndroidJavaVM();
+ vm->AttachCurrentThread(&env, NULL);
+ return env;
+}
+
+static bool Detach(JNIEnv* env)
+{
+ bool exception = (bool) env->ExceptionCheck();
+ env->ExceptionClear();
+ JavaVM* vm = dmGraphics::GetNativeAndroidJavaVM();
+ vm->DetachCurrentThread();
+ return !exception;
+}
+
+static jclass GetClass(JNIEnv* env, const char* classname)
+{
+ jclass activity_class = env->FindClass("android/app/NativeActivity");
+ jmethodID get_class_loader = env->GetMethodID(activity_class,"getClassLoader", "()Ljava/lang/ClassLoader;");
+ jobject cls = env->CallObjectMethod(dmGraphics::GetNativeAndroidActivity(), get_class_loader);
+ jclass class_loader = env->FindClass("java/lang/ClassLoader");
+ jmethodID find_class = env->GetMethodID(class_loader, "loadClass", "(Ljava/lang/String;)Ljava/lang/Class;");
+
+ jstring str_class_name = env->NewStringUTF(classname);
+ jclass outcls = (jclass)env->CallObjectMethod(cls, find_class, str_class_name);
+ env->DeleteLocalRef(str_class_name);
+ return outcls;
+}
+
+bool PCamera::Initialize()
+{
+ JNIEnv* env = Attach();
+ if(!env)
+ return false;
+
+ if(!g_cameraClass)
+ {
+ jclass tmp = GetClass(env, "com.defold.android.camera/AndroidCamera");
+ g_cameraClass = (jclass)env->NewGlobalRef(tmp);
+ if(!g_cameraClass)
+ {
+ dmLogError("Could not find class 'com.defold.android.camera/AndroidCamera'.");
+ Detach(env);
+ return false;
+ }
+ }
+
+ if(!g_getCameraMethodId)
+ {
+ g_getCameraMethodId = env->GetStaticMethodID(g_cameraClass, "getCamera", "(Landroid/content/Context;)Lcom/defold/android/camera/AndroidCamera;");
+ if(!g_getCameraMethodId)
+ {
+ dmLogError("Could not get static method 'getCamera'.");
+ Detach(env);
+ return false;
+ }
+ }
+
+ if(!g_cameraObject)
+ {
+ jobject tmp1 = env->CallStaticObjectMethod(g_cameraClass, g_getCameraMethodId, dmGraphics::GetNativeAndroidActivity());
+ g_cameraObject = (jobject)env->NewGlobalRef(tmp1);
+ }
+
+ if(!g_startPreviewMethodId)
+ g_startPreviewMethodId = env->GetMethodID(g_cameraClass, "startPreview", "()V");
+ assert(g_startPreviewMethodId);
+
+ if(!g_stopPreviewMethodId)
+ g_stopPreviewMethodId = env->GetMethodID(g_cameraClass, "stopPreview", "()V");
+ assert(g_stopPreviewMethodId);
+
+ // if(!g_takePhotoMethodId)
+ // g_takePhotoMethodId = env->GetMethodID(g_cameraClass, "takePhoto", "(Ljava/lang/String;)V");
+ // assert(g_takePhotoMethodId);
+
+ if(!g_setCallbackDataMethodId)
+ g_setCallbackDataMethodId = env->GetMethodID(g_cameraClass, "setCallbackData", ("(J)V"));
+ assert(g_setCallbackDataMethodId);
+
+ env->CallVoidMethod(g_cameraObject, g_setCallbackDataMethodId, (long long)this);
+
+ m_initialized = true;
+ Detach(env);
+ return true;
+}
+
+void PCamera::Deinitialize()
+{
+
+}
+
+bool PCamera::Start()
+{
+ JNIEnv* env = Attach();
+ env->CallVoidMethod(g_cameraObject, g_startPreviewMethodId);
+ Detach(env);
+ return true;
+}
+
+void PCamera::Stop()
+{
+ JNIEnv* env = Attach();
+ env->CallVoidMethod(g_cameraObject, g_stopPreviewMethodId);
+ Detach(env);
+}
+
+void PCamera::Update()
+{
+ if(!m_initialized && !Initialize())
+ return;
+
+ if(g_frameLock)
+ {
+ m_frameUpdateCB(m_callbackData, (void*)g_data, 640, 480, 4);
+ g_frameLock = false;
+ }
+
+ // if(m_takePhoto)
+ // {
+ // if(m_photoSavedState != PhotoSaved_Waiting)
+ // {
+ // m_takePhoto = false;
+ // m_photoSavedState = PhotoSaved_Waiting;
+
+ // m_photoSavedCB(m_callbackData, m_photoSavedState);
+ // }
+ // }
+}
+
+// void PCamera::TakePhoto(const char* path)
+// {
+// m_takePhoto = true;
+
+// jstring jPath = g_env->NewStringUTF(path);
+// g_env->CallVoidMethod(g_cameraObject, g_takePhotoMethodId, jPath);
+// g_env->DeleteLocalRef(jPath);
+// }
+
+// void PCamera::SetFocusPoint(const Vector2& focusPoint)
+// {
+
+// }
+
+void PCamera::PhotoSavedCB(bool success)
+{
+ m_photoSavedState = success ? PhotoSaved_OK : PhotoSaved_Failed;
+}
+
+Camera* Camera::GetCamera(void* callbackData, FrameUpdateCallback frameUpdateCB, PhotoSavedCallback photoSavedCB)
+{
+ PCamera *camera = new PCamera;
+ if(!camera)
+ return 0;
+
+ camera->m_frameUpdateCB = frameUpdateCB;
+ camera->m_photoSavedCB = photoSavedCB;
+ camera->m_callbackData = callbackData;
+
+ return camera;
+}
+
+Camera::~Camera()
+{
+
+}
+
+// Vector2 Camera::GetFrameSize()
+// {
+// return Vector2(640, 480);
+// }
+
+
+int Camera::GetFrameWidth() { return 640; }
+int Camera::GetFrameHeight() { return 480; }
+
+extern "C"
+{
+ JNIEXPORT void JNICALL Java_com_defold_android_camera_AndroidCamera_frameUpdate(JNIEnv * env, jobject jobj, jintArray data);
+ JNIEXPORT void JNICALL Java_com_defold_android_camera_AndroidCamera_photoSaved(JNIEnv * env, jobject jobj, jlong callbackData, jboolean success);
+}
+
+JNIEXPORT void JNICALL Java_com_defold_android_camera_AndroidCamera_frameUpdate(JNIEnv * env, jobject jobj, jintArray data)
+{
+ if(!g_frameLock)
+ {
+ env->GetIntArrayRegion(data, 0, 640*480, g_data);
+ g_frameLock = true;
+ }
+}
+
+JNIEXPORT void JNICALL Java_com_defold_android_camera_AndroidCamera_photoSaved(JNIEnv * env, jobject jobj, jlong callbackData, jboolean success)
+{
+ PCamera* c = (PCamera*)callbackData;
+ if(c)
+ c->PhotoSavedCB(success);
+}
\ No newline at end of file
diff --git a/camera/src/camera.cpp b/camera/src/camera.cpp
index adb1c98..266e093 100644
--- a/camera/src/camera.cpp
+++ b/camera/src/camera.cpp
@@ -11,7 +11,7 @@
#define DLIB_LOG_DOMAIN LIB_NAME
 #include <dmsdk/sdk.h>
-#if defined(DM_PLATFORM_IOS) || defined(DM_PLATFORM_OSX)
+#if defined(DM_PLATFORM_IOS) || defined(DM_PLATFORM_OSX) || defined(DM_PLATFORM_ANDROID)
#include "camera_private.h"
@@ -94,7 +94,7 @@ static int GetInfo(lua_State* L)
static int GetFrame(lua_State* L)
{
DM_LUA_STACK_CHECK(L, 1);
- lua_rawgeti(L,LUA_REGISTRYINDEX, g_DefoldCamera.m_VideoBufferLuaRef);
+ lua_rawgeti(L,LUA_REGISTRYINDEX, g_DefoldCamera.m_VideoBufferLuaRef);
return 1;
}
@@ -130,23 +130,29 @@ static void LuaInit(lua_State* L)
assert(top == lua_gettop(L));
}
-dmExtension::Result AppInitializeCamera(dmExtension::AppParams* params)
+static dmExtension::Result AppInitializeCamera(dmExtension::AppParams* params)
{
return dmExtension::RESULT_OK;
}
-dmExtension::Result InitializeCamera(dmExtension::Params* params)
+static dmExtension::Result InitializeCamera(dmExtension::Params* params)
{
LuaInit(params->m_L);
return dmExtension::RESULT_OK;
}
-dmExtension::Result AppFinalizeCamera(dmExtension::AppParams* params)
+static dmExtension::Result UpdateCamera(dmExtension::Params* params)
+{
+ CameraPlatform_UpdateCapture();
+ return dmExtension::RESULT_OK;
+}
+
+static dmExtension::Result AppFinalizeCamera(dmExtension::AppParams* params)
{
return dmExtension::RESULT_OK;
}
-dmExtension::Result FinalizeCamera(dmExtension::Params* params)
+static dmExtension::Result FinalizeCamera(dmExtension::Params* params)
{
return dmExtension::RESULT_OK;
}
@@ -165,6 +171,11 @@ static dmExtension::Result InitializeCamera(dmExtension::Params* params)
return dmExtension::RESULT_OK;
}
+static dmExtension::Result UpdateCamera(dmExtension::Params* params)
+{
+ return dmExtension::RESULT_OK;
+}
+
static dmExtension::Result AppFinalizeCamera(dmExtension::AppParams* params)
{
return dmExtension::RESULT_OK;
@@ -178,4 +189,4 @@ static dmExtension::Result FinalizeCamera(dmExtension::Params* params)
#endif // platforms
-DM_DECLARE_EXTENSION(EXTENSION_NAME, LIB_NAME, AppInitializeCamera, AppFinalizeCamera, InitializeCamera, 0, 0, FinalizeCamera)
+DM_DECLARE_EXTENSION(EXTENSION_NAME, LIB_NAME, AppInitializeCamera, AppFinalizeCamera, InitializeCamera, UpdateCamera, 0, FinalizeCamera)
diff --git a/camera/src/camera_android.cpp b/camera/src/camera_android.cpp
new file mode 100644
index 0000000..be04fa8
--- /dev/null
+++ b/camera/src/camera_android.cpp
@@ -0,0 +1,116 @@
+#include <dmsdk/sdk.h> // NOTE(review): include target was lost in extraction; verify header
+#include "camera_private.h"
+#include "android/Camera.h"
+
+#if defined(DM_PLATFORM_ANDROID)
+
+struct AndroidCamera
+{
+ dmBuffer::HBuffer m_VideoBuffer;
+ Camera* m_Camera;
+
+ AndroidCamera() : m_VideoBuffer(0), m_Camera(0)
+ {
+ }
+};
+
+AndroidCamera g_Camera;
+
+static void FrameUpdateCallback(void* _ctx, void* _data, int width, int height, int numChannels)
+{
+ AndroidCamera* ctx = (AndroidCamera*)_ctx;
+ uint8_t* out;
+ uint32_t outsize;
+ dmBuffer::GetBytes(g_Camera.m_VideoBuffer, (void**)&out, &outsize);
+
+ uint32_t* data = (uint32_t*)_data;
+ for( int y = 0; y < height; ++y)
+ {
+ for( int x = 0; x < width; ++x)
+ {
+ // We get the image in landscape mode, so we flip it to portrait mode
+ int index = (width-x-1)*height*3 + (height-y-1)*3;
+
+ // RGB <- ARGB
+ uint32_t argb = data[y*width+x];
+
+ out[index+0] = (argb>>0)&0xFF; // R
+ out[index+1] = (argb>>8)&0xFF; // G
+ out[index+2] = (argb>>16)&0xFF; // B
+ }
+ }
+}
+
+static void PhotoSavedCallback(void* ctx, bool success)
+{
+ (void)ctx;
+ (void)success;
+}
+
+
+int CameraPlatform_StartCapture(dmBuffer::HBuffer* buffer, CameraType type, CaptureQuality quality, CameraInfo& outparams)
+{
+ if (g_Camera.m_Camera) {
+ dmLogError("Camera already started!");
+ return 0;
+ }
+ g_Camera.m_Camera = Camera::GetCamera(&g_Camera, FrameUpdateCallback, PhotoSavedCallback);
+ if (!g_Camera.m_Camera) {
+ dmLogError("Camera failed to start");
+ return 0;
+ }
+
+ if (!g_Camera.m_Camera->Initialize()) {
+ dmLogError("Camera failed to initialize");
+ return 0;
+ }
+
+ if (!g_Camera.m_Camera->Start()) {
+ dmLogError("Camera failed to initialize");
+ return 0;
+ }
+
+ outparams.m_Width = (uint32_t)g_Camera.m_Camera->GetFrameWidth();
+ outparams.m_Height = (uint32_t)g_Camera.m_Camera->GetFrameHeight();
+
+ // As default behavior, we want portrait mode
+ if (outparams.m_Width > outparams.m_Height) {
+ uint32_t tmp = outparams.m_Width;
+ outparams.m_Width = outparams.m_Height;
+ outparams.m_Height = tmp;
+ }
+
+ uint32_t size = outparams.m_Width * outparams.m_Height;
+ dmBuffer::StreamDeclaration streams_decl[] = {
+ {dmHashString64("rgb"), dmBuffer::VALUE_TYPE_UINT8, 3}
+ };
+
+ dmBuffer::Create(size, streams_decl, 1, buffer);
+
+ g_Camera.m_VideoBuffer = *buffer;
+
+ return 1;
+}
+
+int CameraPlatform_StopCapture()
+{
+ if (g_Camera.m_Camera) {
+ g_Camera.m_Camera->Stop();
+ g_Camera.m_Camera->Deinitialize();
+ delete g_Camera.m_Camera;
+ }
+ g_Camera.m_Camera = 0;
+ return 1;
+}
+
+int CameraPlatform_UpdateCapture()
+{
+ if (!g_Camera.m_Camera) {
+ dmLogError("Camera has not been started");
+ return 0;
+ }
+ g_Camera.m_Camera->Update();
+ return 1;
+}
+
+#endif // DM_PLATFORM_ANDROID
\ No newline at end of file
diff --git a/camera/src/camera.mm b/camera/src/camera_darwin.mm
similarity index 98%
rename from camera/src/camera.mm
rename to camera/src/camera_darwin.mm
index 9c466ae..6a3ff3e 100644
--- a/camera/src/camera.mm
+++ b/camera/src/camera_darwin.mm
@@ -121,8 +121,8 @@ IOSCamera g_Camera;
}
}
-- (void)captureOutput:(AVCaptureOutput *)captureOutput
- didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
+- (void)captureOutput:(AVCaptureOutput *)captureOutput
+ didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection
{
@@ -256,7 +256,7 @@ IOSCamera g_Camera;
static CMVideoDimensions FlipCoords(AVCaptureVideoDataOutput* output, const CMVideoDimensions& in)
{
- CMVideoDimensions out = in;
+ CMVideoDimensions out = in;
#if defined(DM_PLATFORM_IOS)
AVCaptureConnection* conn = [output connectionWithMediaType:AVMediaTypeVideo];
switch (conn.videoOrientation) {
@@ -274,7 +274,7 @@ static CMVideoDimensions FlipCoords(AVCaptureVideoDataOutput* output, const CMVi
- ( BOOL ) startCamera: (AVCaptureDevicePosition) cameraPosition
- quality: (CaptureQuality)quality
+ quality: (CaptureQuality)quality
{
// 1. Find the back camera
if ( ![ self findCamera: cameraPosition ] )
@@ -398,4 +398,9 @@ int CameraPlatform_StopCapture()
return 1;
}
+int CameraPlatform_UpdateCapture()
+{
+ return 1;
+}
+
#endif // DM_PLATFORM_IOS/DM_PLATFORM_OSX
diff --git a/camera/src/camera_private.h b/camera/src/camera_private.h
index f0f6131..c75becf 100644
--- a/camera/src/camera_private.h
+++ b/camera/src/camera_private.h
@@ -24,3 +24,4 @@ struct CameraInfo
extern int CameraPlatform_StartCapture(dmBuffer::HBuffer* buffer, CameraType type, CaptureQuality quality, CameraInfo& outparams);
extern int CameraPlatform_StopCapture();
+extern int CameraPlatform_UpdateCapture();
diff --git a/game.project b/game.project
index 421ce8a..bbd5248 100644
--- a/game.project
+++ b/game.project
@@ -23,6 +23,7 @@ bundle_identifier = com.defold.camera
[android]
package = com.defold.camera
+manifest = /AndroidManifest.xml
[osx]
bundle_identifier = com.defold.camera