diff --git a/samples/android/tutorial-4-opencl/.classpath b/samples/android/tutorial-4-opencl/.classpath new file mode 100644 index 0000000000..b76ec6cd48 --- /dev/null +++ b/samples/android/tutorial-4-opencl/.classpath @@ -0,0 +1,9 @@ + + + + + + + + + diff --git a/samples/android/tutorial-4-opencl/.cproject b/samples/android/tutorial-4-opencl/.cproject new file mode 100644 index 0000000000..9f3b5fd84f --- /dev/null +++ b/samples/android/tutorial-4-opencl/.cproject @@ -0,0 +1,61 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/samples/android/tutorial-4-opencl/.project b/samples/android/tutorial-4-opencl/.project new file mode 100644 index 0000000000..c86a2386c4 --- /dev/null +++ b/samples/android/tutorial-4-opencl/.project @@ -0,0 +1,49 @@ + + + OpenCV Tutorial 4 - Use OpenCL + + + + + + org.eclipse.cdt.managedbuilder.core.genmakebuilder + clean,full,incremental, + + + + + com.android.ide.eclipse.adt.ResourceManagerBuilder + + + + + com.android.ide.eclipse.adt.PreCompilerBuilder + + + + + org.eclipse.jdt.core.javabuilder + + + + + com.android.ide.eclipse.adt.ApkBuilder + + + + + org.eclipse.cdt.managedbuilder.core.ScannerConfigBuilder + full,incremental, + + + + + + com.android.ide.eclipse.adt.AndroidNature + org.eclipse.jdt.core.javanature + org.eclipse.cdt.core.cnature + org.eclipse.cdt.core.ccnature + org.eclipse.cdt.managedbuilder.core.managedBuildNature + org.eclipse.cdt.managedbuilder.core.ScannerConfigNature + + diff --git a/samples/android/tutorial-4-opencl/.settings/org.eclipse.jdt.core.prefs b/samples/android/tutorial-4-opencl/.settings/org.eclipse.jdt.core.prefs new file mode 100644 index 0000000000..48ab4c6b11 --- /dev/null +++ b/samples/android/tutorial-4-opencl/.settings/org.eclipse.jdt.core.prefs @@ -0,0 +1,4 @@ +eclipse.preferences.version=1 +org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6 +org.eclipse.jdt.core.compiler.compliance=1.6 +org.eclipse.jdt.core.compiler.source=1.6 diff --git a/samples/android/tutorial-4-opencl/AndroidManifest.xml b/samples/android/tutorial-4-opencl/AndroidManifest.xml new file mode 100644 index 0000000000..6bce3c7191 --- /dev/null +++ b/samples/android/tutorial-4-opencl/AndroidManifest.xml @@ -0,0 +1,32 @@ + + + + + + + + + + + + + + + + + + + + diff --git a/samples/android/tutorial-4-opencl/CMakeLists.txt b/samples/android/tutorial-4-opencl/CMakeLists.txt new file mode 100644 index 0000000000..2d529ffb7a --- /dev/null +++ b/samples/android/tutorial-4-opencl/CMakeLists.txt @@ -0,0 +1,12 @@ +set(sample example-tutorial-4-opencl) + +if(BUILD_FAT_JAVA_LIB) + set(native_deps opencv_java) +else() + set(native_deps opencv_imgproc) +endif() + +add_android_project(${sample} "${CMAKE_CURRENT_SOURCE_DIR}" LIBRARY_DEPS ${OpenCV_BINARY_DIR} SDK_TARGET 21 ${ANDROID_SDK_TARGET} NATIVE_DEPS ${native_deps}) +if(TARGET ${sample}) + add_dependencies(opencv_android_examples ${sample}) +endif() diff --git a/samples/android/tutorial-4-opencl/jni/Android.mk b/samples/android/tutorial-4-opencl/jni/Android.mk new file mode 100644 index 0000000000..7a1a6b5ced --- /dev/null +++ b/samples/android/tutorial-4-opencl/jni/Android.mk @@ -0,0 +1,19 @@ +LOCAL_PATH := $(call my-dir) + +# add OpenCV +include $(CLEAR_VARS) +OPENCV_INSTALL_MODULES:=on +ifeq ($(O4A_SDK_ROOT),) + include ../../sdk/native/jni/OpenCV.mk +else + include $(O4A_SDK_ROOT)/sdk/native/jni/OpenCV.mk +endif + +# add OpenCL +LOCAL_C_INCLUDES += $(OPENCL_SDK)/include +LOCAL_LDLIBS += 
-L$(OPENCL_SDK)/lib/$(TARGET_ARCH_ABI) -lOpenCL
+
+LOCAL_MODULE    := JNIrender
+LOCAL_SRC_FILES := jni.c GLrender.cpp CLprocessor.cpp
+LOCAL_LDLIBS    += -llog -lGLESv2 -lEGL
+include $(BUILD_SHARED_LIBRARY)
\ No newline at end of file
diff --git a/samples/android/tutorial-4-opencl/jni/Application.mk b/samples/android/tutorial-4-opencl/jni/Application.mk
new file mode 100644
index 0000000000..06db65762a
--- /dev/null
+++ b/samples/android/tutorial-4-opencl/jni/Application.mk
@@ -0,0 +1,4 @@
+APP_STL := gnustl_static
+APP_GNUSTL_FORCE_CPP_FEATURES := exceptions rtti
+APP_ABI := armeabi-v7a
+APP_PLATFORM := android-14
diff --git a/samples/android/tutorial-4-opencl/jni/CLprocessor.cpp b/samples/android/tutorial-4-opencl/jni/CLprocessor.cpp
new file mode 100644
index 0000000000..6d843acdd3
--- /dev/null
+++ b/samples/android/tutorial-4-opencl/jni/CLprocessor.cpp
@@ -0,0 +1,212 @@
+#define __CL_ENABLE_EXCEPTIONS
+#include <CL/cl.hpp>
+
+#include <EGL/egl.h>
+
+#include <opencv2/core/ocl.hpp>
+#include <opencv2/imgproc.hpp>
+
+#include "common.hpp"
+
+const char oclProgB2B[] = "// clBuffer to clBuffer";
+const char oclProgI2B[] = "// clImage to clBuffer";
+const char oclProgI2I[] = \
+    "__constant sampler_t sampler = CLK_NORMALIZED_COORDS_FALSE | CLK_ADDRESS_CLAMP_TO_EDGE | CLK_FILTER_NEAREST; \n" \
+    "\n" \
+    "__kernel void Laplacian( \n" \
+    "        __read_only  image2d_t imgIn, \n" \
+    "        __write_only image2d_t imgOut \n" \
+    "    ) { \n" \
+    "  \n" \
+    "    const int2 pos = {get_global_id(0), get_global_id(1)}; \n" \
+    "  \n" \
+    "    float4 sum = (float4) 0.0f; \n" \
+    "    sum += read_imagef(imgIn, sampler, pos + (int2)(-1,0)); \n" \
+    "    sum += read_imagef(imgIn, sampler, pos + (int2)(+1,0)); \n" \
+    "    sum += read_imagef(imgIn, sampler, pos + (int2)(0,-1)); \n" \
+    "    sum += read_imagef(imgIn, sampler, pos + (int2)(0,+1)); \n" \
+    "    sum -= read_imagef(imgIn, sampler, pos) * 4; \n" \
+    "  \n" \
+    "    write_imagef(imgOut, pos, sum*10); \n" \
+    "} \n";
+
+void dumpCLinfo()
+{
+    LOGD("*** OpenCL info ***");
+    try
+    {
+        std::vector<cl::Platform> platforms;
+        cl::Platform::get(&platforms);
+        LOGD("OpenCL info: Found %d OpenCL platforms", (int)platforms.size());
+        for (int i = 0; i < (int)platforms.size(); ++i)
+        {
+            std::string name       = platforms[i].getInfo<CL_PLATFORM_NAME>();
+            std::string version    = platforms[i].getInfo<CL_PLATFORM_VERSION>();
+            std::string profile    = platforms[i].getInfo<CL_PLATFORM_PROFILE>();
+            std::string extensions = platforms[i].getInfo<CL_PLATFORM_EXTENSIONS>();
+            LOGD( "OpenCL info: Platform[%d] = %s, ver = %s, prof = %s, ext = %s",
+                  i, name.c_str(), version.c_str(), profile.c_str(), extensions.c_str() );
+        }
+
+        std::vector<cl::Device> devices;
+        platforms[0].getDevices(CL_DEVICE_TYPE_ALL, &devices);
+
+        for (int i = 0; i < (int)devices.size(); ++i)
+        {
+            std::string name       = devices[i].getInfo<CL_DEVICE_NAME>();
+            std::string extensions = devices[i].getInfo<CL_DEVICE_EXTENSIONS>();
+            cl_ulong type          = devices[i].getInfo<CL_DEVICE_TYPE>();
+            LOGD( "OpenCL info: Device[%d] = %s (%s), ext = %s",
+                  i, name.c_str(), (type==CL_DEVICE_TYPE_GPU ? "GPU" : "CPU"), extensions.c_str() );
+        }
+    }
+    catch(cl::Error& e)
+    {
+        LOGE( "OpenCL info: error while gathering OpenCL info: %s (%d)", e.what(), e.err() );
+    }
+    catch(std::exception& e)
+    {
+        LOGE( "OpenCL info: error while gathering OpenCL info: %s", e.what() );
+    }
+    catch(...)
+    {
+        LOGE( "OpenCL info: unknown error while gathering OpenCL info" );
+    }
+    LOGD("*******************");
+}
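Aside (not part of the patch): the Laplacian kernel string above can be exercised without any EGL/GL interop, which is handy when bringing the sample up on a new device. The sketch below is a minimal test harness assuming the cl.hpp C++ wrapper used by this file and the oclProgI2I string defined above; the function name testLaplacianKernel, the RGBA8 image format and the rgbaIn/rgbaOut/w/h parameters are illustrative, not part of the sample.

    // Standalone sanity check for the "Laplacian" kernel above, no GL interop involved.
    // Assumes this sits in (or next to) CLprocessor.cpp so oclProgI2I is visible.
    #define __CL_ENABLE_EXCEPTIONS
    #include <CL/cl.hpp>
    #include <vector>

    void testLaplacianKernel(const void* rgbaIn, void* rgbaOut, int w, int h)
    {
        cl::Context ctx(CL_DEVICE_TYPE_GPU);                       // plain CL context, no EGL properties
        std::vector<cl::Device> devs = ctx.getInfo<CL_CONTEXT_DEVICES>();
        cl::CommandQueue q(ctx, devs[0]);

        cl::Program prog(ctx, cl::Program::Sources(1, std::make_pair(oclProgI2I, sizeof(oclProgI2I))));
        prog.build(devs);

        cl::ImageFormat fmt(CL_RGBA, CL_UNORM_INT8);               // matches the RGBA textures the sample uses
        cl::Image2D imgIn (ctx, CL_MEM_READ_ONLY | CL_MEM_COPY_HOST_PTR, fmt, w, h, 0, const_cast<void*>(rgbaIn));
        cl::Image2D imgOut(ctx, CL_MEM_WRITE_ONLY, fmt, w, h);

        cl::Kernel k(prog, "Laplacian");
        k.setArg(0, imgIn);
        k.setArg(1, imgOut);
        q.enqueueNDRangeKernel(k, cl::NullRange, cl::NDRange(w, h), cl::NullRange);

        cl::size_t<3> origin, region;                              // read the result back to host memory
        origin[0] = origin[1] = origin[2] = 0;
        region[0] = w; region[1] = h; region[2] = 1;
        q.enqueueReadImage(imgOut, CL_TRUE, origin, region, 0, 0, rgbaOut);
    }

Because __CL_ENABLE_EXCEPTIONS is defined, any failing call throws cl::Error, so a real caller would wrap this in try/catch the same way the sample's own functions do.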
+
+cl::Context theContext;
+cl::CommandQueue theQueue;
+cl::Program theProgB2B, theProgI2B, theProgI2I;
+
+void initCL()
+{
+    dumpCLinfo();
+
+    EGLDisplay mEglDisplay = eglGetCurrentDisplay();
+    if (mEglDisplay == EGL_NO_DISPLAY)
+        LOGE("initCL: eglGetCurrentDisplay() returned 'EGL_NO_DISPLAY', error = %x", eglGetError());
+
+    EGLContext mEglContext = eglGetCurrentContext();
+    if (mEglContext == EGL_NO_CONTEXT)
+        LOGE("initCL: eglGetCurrentContext() returned 'EGL_NO_CONTEXT', error = %x", eglGetError());
+
+    cl_context_properties props[] =
+    {   CL_GL_CONTEXT_KHR,   (cl_context_properties) mEglContext,
+        CL_EGL_DISPLAY_KHR,  (cl_context_properties) mEglDisplay,
+        CL_CONTEXT_PLATFORM, 0,
+        0 };
+
+    try
+    {
+        cl::Platform p = cl::Platform::getDefault();
+        std::string ext = p.getInfo<CL_PLATFORM_EXTENSIONS>();
+        if(ext.find("cl_khr_gl_sharing") == std::string::npos)
+            LOGE("Warning: CL-GL sharing isn't supported by PLATFORM");
+        props[5] = (cl_context_properties) p();
+
+        theContext = cl::Context(CL_DEVICE_TYPE_GPU, props);
+        std::vector<cl::Device> devs = theContext.getInfo<CL_CONTEXT_DEVICES>();
+        LOGD("Context returned %d devices, taking the 1st one", (int)devs.size());
+        ext = devs[0].getInfo<CL_DEVICE_EXTENSIONS>();
+        if(ext.find("cl_khr_gl_sharing") == std::string::npos)
+            LOGE("Warning: CL-GL sharing isn't supported by DEVICE");
+
+        theQueue = cl::CommandQueue(theContext, devs[0]);
+
+        cl::Program::Sources src(1, std::make_pair(oclProgI2I, sizeof(oclProgI2I)));
+        theProgI2I = cl::Program(theContext, src);
+        theProgI2I.build(devs);
+
+        cv::ocl::attachContext(p.getInfo<CL_PLATFORM_NAME>(), p(), theContext(), devs[0]());
+        if( cv::ocl::useOpenCL() )
+            LOGD("OpenCV+OpenCL works OK!");
+        else
+            LOGE("Can't init OpenCV with OpenCL TAPI");
+    }
+    catch(cl::Error& e)
+    {
+        LOGE("cl::Error: %s (%d)", e.what(), e.err());
+    }
+    catch(std::exception& e)
+    {
+        LOGE("std::exception: %s", e.what());
+    }
+    catch(...)
+    {
+        LOGE( "OpenCL info: unknown error while initializing OpenCL stuff" );
+    }
+    LOGD("initCL completed");
+}
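Aside (not part of the patch): the point of cv::ocl::attachContext() above is that, once it succeeds, ordinary OpenCV T-API code on cv::UMat is dispatched to the same shared OpenCL context and device. A minimal sketch, assuming the OpenCV headers this file already includes; checkTapiAfterAttach and the 1280x720 size are illustrative only.

    // Called some time after initCL(): plain T-API calls now run on the GL-shared CL device.
    void checkTapiAfterAttach()
    {
        if (!cv::ocl::useOpenCL())
            return;                         // attachContext() failed or OpenCL is disabled

        cv::UMat src(720, 1280, CV_8UC4, cv::Scalar::all(0)), dst;
        cv::Laplacian(src, dst, CV_8U);     // executed as an OpenCL kernel on the shared context
        cv::multiply(dst, 10, dst);
        cv::ocl::finish();                  // wait for the device queue, as the sample does
    }

This is essentially the path procOCL_OCV() takes later in this file, minus the texture download/upload around it.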
+
+void closeCL()
+{
+}
+
+#define GL_TEXTURE_2D 0x0DE1
+void procOCL_I2I(int texIn, int texOut, int w, int h)
+{
+    LOGD("procOCL_I2I(%d, %d, %d, %d)", texIn, texOut, w, h);
+    cl::ImageGL imgIn (theContext, CL_MEM_READ_ONLY,  GL_TEXTURE_2D, 0, texIn);
+    cl::ImageGL imgOut(theContext, CL_MEM_WRITE_ONLY, GL_TEXTURE_2D, 0, texOut);
+    std::vector < cl::Memory > images;
+    images.push_back(imgIn);
+    images.push_back(imgOut);
+
+    int64_t t = getTimeMs();
+    theQueue.enqueueAcquireGLObjects(&images);
+    theQueue.finish();
+    LOGD("enqueueAcquireGLObjects() costs %d ms", getTimeInterval(t));
+
+    t = getTimeMs();
+    cl::Kernel Laplacian(theProgI2I, "Laplacian"); //TODO: may be done once
+    Laplacian.setArg(0, imgIn);
+    Laplacian.setArg(1, imgOut);
+    theQueue.finish();
+    LOGD("Kernel() costs %d ms", getTimeInterval(t));
+
+    t = getTimeMs();
+    theQueue.enqueueNDRangeKernel(Laplacian, cl::NullRange, cl::NDRange(w, h), cl::NullRange);
+    theQueue.finish();
+    LOGD("enqueueNDRangeKernel() costs %d ms", getTimeInterval(t));
+
+    t = getTimeMs();
+    theQueue.enqueueReleaseGLObjects(&images);
+    theQueue.finish();
+    LOGD("enqueueReleaseGLObjects() costs %d ms", getTimeInterval(t));
+}
+
+void procOCL_OCV(int tex, int w, int h)
+{
+    int64_t t = getTimeMs();
+    cl::ImageGL imgIn (theContext, CL_MEM_READ_ONLY, GL_TEXTURE_2D, 0, tex);
+    std::vector < cl::Memory > images(1, imgIn);
+    theQueue.enqueueAcquireGLObjects(&images);
+    theQueue.finish();
+    cv::UMat uIn, uOut, uTmp;
+    cv::ocl::convertFromImage(imgIn(), uIn);
+    LOGD("loading texture data to OpenCV UMat costs %d ms", getTimeInterval(t));
+    theQueue.enqueueReleaseGLObjects(&images);
+
+    t = getTimeMs();
+    //cv::blur(uIn, uOut, cv::Size(5, 5));
+    cv::Laplacian(uIn, uTmp, CV_8U);
+    cv::multiply(uTmp, 10, uOut);
+    cv::ocl::finish();
+    LOGD("OpenCV processing costs %d ms", getTimeInterval(t));
+
+    t = getTimeMs();
+    cl::ImageGL imgOut(theContext, CL_MEM_WRITE_ONLY, GL_TEXTURE_2D, 0, tex);
+    images.clear();
+    images.push_back(imgOut);
+    theQueue.enqueueAcquireGLObjects(&images);
+    cl_mem clBuffer = (cl_mem)uOut.handle(cv::ACCESS_READ);
+    cl_command_queue q = (cl_command_queue)cv::ocl::Queue::getDefault().ptr();
+    size_t offset = 0;
+    size_t origin[3] = { 0, 0, 0 };
+    size_t region[3] = { (size_t)w, (size_t)h, 1 };
+    CV_Assert(clEnqueueCopyBufferToImage(q, clBuffer, imgOut(), offset, origin, region, 0, NULL, NULL) == CL_SUCCESS);
+    theQueue.enqueueReleaseGLObjects(&images);
+    cv::ocl::finish();
+    LOGD("uploading results to texture costs %d ms", getTimeInterval(t));
+}
diff --git a/samples/android/tutorial-4-opencl/jni/GLrender.cpp b/samples/android/tutorial-4-opencl/jni/GLrender.cpp
new file mode 100644
index 0000000000..194fc4f915
--- /dev/null
+++ b/samples/android/tutorial-4-opencl/jni/GLrender.cpp
@@ -0,0 +1,372 @@
+#include <GLES2/gl2.h>
+#include <GLES2/gl2ext.h>
+
+#include <opencv2/imgproc.hpp>
+
+#include "common.hpp"
+
+float vertices[] = {
+   -1.0f, -1.0f,
+   -1.0f,  1.0f,
+    1.0f, -1.0f,
+    1.0f,  1.0f
+};
+float texCoordOES[] = {
+    0.0f, 1.0f,
+    0.0f, 0.0f,
+    1.0f, 1.0f,
+    1.0f, 0.0f
+};
+float texCoord2D[] = {
+    0.0f, 0.0f,
+    0.0f, 1.0f,
+    1.0f, 0.0f,
+    1.0f, 1.0f
+};
+
+const char vss[] = \
+    "attribute vec2 vPosition;\n" \
+    "attribute vec2 vTexCoord;\n" \
+    "varying vec2 texCoord;\n" \
+    "void main() {\n" \
+    "  texCoord = vTexCoord;\n" \
+    "  gl_Position = vec4 ( vPosition, 0.0, 1.0 );\n" \
+    "}";
+
+const char fssOES[] = \
+    "#extension GL_OES_EGL_image_external : require\n" \
+    "precision mediump float;\n" \
+    "uniform 
samplerExternalOES sTexture;\n" \ + "varying vec2 texCoord;\n" \ + "void main() {\n" \ + " gl_FragColor = texture2D(sTexture,texCoord);\n" \ + "}"; + +const char fss2D[] = \ + "precision mediump float;\n" \ + "uniform sampler2D sTexture;\n" \ + "varying vec2 texCoord;\n" \ + "void main() {\n" \ + " gl_FragColor = texture2D(sTexture,texCoord);\n" \ + "}"; + +GLuint progOES = 0; +GLuint prog2D = 0; + +GLint vPosOES, vTCOES; +GLint vPos2D, vTC2D; + +GLuint FBOtex = 0, FBOtex2 = 0; +GLuint FBO = 0; + +GLuint texOES = 0; +int texWidth = 0, texHeight = 0; + +enum ProcMode {PROC_MODE_CPU=1, PROC_MODE_OCL_DIRECT=2, PROC_MODE_OCL_OCV=3}; + +ProcMode procMode = PROC_MODE_CPU; + +static inline void deleteTex(GLuint* tex) +{ + if(tex && *tex) + { + glDeleteTextures(1, tex); + *tex = 0; + } +} + +static void releaseFBO() +{ + if (FBO != 0) + { + glBindFramebuffer(GL_FRAMEBUFFER, 0); + glDeleteFramebuffers(1, &FBO); + FBO = 0; + } + deleteTex(&FBOtex); + deleteTex(&FBOtex2); + glDeleteProgram(prog2D); + prog2D = 0; +} + +static inline void logShaderCompileError(GLuint shader, bool isProgram = false) +{ + GLchar msg[512]; + msg[0] = 0; + GLsizei len; + if(isProgram) + glGetProgramInfoLog(shader, sizeof(msg)-1, &len, msg); + else + glGetShaderInfoLog(shader, sizeof(msg)-1, &len, msg); + LOGE("Could not compile shader/program: %s", msg); +} + +static int makeShaderProg(const char* vss, const char* fss) +{ + LOGD("makeShaderProg: setup GL_VERTEX_SHADER"); + GLuint vshader = glCreateShader(GL_VERTEX_SHADER); + const GLchar* text = vss; + glShaderSource(vshader, 1, &text, 0); + glCompileShader(vshader); + GLint compiled; + glGetShaderiv(vshader, GL_COMPILE_STATUS, &compiled); + if (!compiled) { + logShaderCompileError(vshader); + glDeleteShader(vshader); + vshader = 0; + } + + LOGD("makeShaderProg: setup GL_FRAGMENT_SHADER"); + GLuint fshader = glCreateShader(GL_FRAGMENT_SHADER); + text = fss; + glShaderSource(fshader, 1, &text, 0); + glCompileShader(fshader); + glGetShaderiv(fshader, GL_COMPILE_STATUS, &compiled); + if (!compiled) { + logShaderCompileError(fshader); + glDeleteShader(fshader); + fshader = 0; + } + + LOGD("makeShaderProg: glCreateProgram"); + GLuint program = glCreateProgram(); + glAttachShader(program, vshader); + glAttachShader(program, fshader); + glLinkProgram(program); + GLint linked; + glGetProgramiv(program, GL_LINK_STATUS, &linked); + if (!linked) + { + logShaderCompileError(program, true); + glDeleteProgram(program); + program = 0; + } + glValidateProgram(program); + GLint validated; + glGetProgramiv(program, GL_VALIDATE_STATUS, &validated); + if (!validated) + { + logShaderCompileError(program, true); + glDeleteProgram(program); + program = 0; + } + + if(vshader) glDeleteShader(vshader); + if(fshader) glDeleteShader(fshader); + + return program; +} + + +static void initFBO(int width, int height) +{ + LOGD("initFBO(%d, %d)", width, height); + releaseFBO(); + + glGenTextures(1, &FBOtex2); + glBindTexture(GL_TEXTURE_2D, FBOtex2); + glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST); + + glGenTextures(1, &FBOtex); + glBindTexture(GL_TEXTURE_2D, FBOtex); + glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0); + glTexParameteri(GL_TEXTURE_2D, 
GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST); + + //int hFBO; + glGenFramebuffers(1, &FBO); + glBindFramebuffer(GL_FRAMEBUFFER, FBO); + glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, FBOtex, 0); + LOGD("initFBO status: %d", glGetError()); + + if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) + LOGE("initFBO failed: %d", glCheckFramebufferStatus(GL_FRAMEBUFFER)); + + prog2D = makeShaderProg(vss, fss2D); + vPos2D = glGetAttribLocation(prog2D, "vPosition"); + vTC2D = glGetAttribLocation(prog2D, "vTexCoord"); + glEnableVertexAttribArray(vPos2D); + glEnableVertexAttribArray(vTC2D); +} + +void drawTex(int tex, GLenum texType, GLuint fbo) +{ + int64_t t = getTimeMs(); + //draw texture to FBO or to screen + glBindFramebuffer(GL_FRAMEBUFFER, fbo); + glViewport(0, 0, texWidth, texHeight); + + glClear(GL_COLOR_BUFFER_BIT); + + GLuint prog = texType == GL_TEXTURE_EXTERNAL_OES ? progOES : prog2D; + GLint vPos = texType == GL_TEXTURE_EXTERNAL_OES ? vPosOES : vPos2D; + GLint vTC = texType == GL_TEXTURE_EXTERNAL_OES ? vTCOES : vTC2D; + float* texCoord = texType == GL_TEXTURE_EXTERNAL_OES ? texCoordOES : texCoord2D; + glUseProgram(prog); + glVertexAttribPointer(vPos, 2, GL_FLOAT, false, 4*2, vertices); + glVertexAttribPointer(vTC, 2, GL_FLOAT, false, 4*2, texCoord); + + glActiveTexture(GL_TEXTURE0); + glBindTexture(texType, tex); + glUniform1i(glGetUniformLocation(prog, "sTexture"), 0); + + glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); + glFlush(); + LOGD("drawTex(%u) costs %d ms", tex, getTimeInterval(t)); +} + +void drawFrameOrig() +{ + drawTex(texOES, GL_TEXTURE_EXTERNAL_OES, 0); +} + +void procCPU(char* buff, int w, int h) +{ + int64_t t = getTimeMs(); + cv::Mat m(h, w, CV_8UC4, buff); + cv::Laplacian(m, m, CV_8U); + m *= 10; + LOGD("procCPU() costs %d ms", getTimeInterval(t)); +} + +void drawFrameProcCPU() +{ + int64_t t; + drawTex(texOES, GL_TEXTURE_EXTERNAL_OES, FBO); + + // let's modify pixels in FBO texture in C++ code (on CPU) + const int BUFF_SIZE = 1<<24;//2k*2k*4; + static char tmpBuff[BUFF_SIZE]; + if(texWidth*texHeight > BUFF_SIZE) + { + LOGE("Internal temp buffer is too small, can't make CPU frame processing"); + return; + } + + // read + t = getTimeMs(); + glReadPixels(0, 0, texWidth, texHeight, GL_RGBA, GL_UNSIGNED_BYTE, tmpBuff); + LOGD("glReadPixels() costs %d ms", getTimeInterval(t)); + + // modify + procCPU(tmpBuff, texWidth, texHeight); + + // write back + t = getTimeMs(); + glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, texWidth, texHeight, GL_RGBA, GL_UNSIGNED_BYTE, tmpBuff); + LOGD("glTexSubImage2D() costs %d ms", getTimeInterval(t)); + + // render to screen + drawTex(FBOtex, GL_TEXTURE_2D, 0); +} + +void procOCL_I2I(int texIn, int texOut, int w, int h); +void procOCL_OCV(int tex, int w, int h); +void drawFrameProcOCL() +{ + drawTex(texOES, GL_TEXTURE_EXTERNAL_OES, FBO); + + // modify pixels in FBO texture using OpenCL and CL-GL interop + procOCL_I2I(FBOtex, FBOtex2, texWidth, texHeight); + + // render to screen + drawTex(FBOtex2, GL_TEXTURE_2D, 0); +} + +void drawFrameProcOCLOCV() +{ + drawTex(texOES, GL_TEXTURE_EXTERNAL_OES, FBO); + + // modify pixels in FBO texture using OpenCL and CL-GL interop + procOCL_OCV(FBOtex, texWidth, texHeight); + + // render to screen + drawTex(FBOtex, GL_TEXTURE_2D, 0); +} + +extern "C" void drawFrame() +{ + 
LOGD("*** drawFrame() ***"); + int64_t t = getTimeMs(); + + switch(procMode) + { + case PROC_MODE_CPU: drawFrameProcCPU(); break; + case PROC_MODE_OCL_DIRECT: drawFrameProcOCL(); break; + case PROC_MODE_OCL_OCV: drawFrameProcOCLOCV(); break; + default: drawFrameOrig(); + } + + glFinish(); + LOGD("*** drawFrame() costs %d ms ***", getTimeInterval(t)); +} + +void closeCL(); +extern "C" void closeGL() +{ + closeCL(); + LOGD("closeGL"); + deleteTex(&texOES); + + glUseProgram(0); + glDeleteProgram(progOES); + progOES = 0; + + releaseFBO(); +} + +void initCL(); +extern "C" int initGL() +{ + LOGD("initGL"); + + closeGL(); + + const char* vs = (const char*)glGetString(GL_VERSION); + LOGD("GL_VERSION = %s", vs); + + progOES = makeShaderProg(vss, fssOES); + vPosOES = glGetAttribLocation(progOES, "vPosition"); + vTCOES = glGetAttribLocation(progOES, "vTexCoord"); + glEnableVertexAttribArray(vPosOES); + glEnableVertexAttribArray(vTCOES); + + glClearColor(1.0f, 1.0f, 1.0f, 1.0f); + + texOES = 0; + glGenTextures(1, &texOES); + glBindTexture(GL_TEXTURE_EXTERNAL_OES, texOES); + glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); + glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); + glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_NEAREST); + glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_NEAREST); + + initCL(); + + return texOES; +} + +extern "C" void changeSize(int width, int height) +{ + const int MAX_W=1<<11, MAX_H=1<<11; + LOGD("changeSize: %dx%d", width, height); + texWidth = width <= MAX_W ? width : MAX_W; + texHeight = height <= MAX_H ? height : MAX_H; + initFBO(texWidth, texHeight); +} + +extern "C" void setProcessingMode(int mode) +{ + switch(mode) + { + case PROC_MODE_CPU: procMode = PROC_MODE_CPU; break; + case PROC_MODE_OCL_DIRECT: procMode = PROC_MODE_OCL_DIRECT; break; + case PROC_MODE_OCL_OCV: procMode = PROC_MODE_OCL_OCV; break; + } +} diff --git a/samples/android/tutorial-4-opencl/jni/common.hpp b/samples/android/tutorial-4-opencl/jni/common.hpp new file mode 100644 index 0000000000..20b882a9f1 --- /dev/null +++ b/samples/android/tutorial-4-opencl/jni/common.hpp @@ -0,0 +1,19 @@ +#include +#define LOG_TAG "JNIRenderer" +//#define LOGD(...) +#define LOGD(...) ((void)__android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__)) +#define LOGE(...) 
((void)__android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)) + +#include // clock_gettime + +static inline int64_t getTimeMs() +{ + struct timespec now; + clock_gettime(CLOCK_MONOTONIC, &now); + return (int64_t) now.tv_sec*1000 + now.tv_nsec/1000000; +} + +static inline int getTimeInterval(int64_t startTime) +{ + return int(getTimeMs() - startTime); +} diff --git a/samples/android/tutorial-4-opencl/jni/jni.c b/samples/android/tutorial-4-opencl/jni/jni.c new file mode 100644 index 0000000000..7be35a0003 --- /dev/null +++ b/samples/android/tutorial-4-opencl/jni/jni.c @@ -0,0 +1,32 @@ +#include + +int initGL(); +void closeGL(); +void changeSize(int width, int height); +void drawFrame(); +void setProcessingMode(int mode); + +JNIEXPORT jint JNICALL Java_org_opencv_samples_tutorial4_NativeGLRenderer_initGL(JNIEnv * env, jclass cls) +{ + return initGL(); +} + +JNIEXPORT void JNICALL Java_org_opencv_samples_tutorial4_NativeGLRenderer_closeGL(JNIEnv * env, jclass cls) +{ + closeGL(); +} + +JNIEXPORT void JNICALL Java_org_opencv_samples_tutorial4_NativeGLRenderer_changeSize(JNIEnv * env, jclass cls, jint width, jint height) +{ + changeSize(width, height); +} + +JNIEXPORT void JNICALL Java_org_opencv_samples_tutorial4_NativeGLRenderer_drawFrame(JNIEnv * env, jclass cls) +{ + drawFrame(); +} + +JNIEXPORT void JNICALL Java_org_opencv_samples_tutorial4_NativeGLRenderer_setProcessingMode(JNIEnv * env, jclass cls, jint mode) +{ + setProcessingMode(mode); +} diff --git a/samples/android/tutorial-4-opencl/lint.xml b/samples/android/tutorial-4-opencl/lint.xml new file mode 100644 index 0000000000..ee0eead5bb --- /dev/null +++ b/samples/android/tutorial-4-opencl/lint.xml @@ -0,0 +1,3 @@ + + + \ No newline at end of file diff --git a/samples/android/tutorial-4-opencl/res/drawable/icon.png b/samples/android/tutorial-4-opencl/res/drawable/icon.png new file mode 100644 index 0000000000..630454927b Binary files /dev/null and b/samples/android/tutorial-4-opencl/res/drawable/icon.png differ diff --git a/samples/android/tutorial-4-opencl/res/layout/activity.xml b/samples/android/tutorial-4-opencl/res/layout/activity.xml new file mode 100644 index 0000000000..f871f6d697 --- /dev/null +++ b/samples/android/tutorial-4-opencl/res/layout/activity.xml @@ -0,0 +1,26 @@ + + + + + + + + + + diff --git a/samples/android/tutorial-4-opencl/res/menu/menu.xml b/samples/android/tutorial-4-opencl/res/menu/menu.xml new file mode 100644 index 0000000000..2b317e20f7 --- /dev/null +++ b/samples/android/tutorial-4-opencl/res/menu/menu.xml @@ -0,0 +1,8 @@ + + + + + + + + diff --git a/samples/android/tutorial-4-opencl/res/values/strings.xml b/samples/android/tutorial-4-opencl/res/values/strings.xml new file mode 100644 index 0000000000..b7e6bed91a --- /dev/null +++ b/samples/android/tutorial-4-opencl/res/values/strings.xml @@ -0,0 +1,6 @@ + + + + OpenCV Tutorial 4 - Use OpenCL + + diff --git a/samples/android/tutorial-4-opencl/src/org/opencv/samples/tutorial4/Camera2Renderer.java b/samples/android/tutorial-4-opencl/src/org/opencv/samples/tutorial4/Camera2Renderer.java new file mode 100644 index 0000000000..a0040ad3b4 --- /dev/null +++ b/samples/android/tutorial-4-opencl/src/org/opencv/samples/tutorial4/Camera2Renderer.java @@ -0,0 +1,282 @@ +package org.opencv.samples.tutorial4; + +import java.util.Arrays; +import java.util.concurrent.Semaphore; +import java.util.concurrent.TimeUnit; + +import javax.microedition.khronos.egl.EGLConfig; +import javax.microedition.khronos.opengles.GL10; + +import android.annotation.SuppressLint; 
+import android.content.Context; +import android.graphics.Point; +import android.graphics.SurfaceTexture; +import android.hardware.camera2.CameraAccessException; +import android.hardware.camera2.CameraCaptureSession; +import android.hardware.camera2.CameraCharacteristics; +import android.hardware.camera2.CameraDevice; +import android.hardware.camera2.CameraManager; +import android.hardware.camera2.CaptureRequest; +import android.hardware.camera2.params.StreamConfigurationMap; +import android.os.Handler; +import android.os.HandlerThread; +import android.util.Log; +import android.util.Size; +import android.view.Surface; + +@SuppressLint("NewApi") public class Camera2Renderer extends MyGLRendererBase { + + protected final String LOGTAG = "Camera2Renderer"; + private CameraDevice mCameraDevice; + private CameraCaptureSession mCaptureSession; + private CaptureRequest.Builder mPreviewRequestBuilder; + private String mCameraID; + private Size mPreviewSize = new Size(1280, 720); + + private HandlerThread mBackgroundThread; + private Handler mBackgroundHandler; + private Semaphore mCameraOpenCloseLock = new Semaphore(1); + + Camera2Renderer(MyGLSurfaceView view) { + super(view); + } + + public void onResume() { + stopBackgroundThread(); + super.onResume(); + startBackgroundThread(); + } + + public void onPause() { + super.onPause(); + stopBackgroundThread(); + } + + boolean cacPreviewSize(final int width, final int height) { + Log.i(LOGTAG, "cacPreviewSize: "+width+"x"+height); + if(mCameraID == null) + return false; + CameraManager manager = (CameraManager) mView.getContext() + .getSystemService(Context.CAMERA_SERVICE); + try { + CameraCharacteristics characteristics = manager + .getCameraCharacteristics(mCameraID); + StreamConfigurationMap map = characteristics + .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); + int bestWidth = 0, bestHeight = 0; + float aspect = (float)width / height; + for (Size psize : map.getOutputSizes(SurfaceTexture.class)) { + int w = psize.getWidth(), h = psize.getHeight(); + Log.d(LOGTAG, "trying size: "+w+"x"+h); + if ( width >= w && height >= h && + bestWidth <= w && bestHeight <= h && + Math.abs(aspect - (float)w/h) < 0.2 ) { + bestWidth = w; + bestHeight = h; + //mPreviewSize = psize; + } + } + Log.i(LOGTAG, "best size: "+bestWidth+"x"+bestHeight); + if( mPreviewSize.getWidth() == bestWidth && + mPreviewSize.getHeight() == bestHeight ) + return false; + else { + mPreviewSize = new Size(bestWidth, bestHeight); + return true; + } + } catch (CameraAccessException e) { + Log.e(LOGTAG, "cacPreviewSize - Camera Access Exception"); + } catch (IllegalArgumentException e) { + Log.e(LOGTAG, "cacPreviewSize - Illegal Argument Exception"); + } catch (SecurityException e) { + Log.e(LOGTAG, "cacPreviewSize - Security Exception"); + } + return false; + } + + protected void openCamera() { + Log.i(LOGTAG, "openCamera"); + //closeCamera(); + CameraManager manager = (CameraManager) mView.getContext() + .getSystemService(Context.CAMERA_SERVICE); + try { + for (String cameraID : manager.getCameraIdList()) { + CameraCharacteristics characteristics = manager + .getCameraCharacteristics(cameraID); + if (characteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT) + continue; + + mCameraID = cameraID; + break; + } + if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) { + throw new RuntimeException( + "Time out waiting to lock camera opening."); + } + manager.openCamera(mCameraID, mStateCallback, mBackgroundHandler); + } catch 
(CameraAccessException e) { + Log.e(LOGTAG, "OpenCamera - Camera Access Exception"); + } catch (IllegalArgumentException e) { + Log.e(LOGTAG, "OpenCamera - Illegal Argument Exception"); + } catch (SecurityException e) { + Log.e(LOGTAG, "OpenCamera - Security Exception"); + } catch (InterruptedException e) { + Log.e(LOGTAG, "OpenCamera - Interrupted Exception"); + } + } + + protected void closeCamera() { + Log.i(LOGTAG, "closeCamera"); + try { + mCameraOpenCloseLock.acquire(); + if (null != mCaptureSession) { + mCaptureSession.close(); + mCaptureSession = null; + } + if (null != mCameraDevice) { + mCameraDevice.close(); + mCameraDevice = null; + } + } catch (InterruptedException e) { + throw new RuntimeException( + "Interrupted while trying to lock camera closing.", e); + } finally { + mCameraOpenCloseLock.release(); + } + } + + private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() { + + @Override + public void onOpened(CameraDevice cameraDevice) { + mCameraDevice = cameraDevice; + mCameraOpenCloseLock.release(); + createCameraPreviewSession(); + } + + @Override + public void onDisconnected(CameraDevice cameraDevice) { + //mCameraOpenCloseLock.release(); + cameraDevice.close(); + mCameraDevice = null; + } + + @Override + public void onError(CameraDevice cameraDevice, int error) { + cameraDevice.close(); + mCameraDevice = null; + mCameraOpenCloseLock.release(); + } + + }; + + private void createCameraPreviewSession() { + Log.i(LOGTAG, "createCameraPreviewSession"); + try { + mCameraOpenCloseLock.acquire(); + if (null == mCameraDevice) { + mCameraOpenCloseLock.release(); + Log.e(LOGTAG, "createCameraPreviewSession: camera isn't opened"); + return; + } + if (null != mCaptureSession) { + mCameraOpenCloseLock.release(); + Log.e(LOGTAG, "createCameraPreviewSession: mCaptureSession is already started"); + return; + } + if(null == mSTex) { + Log.e(LOGTAG, "createCameraPreviewSession: preview SurfaceTexture is null"); + return; + } + Log.d(LOGTAG, "starting preview "+mPreviewSize.getWidth()+"x"+mPreviewSize.getHeight()); + mSTex.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight()); + + Surface surface = new Surface(mSTex); + Log.d(LOGTAG, "createCameraPreviewSession: surface = " + surface); + + mPreviewRequestBuilder = mCameraDevice + .createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); + mPreviewRequestBuilder.addTarget(surface); + + mCameraDevice.createCaptureSession(Arrays.asList(surface), + new CameraCaptureSession.StateCallback() { + @Override + public void onConfigured( + CameraCaptureSession cameraCaptureSession) { + mCaptureSession = cameraCaptureSession; + try { + mPreviewRequestBuilder + .set(CaptureRequest.CONTROL_AF_MODE, + CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE); + mPreviewRequestBuilder + .set(CaptureRequest.CONTROL_AE_MODE, + CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH); + + mCaptureSession.setRepeatingRequest( + mPreviewRequestBuilder.build(), null, + mBackgroundHandler); + } catch (CameraAccessException e) { + Log.e(LOGTAG, "createCaptureSession failed"); + } + mCameraOpenCloseLock.release(); + } + + @Override + public void onConfigureFailed( + CameraCaptureSession cameraCaptureSession) { + Log.e(LOGTAG, "createCameraPreviewSession failed"); + mCameraOpenCloseLock.release(); + } + }, null); + } catch (CameraAccessException e) { + Log.e(LOGTAG, "createCameraPreviewSession"); + } catch (InterruptedException e) { + throw new RuntimeException( + "Interrupted while createCameraPreviewSession", e); + } + finally { + 
mCameraOpenCloseLock.release(); + } + } + + private void startBackgroundThread() { + Log.i(LOGTAG, "startBackgroundThread"); + mBackgroundThread = new HandlerThread("CameraBackground"); + mBackgroundThread.start(); + mBackgroundHandler = new Handler(mBackgroundThread.getLooper()); + } + + private void stopBackgroundThread() { + Log.i(LOGTAG, "stopBackgroundThread"); + if(mBackgroundThread == null) + return; + mBackgroundThread.quitSafely(); + try { + mBackgroundThread.join(); + mBackgroundThread = null; + mBackgroundHandler = null; + } catch (InterruptedException e) { + Log.e(LOGTAG, "stopBackgroundThread"); + } + } + + @Override + protected void setCameraPreviewSize(int width, int height) { + //mPreviewSize = new Size(width, height); + if( !cacPreviewSize(width, height) ) + return; + try { + mCameraOpenCloseLock.acquire(); + if (null != mCaptureSession) { + mCaptureSession.close(); + mCaptureSession = null; + } + mCameraOpenCloseLock.release(); + createCameraPreviewSession(); + } catch (InterruptedException e) { + mCameraOpenCloseLock.release(); + throw new RuntimeException( + "Interrupted while setCameraPreviewSize.", e); + } + } +} diff --git a/samples/android/tutorial-4-opencl/src/org/opencv/samples/tutorial4/CameraRenderer.java b/samples/android/tutorial-4-opencl/src/org/opencv/samples/tutorial4/CameraRenderer.java new file mode 100644 index 0000000000..692ab9884e --- /dev/null +++ b/samples/android/tutorial-4-opencl/src/org/opencv/samples/tutorial4/CameraRenderer.java @@ -0,0 +1,75 @@ +package org.opencv.samples.tutorial4; + +import java.io.IOException; +import java.util.List; + +import android.hardware.Camera; +import android.hardware.Camera.Size; +import android.util.Log; + +@SuppressWarnings("deprecation") +public class CameraRenderer extends MyGLRendererBase { + + protected final String LOGTAG = "CameraRenderer"; + private Camera mCamera; + boolean mPreviewStarted = false; + + CameraRenderer(MyGLSurfaceView view) { + super(view); + } + + protected void closeCamera() { + Log.i(LOGTAG, "closeCamera"); + if(mCamera != null) { + mCamera.stopPreview(); + mPreviewStarted = false; + mCamera.release(); + mCamera = null; + } + } + + protected void openCamera() { + Log.i(LOGTAG, "openCamera"); + closeCamera(); + mCamera = Camera.open(); + try { + mCamera.setPreviewTexture(mSTex); + } catch (IOException ioe) { + Log.e(LOGTAG, "setPreviewTexture() failed: " + ioe.getMessage()); + } + } + + public void setCameraPreviewSize(int width, int height) { + Log.i(LOGTAG, "setCameraPreviewSize: "+width+"x"+height); + if(mCamera == null) + return; + if(mPreviewStarted) { + mCamera.stopPreview(); + mPreviewStarted = false; + } + Camera.Parameters param = mCamera.getParameters(); + List psize = param.getSupportedPreviewSizes(); + int bestWidth = 0, bestHeight = 0; + if (psize.size() > 0) { + float aspect = (float)width / height; + for (Size size : psize) { + int w = size.width, h = size.height; + Log.d("Renderer", "checking camera preview size: "+w+"x"+h); + if ( w <= width && h <= height && + w >= bestWidth && h >= bestHeight && + Math.abs(aspect - (float)w/h) < 0.2 ) { + bestWidth = w; + bestHeight = h; + } + } + if(bestWidth > 0 && bestHeight > 0) { + param.setPreviewSize(bestWidth, bestHeight); + Log.i(LOGTAG, "size: "+bestWidth+" x "+bestHeight); + } + } + param.set("orientation", "landscape"); + mCamera.setParameters(param); + mCamera.startPreview(); + mPreviewStarted = true; + } +} diff --git a/samples/android/tutorial-4-opencl/src/org/opencv/samples/tutorial4/MyGLRendererBase.java 
b/samples/android/tutorial-4-opencl/src/org/opencv/samples/tutorial4/MyGLRendererBase.java new file mode 100644 index 0000000000..a0045696cf --- /dev/null +++ b/samples/android/tutorial-4-opencl/src/org/opencv/samples/tutorial4/MyGLRendererBase.java @@ -0,0 +1,118 @@ +package org.opencv.samples.tutorial4; + +import javax.microedition.khronos.egl.EGLConfig; +import javax.microedition.khronos.opengles.GL10; + +import android.graphics.SurfaceTexture; +import android.opengl.GLES20; +import android.opengl.GLSurfaceView; +import android.os.Handler; +import android.os.Looper; +import android.util.Log; +import android.view.View; +import android.widget.TextView; + +public abstract class MyGLRendererBase implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener { + protected final String LOGTAG = "MyGLRendererBase"; + protected int frameCounter; + protected long lastNanoTime; + + protected SurfaceTexture mSTex; + protected MyGLSurfaceView mView; + protected TextView mFpsText; + + protected boolean mGLInit = false; + protected boolean mTexUpdate = false; + + MyGLRendererBase(MyGLSurfaceView view) { + mView = view; + } + + protected abstract void openCamera(); + protected abstract void closeCamera(); + protected abstract void setCameraPreviewSize(int width, int height); + + public void setFpsTextView(TextView fpsTV) + { + mFpsText = fpsTV; + } + + public void onResume() { + Log.i(LOGTAG, "onResume"); + frameCounter = 0; + lastNanoTime = System.nanoTime(); + } + + public void onPause() { + Log.i(LOGTAG, "onPause"); + mGLInit = false; + mTexUpdate = false; + closeCamera(); + if(mSTex != null) { + mSTex.release(); + mSTex = null; + NativeGLRenderer.closeGL(); + } + } + + @Override + public synchronized void onFrameAvailable(SurfaceTexture surfaceTexture) { + //Log.i(LOGTAG, "onFrameAvailable"); + mTexUpdate = true; + mView.requestRender(); + } + + @Override + public void onDrawFrame(GL10 gl) { + //Log.i(LOGTAG, "onDrawFrame"); + if (!mGLInit) + return; + + synchronized (this) { + if (mTexUpdate) { + mSTex.updateTexImage(); + mTexUpdate = false; + } + } + NativeGLRenderer.drawFrame(); + + // log FPS + frameCounter++; + if(frameCounter >= 10) + { + final int fps = (int) (frameCounter * 1e9 / (System.nanoTime() - lastNanoTime)); + Log.i(LOGTAG, "drawFrame() FPS: "+fps); + if(mFpsText != null) { + Runnable fpsUpdater = new Runnable() { + public void run() { + mFpsText.setText("FPS: " + fps); + } + }; + new Handler(Looper.getMainLooper()).post(fpsUpdater); + } + frameCounter = 0; + lastNanoTime = System.nanoTime(); + } + } + + @Override + public void onSurfaceChanged(GL10 gl, int surfaceWidth, int surfaceHeight) { + Log.i(LOGTAG, "onSurfaceChanged("+surfaceWidth+"x"+surfaceHeight+")"); + NativeGLRenderer.changeSize(surfaceWidth, surfaceHeight); + setCameraPreviewSize(surfaceWidth, surfaceHeight); + } + + @Override + public void onSurfaceCreated(GL10 gl, EGLConfig config) { + Log.i(LOGTAG, "onSurfaceCreated"); + String strGLVersion = GLES20.glGetString(GLES20.GL_VERSION); + if (strGLVersion != null) + Log.i(LOGTAG, "OpenGL ES version: " + strGLVersion); + + int hTex = NativeGLRenderer.initGL(); + mSTex = new SurfaceTexture(hTex); + mSTex.setOnFrameAvailableListener(this); + openCamera(); + mGLInit = true; + } +} diff --git a/samples/android/tutorial-4-opencl/src/org/opencv/samples/tutorial4/MyGLSurfaceView.java b/samples/android/tutorial-4-opencl/src/org/opencv/samples/tutorial4/MyGLSurfaceView.java new file mode 100644 index 0000000000..8556b41816 --- /dev/null +++ 
b/samples/android/tutorial-4-opencl/src/org/opencv/samples/tutorial4/MyGLSurfaceView.java @@ -0,0 +1,65 @@ +package org.opencv.samples.tutorial4; + +import android.app.Activity; +import android.content.Context; +import android.opengl.GLSurfaceView; +import android.util.AttributeSet; +import android.view.MotionEvent; +import android.view.SurfaceHolder; +import android.widget.TextView; + +public class MyGLSurfaceView extends GLSurfaceView { + + MyGLRendererBase mRenderer; + + public MyGLSurfaceView(Context context, AttributeSet attrs) { + super(context, attrs); + + if(android.os.Build.VERSION.SDK_INT >= 21) + mRenderer = new Camera2Renderer(this); + else + mRenderer = new CameraRenderer(this); + + setEGLContextClientVersion(2); + setRenderer(mRenderer); + setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY); + } + + public void setFpsTextView(TextView tv) { + mRenderer.setFpsTextView(tv); + } + + @Override + public void surfaceCreated(SurfaceHolder holder) { + super.surfaceCreated(holder); + } + + @Override + public void surfaceDestroyed(SurfaceHolder holder) { + super.surfaceDestroyed(holder); + } + + @Override + public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) { + super.surfaceChanged(holder, format, w, h); + } + + @Override + public void onResume() { + super.onResume(); + mRenderer.onResume(); + } + + @Override + public void onPause() { + mRenderer.onPause(); + super.onPause(); + } + + @Override + public boolean onTouchEvent(MotionEvent e) { + if(e.getAction() == MotionEvent.ACTION_DOWN) + ((Activity)getContext()).openOptionsMenu(); + return true; + } +} diff --git a/samples/android/tutorial-4-opencl/src/org/opencv/samples/tutorial4/NativeGLRenderer.java b/samples/android/tutorial-4-opencl/src/org/opencv/samples/tutorial4/NativeGLRenderer.java new file mode 100644 index 0000000000..40eef1e6a9 --- /dev/null +++ b/samples/android/tutorial-4-opencl/src/org/opencv/samples/tutorial4/NativeGLRenderer.java @@ -0,0 +1,19 @@ +package org.opencv.samples.tutorial4; + +public class NativeGLRenderer { + static + { + System.loadLibrary("opencv_java3"); + System.loadLibrary("JNIrender"); + } + + public static final int PROCESSING_MODE_CPU = 1; + public static final int PROCESSING_MODE_OCL_DIRECT = 2; + public static final int PROCESSING_MODE_OCL_OCV = 3; + + public static native int initGL(); + public static native void closeGL(); + public static native void drawFrame(); + public static native void changeSize(int width, int height); + public static native void setProcessingMode(int mode); +} diff --git a/samples/android/tutorial-4-opencl/src/org/opencv/samples/tutorial4/Tutorial4Activity.java b/samples/android/tutorial-4-opencl/src/org/opencv/samples/tutorial4/Tutorial4Activity.java new file mode 100644 index 0000000000..cda66df7c4 --- /dev/null +++ b/samples/android/tutorial-4-opencl/src/org/opencv/samples/tutorial4/Tutorial4Activity.java @@ -0,0 +1,93 @@ +package org.opencv.samples.tutorial4; + +import android.app.Activity; +import android.content.pm.ActivityInfo; +import android.os.Bundle; +import android.view.Menu; +import android.view.MenuInflater; +import android.view.MenuItem; +import android.view.Window; +import android.view.WindowManager; +import android.widget.TextView; + +public class Tutorial4Activity extends Activity { + + private MyGLSurfaceView mView; + private TextView mProcMode; + + @Override + public void onCreate(Bundle savedInstanceState) { + super.onCreate(savedInstanceState); + requestWindowFeature(Window.FEATURE_NO_TITLE); + 
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, + WindowManager.LayoutParams.FLAG_FULLSCREEN); + getWindow().setFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON, + WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON); + setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE); + + //mView = new MyGLSurfaceView(this, null); + //setContentView(mView); + setContentView(R.layout.activity); + mView = (MyGLSurfaceView) findViewById(R.id.my_gl_surface_view); + TextView tv = (TextView)findViewById(R.id.fps_text_view); + mView.setFpsTextView(tv); + mProcMode = (TextView)findViewById(R.id.proc_mode_text_view); + runOnUiThread(new Runnable() { + public void run() { + mProcMode.setText("Processing mode: CPU"); + } + }); + + NativeGLRenderer.setProcessingMode(NativeGLRenderer.PROCESSING_MODE_CPU); } + + @Override + protected void onPause() { + mView.onPause(); + super.onPause(); + } + + @Override + protected void onResume() { + super.onResume(); + mView.onResume(); + } + + @Override + public boolean onCreateOptionsMenu(Menu menu) { + MenuInflater inflater = getMenuInflater(); + inflater.inflate(R.menu.menu, menu); + return super.onCreateOptionsMenu(menu); + } + + @Override + public boolean onOptionsItemSelected(MenuItem item) { + switch (item.getItemId()) { + case R.id.cpu: + runOnUiThread(new Runnable() { + public void run() { + mProcMode.setText("Processing mode: CPU"); + } + }); + NativeGLRenderer.setProcessingMode(NativeGLRenderer.PROCESSING_MODE_CPU); + return true; + case R.id.ocl_direct: + runOnUiThread(new Runnable() { + public void run() { + mProcMode.setText("Processing mode: OpenCL direct"); + } + }); + NativeGLRenderer.setProcessingMode(NativeGLRenderer.PROCESSING_MODE_OCL_DIRECT); + return true; + case R.id.ocl_ocv: + runOnUiThread(new Runnable() { + public void run() { + mProcMode.setText("Processing mode: OpenCL via OpenCV (TAPI)"); + } + }); + NativeGLRenderer.setProcessingMode(NativeGLRenderer.PROCESSING_MODE_OCL_OCV); + return true; + default: + return false; + } + } +} \ No newline at end of file
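Aside (not part of the patch): the PROCESSING_MODE_* constants that Tutorial4Activity passes to NativeGLRenderer.setProcessingMode() have to stay equal to the ProcMode enum values in GLrender.cpp (1 = CPU, 2 = OpenCL direct, 3 = OpenCL via OpenCV); setProcessingMode() silently ignores any other value. A hypothetical compile-time guard that could sit next to the enum in GLrender.cpp, shown as a sketch only:

    // Fails to compile (negative array size) if the native ProcMode values ever drift
    // from the Java-side constants declared in NativeGLRenderer.java.
    // ProcMode and its enumerators are the ones already defined in GLrender.cpp.
    typedef char assert_proc_modes_match_java[
        (PROC_MODE_CPU == 1 && PROC_MODE_OCL_DIRECT == 2 && PROC_MODE_OCL_OCV == 3) ? 1 : -1];

The typedef form is used instead of static_assert so the guard also builds with the pre-C++11 defaults of the gnustl_static toolchain configured in Application.mk.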