summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorRakesh Movva <r-movva@ti.com>2015-05-18 14:39:38 -0500
committerGerrit Code Review <gerrit2@DLEZVX23.itg.ti.com>2015-05-22 10:38:58 -0500
commit42d4731c437bcd2c933c161093ee53747e23e580 (patch)
tree307b28faae6e357ff12a7a77eeff05183830f82e
parentcd94ea69c14551d40294bda9a00929c23de8fba5 (diff)
downloaddra7xx-42d4731c437bcd2c933c161093ee53747e23e580.tar.gz
CameraHal: CameraHal migration from KitKat to Lollipop.
Enabling the camera preview in Lollipop. Change-Id: I6833fe82612645c9262f2c01d36e115a6ba656dd Signed-off-by: Rakesh Movva <r-movva@ti.com>
-rw-r--r--android-api.mk27
-rw-r--r--camera/ANativeWindowDisplayAdapter.cpp1265
-rw-r--r--camera/Android.mk146
-rw-r--r--camera/AppCallbackNotifier.cpp2002
-rw-r--r--camera/BaseCameraAdapter.cpp2724
-rw-r--r--camera/BufferSourceAdapter.cpp1007
-rw-r--r--camera/CameraHal.cpp4749
-rw-r--r--camera/CameraHalCommon.cpp233
-rw-r--r--camera/CameraHalUtilClasses.cpp361
-rw-r--r--camera/CameraHal_Module.cpp830
-rw-r--r--camera/CameraParameters.cpp241
-rw-r--r--camera/CameraProperties.cpp135
-rw-r--r--camera/DecoderFactory.cpp67
-rw-r--r--camera/Decoder_libjpeg.cpp301
-rw-r--r--camera/DrmMemoryManager.cpp246
-rw-r--r--camera/Encoder_libjpeg.cpp549
-rw-r--r--camera/FrameDecoder.cpp204
-rw-r--r--camera/NV12_resize.c307
-rw-r--r--camera/NV12_resize.cpp290
-rw-r--r--camera/OmxFrameDecoder.cpp1077
-rw-r--r--camera/SensorListener.cpp236
-rw-r--r--camera/SwFrameDecoder.cpp85
-rw-r--r--camera/TICameraParameters.cpp236
-rw-r--r--camera/V4LCameraAdapter/V4LCameraAdapter.cpp1802
-rw-r--r--camera/V4LCameraAdapter/V4LCapabilities.cpp369
-rw-r--r--camera/V4LCameraAdapter/V4LM2M.cpp625
-rw-r--r--camera/inc/ANativeWindowDisplayAdapter.h196
-rw-r--r--camera/inc/BaseCameraAdapter.h308
-rw-r--r--camera/inc/BufferSourceAdapter.h228
-rw-r--r--camera/inc/CameraHal.h1550
-rw-r--r--camera/inc/CameraProperties.h242
-rw-r--r--camera/inc/Common.h65
-rw-r--r--camera/inc/DecoderFactory.h35
-rw-r--r--camera/inc/Decoder_libjpeg.h58
-rw-r--r--camera/inc/Encoder_libjpeg.h226
-rw-r--r--camera/inc/FrameDecoder.h173
-rw-r--r--camera/inc/General3A_Settings.h295
-rw-r--r--camera/inc/NV12_resize.h135
-rw-r--r--camera/inc/OmxFrameDecoder.h204
-rw-r--r--camera/inc/SensorListener.h105
-rw-r--r--camera/inc/SwFrameDecoder.h47
-rw-r--r--camera/inc/TICameraParameters.h265
-rw-r--r--camera/inc/V4LCameraAdapter/V4LCameraAdapter.h269
-rw-r--r--camera/inc/VideoMetadata.h32
-rw-r--r--kernel-headers/uapi/linux/v4l2-common.h71
-rw-r--r--kernel-headers/uapi/linux/v4l2-controls.h898
-rw-r--r--kernel-headers/uapi/linux/v4l2-dv-timings.h826
-rw-r--r--kernel-headers/uapi/linux/v4l2-mediabus.h135
-rw-r--r--kernel-headers/uapi/linux/v4l2-subdev.h180
-rw-r--r--kernel-headers/uapi/linux/videodev2.h1966
-rw-r--r--libtiutils/Android.mk49
-rw-r--r--libtiutils/DebugUtils.cpp96
-rw-r--r--libtiutils/DebugUtils.h397
-rw-r--r--libtiutils/ErrorUtils.cpp141
-rw-r--r--libtiutils/ErrorUtils.h49
-rw-r--r--libtiutils/MessageQueue.cpp419
-rw-r--r--libtiutils/MessageQueue.h98
-rw-r--r--libtiutils/Semaphore.cpp232
-rw-r--r--libtiutils/Semaphore.h63
-rw-r--r--libtiutils/Status.h67
-rw-r--r--libtiutils/UtilsCommon.h99
61 files changed, 30333 insertions, 0 deletions
diff --git a/android-api.mk b/android-api.mk
new file mode 100644
index 0000000..ff75de0
--- /dev/null
+++ b/android-api.mk
@@ -0,0 +1,27 @@

# Makefile variables and C/C++ macros to recognize API level
ANDROID_API_JB_MR1_OR_LATER :=
ANDROID_API_JB_OR_LATER :=
ANDROID_API_ICS_OR_LATER :=
ANDROID_API_CFLAGS :=

# Idiom: `test N -ge M || echo 1` prints nothing when the comparison holds,
# so the ifeq against the empty string is true exactly when
# PLATFORM_SDK_VERSION >= M.
ifeq ($(shell test $(PLATFORM_SDK_VERSION) -ge 17 || echo 1),)
    ANDROID_API_JB_MR1_OR_LATER := true
    ANDROID_API_CFLAGS += -DANDROID_API_JB_MR1_OR_LATER
endif
ifeq ($(shell test $(PLATFORM_SDK_VERSION) -ge 16 || echo 1),)
    ANDROID_API_JB_OR_LATER := true
    ANDROID_API_CFLAGS += -DANDROID_API_JB_OR_LATER
endif
ifeq ($(shell test $(PLATFORM_SDK_VERSION) -ge 14 || echo 1),)
    ANDROID_API_ICS_OR_LATER := true
    ANDROID_API_CFLAGS += -DANDROID_API_ICS_OR_LATER
endif

# Callable macro that resets every variable defined above, then undefines
# itself (last eval) so it cannot be invoked twice.
define clear-android-api-vars
$(eval ANDROID_API_JB_MR1_OR_LATER:=) \
$(eval ANDROID_API_JB_OR_LATER:=) \
$(eval ANDROID_API_ICS_OR_LATER:=) \
$(eval ANDROID_API_CFLAGS:=) \
$(eval clear-android-api-vars:=)
endef
diff --git a/camera/ANativeWindowDisplayAdapter.cpp b/camera/ANativeWindowDisplayAdapter.cpp
new file mode 100644
index 0000000..d19c476
--- /dev/null
+++ b/camera/ANativeWindowDisplayAdapter.cpp
@@ -0,0 +1,1265 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "ANativeWindowDisplayAdapter.h"
+#include <OMX_IVCommon.h>
+#include <ui/GraphicBuffer.h>
+#include <ui/GraphicBufferMapper.h>
+#include <hal_public.h>
+
+namespace Ti {
+namespace Camera {
+
///Constant declarations
///@todo Check the time units
// Timeout used by the display thread when waiting for messages/frames.
// NOTE(review): the original trailing comment said "seconds", but a value
// of 1000 more plausibly means milliseconds -- confirm against the units
// expected by Utils::MessageQueue::waitForMsg().
const int ANativeWindowDisplayAdapter::DISPLAY_TIMEOUT = 1000;

// Suspends buffers after the given number of failed dequeue attempts
// (presumably tracked via mFailedDQs / mSuspend -- the consuming code is
// not visible in this file chunk).
const int ANativeWindowDisplayAdapter::FAILED_DQS_TO_SUSPEND = 3;
+
+
+OMX_COLOR_FORMATTYPE toOMXPixFormat(const char* parameters_format)
+{
+ OMX_COLOR_FORMATTYPE pixFormat;
+
+ if ( parameters_format != NULL )
+ {
+ if (strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0)
+ {
+ CAMHAL_LOGDA("CbYCrY format selected");
+ pixFormat = OMX_COLOR_FormatCbYCrY;
+ }
+ else if(strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0)
+ {
+ CAMHAL_LOGDA("YUV420SP format selected");
+ pixFormat = OMX_COLOR_FormatYUV420SemiPlanar;
+ }
+ else if(strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_RGB565) == 0)
+ {
+ CAMHAL_LOGDA("RGB565 format selected");
+ pixFormat = OMX_COLOR_Format16bitRGB565;
+ }
+ else
+ {
+ CAMHAL_LOGDA("Invalid format, NV12 format selected as default");
+ pixFormat = OMX_COLOR_FormatYUV420SemiPlanar;
+ }
+ }
+ else {
+ CAMHAL_LOGEA("Preview format is NULL, defaulting to NV12");
+ pixFormat = OMX_COLOR_FormatYUV420SemiPlanar;
+ }
+
+ return pixFormat;
+}
+
+/*--------------------ANativeWindowDisplayAdapter Class STARTS here-----------------------------*/
+
+
/**
 * Display Adapter class STARTS here..
 *
 * Constructor: puts the adapter into DISPLAY_INIT state with no window,
 * no frame provider and no buffers attached. initialize() must be called
 * afterwards to spawn the display thread.
 */
ANativeWindowDisplayAdapter::ANativeWindowDisplayAdapter():mDisplayThread(NULL),
    mDisplayState(ANativeWindowDisplayAdapter::DISPLAY_INIT),
    mDisplayEnabled(false),
    mBufferCount(0),
    mUseExternalBufferLocking(false)
{
    LOG_FUNCTION_NAME;

#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
    // Reset the PPM (performance-measurement) bookkeeping.
    mShotToShot = false;
    mStartCapture.tv_sec = 0;
    mStartCapture.tv_usec = 0;
    mStandbyToShot.tv_sec = 0;
    mStandbyToShot.tv_usec = 0;
    mMeasureStandby = false;
#endif

    // Pointers handed to us later via setPreviewWindow()/setFrameProvider()
    // and allocateBufferList().
    mPixelFormat = NULL;
    mBuffers = NULL;
    mOffsetsMap = NULL;
    mFrameProvider = NULL;
    mANativeWindow = NULL;

    // Geometry is established by enableDisplay()/allocateBufferList().
    mFrameWidth = 0;
    mFrameHeight = 0;
    mPreviewWidth = 0;
    mPreviewHeight = 0;

    mSuspend = false;
    mFailedDQs = 0;

    mPaused = false;
    // -1 marks "no crop set yet" so the first PostFrame always sets crop.
    mXOff = -1;
    mYOff = -1;
    mFirstInit = false;

    // Duplicated buffer fd, created lazily in getFd().
    mFD = -1;

    LOG_FUNCTION_NAME_EXIT;
}
+
/**
 * Destructor: detaches from the frame provider, releases the window via
 * destroy(), then shuts down the display thread with a synchronous
 * DISPLAY_EXIT handshake.
 */
ANativeWindowDisplayAdapter::~ANativeWindowDisplayAdapter()
{
    Utils::Semaphore sem;
    Utils::Message msg;

    LOG_FUNCTION_NAME;

    ///If Frame provider exists
    if (mFrameProvider) {
        // Unregister with the frame provider
        mFrameProvider->disableFrameNotification(CameraFrame::ALL_FRAMES);
        delete mFrameProvider;
        mFrameProvider = NULL;
    }

    ///The ANativeWindow object will get destroyed here
    destroy();

    ///If Display thread exists
    if(mDisplayThread.get())
    {
        ///Kill the display thread
        sem.Create();
        msg.command = DisplayThread::DISPLAY_EXIT;

        // Send the semaphore to signal once the command is completed
        msg.arg1 = &sem;

        ///Post the message to display thread
        mDisplayThread->msgQ().put(&msg);

        ///Wait for the ACK - implies that the thread is now started and waiting for frames
        sem.Wait();

        // Exit and cleanup the thread
        mDisplayThread->requestExitAndWait();

        // Delete the display thread
        mDisplayThread.clear();
    }

    LOG_FUNCTION_NAME_EXIT;

}
+
+status_t ANativeWindowDisplayAdapter::initialize()
+{
+ LOG_FUNCTION_NAME;
+
+ ///Create the display thread
+ mDisplayThread = new DisplayThread(this);
+ if ( !mDisplayThread.get() )
+ {
+ CAMHAL_LOGEA("Couldn't create display thread");
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_MEMORY;
+ }
+
+ ///Start the display thread
+ status_t ret = mDisplayThread->run("DisplayThread", android::PRIORITY_URGENT_DISPLAY);
+ if ( ret != NO_ERROR )
+ {
+ CAMHAL_LOGEA("Couldn't run display thread");
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+int ANativeWindowDisplayAdapter::setPreviewWindow(preview_stream_ops_t* window)
+{
+ LOG_FUNCTION_NAME;
+ ///Note that Display Adapter cannot work without a valid window object
+ if ( !window)
+ {
+ CAMHAL_LOGEA("NULL window object passed to DisplayAdapter");
+ LOG_FUNCTION_NAME_EXIT;
+ return BAD_VALUE;
+ }
+
+ if ( window == mANativeWindow ) {
+ return ALREADY_EXISTS;
+ }
+
+ ///Destroy the existing window object, if it exists
+ destroy();
+
+ ///Move to new window obj
+ mANativeWindow = window;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+}
+
+int ANativeWindowDisplayAdapter::setFrameProvider(FrameNotifier *frameProvider)
+{
+ LOG_FUNCTION_NAME;
+
+ // Check for NULL pointer
+ if ( !frameProvider ) {
+ CAMHAL_LOGEA("NULL passed for frame provider");
+ LOG_FUNCTION_NAME_EXIT;
+ return BAD_VALUE;
+ }
+
+ //Release any previous frame providers
+ if ( NULL != mFrameProvider ) {
+ delete mFrameProvider;
+ }
+
+ /** Dont do anything here, Just save the pointer for use when display is
+ actually enabled or disabled
+ */
+ mFrameProvider = new FrameProvider(frameProvider, this, frameCallbackRelay);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+}
+
+int ANativeWindowDisplayAdapter::setErrorHandler(ErrorNotifier *errorNotifier)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL == errorNotifier ) {
+ CAMHAL_LOGEA("Invalid Error Notifier reference");
+ ret = BAD_VALUE;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ mErrorNotifier = errorNotifier;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+status_t ANativeWindowDisplayAdapter::setSnapshotTimeRef(struct timeval *refTime)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL != refTime )
+ {
+ android::AutoMutex lock(mLock);
+ memcpy(&mStartCapture, refTime, sizeof(struct timeval));
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+#endif
+
+
/**
 * Enables the display path: starts the display thread's frame processing
 * (synchronous DISPLAY_START handshake) and subscribes to preview and
 * snapshot frames from the frame provider. Idempotent when already enabled.
 *
 * @param width/height preview dimensions used for crop computation later.
 * @param refTime      optional standby-measurement reference time (PPM only).
 */
int ANativeWindowDisplayAdapter::enableDisplay(int width, int height, struct timeval *refTime)
{
    Utils::Semaphore sem;
    Utils::Message msg;

    LOG_FUNCTION_NAME;

    if ( mDisplayEnabled )
    {
        CAMHAL_LOGDA("Display is already enabled");
        LOG_FUNCTION_NAME_EXIT;

        return NO_ERROR;
    }

#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS

    // Arm the "standby to first shot" measurement; consumed in PostFrame().
    if ( NULL != refTime )
    {
        android::AutoMutex lock(mLock);
        memcpy(&mStandbyToShot, refTime, sizeof(struct timeval));
        mMeasureStandby = true;
    }

#endif

    //Send START_DISPLAY COMMAND to display thread. Display thread will start and then wait for a message
    sem.Create();
    msg.command = DisplayThread::DISPLAY_START;

    // Send the semaphore to signal once the command is completed
    msg.arg1 = &sem;

    ///Post the message to display thread
    mDisplayThread->msgQ().put(&msg);

    ///Wait for the ACK - implies that the thread is now started and waiting for frames
    sem.Wait();

    // Register with the frame provider for frames
    mFrameProvider->enableFrameNotification(CameraFrame::PREVIEW_FRAME_SYNC);
    mFrameProvider->enableFrameNotification(CameraFrame::SNAPSHOT_FRAME);

    mDisplayEnabled = true;
    mPreviewWidth = width;
    mPreviewHeight = height;

    CAMHAL_LOGVB("mPreviewWidth = %d mPreviewHeight = %d", mPreviewWidth, mPreviewHeight);

    LOG_FUNCTION_NAME_EXIT;

    return NO_ERROR;
}
+
+int ANativeWindowDisplayAdapter::disableDisplay(bool cancel_buffer)
+{
+ status_t ret = NO_ERROR;
+ android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
+
+ LOG_FUNCTION_NAME;
+
+ if(!mDisplayEnabled)
+ {
+ CAMHAL_LOGDA("Display is already disabled");
+ LOG_FUNCTION_NAME_EXIT;
+ return ALREADY_EXISTS;
+ }
+
+ // Unregister with the frame provider here
+ mFrameProvider->disableFrameNotification(CameraFrame::PREVIEW_FRAME_SYNC);
+ mFrameProvider->disableFrameNotification(CameraFrame::SNAPSHOT_FRAME);
+ mFrameProvider->removeFramePointers();
+
+ if ( NULL != mDisplayThread.get() )
+ {
+ //Send STOP_DISPLAY COMMAND to display thread. Display thread will stop and dequeue all messages
+ // and then wait for message
+ Utils::Semaphore sem;
+ sem.Create();
+ Utils::Message msg;
+ msg.command = DisplayThread::DISPLAY_STOP;
+
+ // Send the semaphore to signal once the command is completed
+ msg.arg1 = &sem;
+
+ ///Post the message to display thread
+ mDisplayThread->msgQ().put(&msg);
+
+ ///Wait for the ACK for display to be disabled
+
+ sem.Wait();
+
+ }
+
+ android::AutoMutex lock(mLock);
+ {
+ ///Reset the display enabled flag
+ mDisplayEnabled = false;
+
+ // Reset pause flag since display is being disabled
+ mPaused = false;
+
+ ///Reset the offset values
+ mXOff = -1;
+ mYOff = -1;
+
+ ///Reset the frame width and height values
+ mFrameWidth =0;
+ mFrameHeight = 0;
+ mPreviewWidth = 0;
+ mPreviewHeight = 0;
+
+ if(cancel_buffer)
+ {
+ // Return the buffers to ANativeWindow here, the mFramesWithCameraAdapterMap is also cleared inside
+ returnBuffersToWindow();
+ }
+ else
+ {
+ mANativeWindow = NULL;
+ // Clear the frames with camera adapter map
+ mFramesWithCameraAdapterMap.clear();
+ }
+
+
+ }
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+}
+
+status_t ANativeWindowDisplayAdapter::pauseDisplay(bool pause)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ {
+ android::AutoMutex lock(mLock);
+ mPaused = pause;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+
/**
 * Releases the association with the current preview window. Disables the
 * display first if it is still running (without returning buffers, since
 * the window is being dropped), then resets the buffer count.
 */
void ANativeWindowDisplayAdapter::destroy()
{
    LOG_FUNCTION_NAME;

    ///Check if the display is disabled, if not disable it
    if ( mDisplayEnabled )
    {
        CAMHAL_LOGDA("WARNING: Calling destroy of Display adapter when display enabled. Disabling display..");
        disableDisplay(false);
    }

    mBufferCount = 0;

    LOG_FUNCTION_NAME_EXIT;
}
+
// Implementation of inherited interfaces
/**
 * Allocates (dequeues) `numBufs` preview buffers from the ANativeWindow,
 * records their gralloc handles and YUV plane pointers, and hands the
 * initial queueable set to the frame provider. The buffers beyond the
 * window's minimum-undequeued quota are cancelled back to the window but
 * their mapped pointers are still registered.
 *
 * @param width/height requested buffer geometry.
 * @param format       CameraParameters format string (window itself is
 *                     always configured as NV12 -- see note below).
 * @param bytes        [out] size of one buffer as computed by CameraHal.
 * @param numBufs      number of buffers to configure on the window.
 * @return the CameraBuffer array, or NULL on failure (error notifier fired).
 */
CameraBuffer* ANativeWindowDisplayAdapter::allocateBufferList(int width, int height, const char* format, int &bytes, int numBufs)
{
    LOG_FUNCTION_NAME;
    status_t err;
    int i = -1;
    const int lnumBufs = numBufs;
    int undequeued = 0;
    android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
    android::Rect bounds;

    // Bookkeeping table; one entry per window buffer.
    mBuffers = new CameraBuffer [lnumBufs];
    memset (mBuffers, 0, sizeof(CameraBuffer) * lnumBufs);

    mFramesType.clear();

    if ( NULL == mANativeWindow ) {
        return NULL;
    }

    // Set gralloc usage bits for window.
    err = mANativeWindow->set_usage(mANativeWindow, CAMHAL_GRALLOC_USAGE);
    if ( NO_ERROR != err ) {
        CAMHAL_LOGE("Surface::setUsage failed: %s (%d)", strerror(-err), -err);

        if ( NO_INIT == err ) {
            CAMHAL_LOGEA("Preview surface abandoned!");
            mANativeWindow = NULL;
        }

        return NULL;
    }

    CAMHAL_LOGDB("Number of buffers set to ANativeWindow %d", numBufs);
    ///Set the number of buffers needed for camera preview
    err = mANativeWindow->set_buffer_count(mANativeWindow, numBufs);
    if ( NO_ERROR != err ) {
        CAMHAL_LOGE("Surface::setBufferCount failed: %s (%d)", strerror(-err), -err);

        if ( NO_INIT == err ) {
            CAMHAL_LOGEA("Preview surface abandoned!");
            mANativeWindow = NULL;
        }

        return NULL;
    }
    CAMHAL_LOGDB("Configuring %d buffers for ANativeWindow", numBufs);
    mBufferCount = numBufs;


    // Set window geometry
    err = mANativeWindow->set_buffers_geometry(
            mANativeWindow,
            width,
            height,
            /*toOMXPixFormat(format)*/HAL_PIXEL_FORMAT_TI_NV12); // Gralloc only supports NV12 alloc!

    if ( NO_ERROR != err ) {
        CAMHAL_LOGE("native_window_set_buffers_geometry failed: %s (%d)", strerror(-err), -err);

        if ( NO_INIT == err ) {
            CAMHAL_LOGEA("Preview surface abandoned!");
            mANativeWindow = NULL;
        }

        return NULL;
    }

    ///We just return the buffers from ANativeWindow, if the width and height are same, else (vstab, vnf case)
    ///re-allocate buffers using ANativeWindow and then get them
    ///@todo - Re-allocate buffers for vnf and vstab using the width, height, format, numBufs etc
    // NOTE(review): this check is dead code -- `new` above either succeeds
    // or throws, so mBuffers cannot be NULL here.
    if ( mBuffers == NULL )
    {
        CAMHAL_LOGEA("Couldn't create array for ANativeWindow buffers");
        LOG_FUNCTION_NAME_EXIT;
        return NULL;
    }

    // NOTE(review): return value unchecked here; minUndequeueableBuffers()
    // does check it -- consider reusing that helper.
    mANativeWindow->get_min_undequeued_buffer_count(mANativeWindow, &undequeued);
    mPixelFormat = CameraHal::getPixelFormatConstant(format);

    // Dequeue every buffer once so we learn all the gralloc handles.
    for ( i=0; i < mBufferCount; i++ )
    {
        buffer_handle_t *handle;
        int stride; // dummy variable to get stride
        // TODO(XXX): Do we need to keep stride information in camera hal?

        err = mANativeWindow->dequeue_buffer(mANativeWindow, &handle, &stride);

        if ( NO_ERROR != err ) {
            CAMHAL_LOGE("Surface::dequeueBuffer failed: %s (%d)", strerror(-err), -err);

            if ( NO_INIT == err ) {
                CAMHAL_LOGEA("Preview surface abandoned!");
                mANativeWindow = NULL;
            }

            goto fail;
        }

        CAMHAL_LOGDB("got handle %p", handle);
        mBuffers[i].opaque = (void *)handle;
        mBuffers[i].type = CAMERA_BUFFER_ANW;
        mBuffers[i].format = mPixelFormat;
        mFramesWithCameraAdapterMap.add(handle, i);

        // Tag remaining preview buffers as preview frames
        // NOTE(review): casting a pointer to int as the map key truncates
        // on 64-bit builds; the key type of mFramesType is declared in the
        // header (not visible here) -- verify.
        if ( i >= ( mBufferCount - undequeued ) ) {
            mFramesType.add( (int) mBuffers[i].opaque,
                            CameraFrame::PREVIEW_FRAME_SYNC);
        }

        bytes = CameraHal::calculateBufferSize(format, width, height);

    }

    // lock the initial queueable buffers
    bounds.left = 0;
    bounds.top = 0;
    bounds.right = width;
    bounds.bottom = height;

    // The first (mBufferCount - undequeued) buffers stay with the camera:
    // lock them to obtain their YUV plane pointers.
    for( i = 0; i < mBufferCount-undequeued; i++ )
    {
        android_ycbcr ycbcr = android_ycbcr();
        buffer_handle_t *handle = (buffer_handle_t *) mBuffers[i].opaque;

        mANativeWindow->lock_buffer(mANativeWindow, handle);

        mapper.lockYCbCr(*handle, CAMHAL_GRALLOC_USAGE, bounds, &ycbcr);
        mBuffers[i].mapped = ycbcr.y;
        mBuffers[i].ycbcr = ycbcr;
        mFrameProvider->addFramePointers(&mBuffers[i], &ycbcr);
        if (mUseExternalBufferLocking) {
            mapper.unlock(*handle);
        }
    }

    // return the rest of the buffers back to ANativeWindow
    // NOTE(review): the `i >= 0 &&` clause is redundant here since i starts
    // at a non-negative value.
    for(i = (mBufferCount-undequeued); i >= 0 && i < mBufferCount; i++)
    {
        buffer_handle_t *handle = (buffer_handle_t *) mBuffers[i].opaque;
        err = mANativeWindow->cancel_buffer(mANativeWindow, handle);
        if ( NO_ERROR != err ) {
            CAMHAL_LOGE("Surface::cancelBuffer failed: %s (%d)", strerror(-err), -err);

            if ( NO_INIT == err ) {
                CAMHAL_LOGEA("Preview surface abandoned!");
                mANativeWindow = NULL;
            }

            goto fail;
        }
        mFramesWithCameraAdapterMap.removeItem((buffer_handle_t *) mBuffers[i].opaque);
        //LOCK UNLOCK TO GET YUV POINTERS
        android_ycbcr ycbcr = android_ycbcr();
        mapper.lockYCbCr(*(buffer_handle_t *) mBuffers[i].opaque, CAMHAL_GRALLOC_USAGE, bounds, &ycbcr);
        mBuffers[i].mapped = ycbcr.y;
        mBuffers[i].ycbcr = ycbcr;
        mFrameProvider->addFramePointers(&mBuffers[i], &ycbcr);
        mapper.unlock(*(buffer_handle_t *) mBuffers[i].opaque);
    }

    mFirstInit = true;
    mFrameWidth = width;
    mFrameHeight = height;

    return mBuffers;

 fail:
    // need to cancel buffers if any were dequeued
    for (int start = 0; start < i && i > 0; start++) {
        status_t err = mANativeWindow->cancel_buffer(mANativeWindow,
                (buffer_handle_t *) mBuffers[start].opaque);
        if ( NO_ERROR != err ) {
            CAMHAL_LOGE("Surface::cancelBuffer failed w/ error 0x%08x", err);
            break;
        }
        mFramesWithCameraAdapterMap.removeItem((buffer_handle_t *) mBuffers[start].opaque);
    }

    freeBufferList(mBuffers);

    CAMHAL_LOGEA("Error occurred, performing cleanup");

    if ( NULL != mErrorNotifier.get() ) {
        mErrorNotifier->errorNotify(NO_MEMORY);
    }

    LOG_FUNCTION_NAME_EXIT;
    return NULL;

}
+
+CameraBuffer* ANativeWindowDisplayAdapter::getBufferList(int *numBufs) {
+ LOG_FUNCTION_NAME;
+ if (numBufs) *numBufs = -1;
+
+ return NULL;
+}
+
+uint32_t * ANativeWindowDisplayAdapter::getOffsets()
+{
+ const int lnumBufs = mBufferCount;
+
+ LOG_FUNCTION_NAME;
+
+ // TODO(XXX): Need to remove getOffsets from the API. No longer needed
+
+ if ( NULL == mANativeWindow )
+ {
+ CAMHAL_LOGEA("mANativeWindow reference is missing");
+ goto fail;
+ }
+
+ if( mBuffers == NULL)
+ {
+ CAMHAL_LOGEA("Buffers not allocated yet!!");
+ goto fail;
+ }
+
+ if(mOffsetsMap == NULL)
+ {
+ mOffsetsMap = new uint32_t[lnumBufs];
+ for(int i = 0; i < mBufferCount; i++)
+ {
+ mOffsetsMap[i] = 0;
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return mOffsetsMap;
+
+ fail:
+
+ if ( NULL != mOffsetsMap )
+ {
+ delete [] mOffsetsMap;
+ mOffsetsMap = NULL;
+ }
+
+ if ( NULL != mErrorNotifier.get() ) {
+ mErrorNotifier->errorNotify(INVALID_OPERATION);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NULL;
+}
+
+status_t ANativeWindowDisplayAdapter::minUndequeueableBuffers(int& undequeueable) {
+ LOG_FUNCTION_NAME;
+ status_t ret = NO_ERROR;
+
+ if(!mANativeWindow) {
+ ret = INVALID_OPERATION;
+ goto end;
+ }
+
+ ret = mANativeWindow->get_min_undequeued_buffer_count(mANativeWindow, &undequeueable);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("get_min_undequeued_buffer_count failed: %s (%d)", strerror(-ret), -ret);
+
+ if ( NO_INIT == ret ) {
+ CAMHAL_LOGEA("Preview surface abandoned!");
+ mANativeWindow = NULL;
+ }
+
+ return ret;
+ }
+
+ end:
+ return ret;
+ LOG_FUNCTION_NAME_EXIT;
+
+}
+
+status_t ANativeWindowDisplayAdapter::maxQueueableBuffers(unsigned int& queueable)
+{
+ LOG_FUNCTION_NAME;
+ status_t ret = NO_ERROR;
+ int undequeued = 0;
+
+ if(mBufferCount == 0)
+ {
+ ret = INVALID_OPERATION;
+ goto end;
+ }
+
+ ret = minUndequeueableBuffers(undequeued);
+ if (ret != NO_ERROR) {
+ goto end;
+ }
+
+ queueable = mBufferCount - undequeued;
+
+ end:
+ return ret;
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+int ANativeWindowDisplayAdapter::getFd()
+{
+ LOG_FUNCTION_NAME;
+
+ if(mFD == -1)
+ {
+ buffer_handle_t *handle = (buffer_handle_t *)mBuffers[0].opaque;
+ IMG_native_handle_t *img = (IMG_native_handle_t *)handle;
+ // TODO: should we dup the fd? not really necessary and another thing for ANativeWindow
+ // to manage and close...
+
+ mFD = dup(img->fd[0]);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return mFD;
+
+}
+
+status_t ANativeWindowDisplayAdapter::returnBuffersToWindow()
+{
+ status_t ret = NO_ERROR;
+
+ android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
+ //Give the buffers back to display here - sort of free it
+ if (mANativeWindow)
+ for(unsigned int i = 0; i < mFramesWithCameraAdapterMap.size(); i++) {
+ int value = mFramesWithCameraAdapterMap.valueAt(i);
+ buffer_handle_t *handle = (buffer_handle_t *) mBuffers[value].opaque;
+
+ // if buffer index is out of bounds skip
+ if ((value < 0) || (value >= mBufferCount)) {
+ CAMHAL_LOGEA("Potential out bounds access to handle...skipping");
+ continue;
+ }
+
+ if (!mUseExternalBufferLocking) {
+ // unlock buffer before giving it up
+ mapper.unlock(*handle);
+ }
+
+ ret = mANativeWindow->cancel_buffer(mANativeWindow, handle);
+ if ( NO_INIT == ret ) {
+ CAMHAL_LOGEA("Preview surface abandoned!");
+ mANativeWindow = NULL;
+ return ret;
+ } else if ( NO_ERROR != ret ) {
+ CAMHAL_LOGE("Surface::cancelBuffer() failed: %s (%d)",
+ strerror(-ret),
+ -ret);
+ return ret;
+ }
+ }
+ else
+ CAMHAL_LOGE("mANativeWindow is NULL");
+
+ ///Clear the frames with camera adapter map
+ mFramesWithCameraAdapterMap.clear();
+
+ return ret;
+
+}
+
/**
 * Releases the buffer table: returns window-held buffers, frees the
 * CameraBuffer array and the offsets map, and closes the duplicated fd.
 *
 * NOTE(review): when `buflist != mBuffers`, mBuffers is deleted and nulled
 * BEFORE returnBuffersToWindow() runs; if the adapter map is non-empty at
 * that point returnBuffersToWindow() would index the (now NULL) mBuffers.
 * The existing FIXME below hints at the same confusion -- worth auditing.
 */
int ANativeWindowDisplayAdapter::freeBufferList(CameraBuffer * buflist)
{
    LOG_FUNCTION_NAME;

    status_t ret = NO_ERROR;

    android::AutoMutex lock(mLock);

    // Defensive: callers are expected to pass back the exact array that
    // allocateBufferList() returned.
    if(mBuffers != buflist)
    {
        CAMHAL_LOGEA("CameraHal passed wrong set of buffers to free!!!");
        if (mBuffers != NULL)
            delete []mBuffers;
        mBuffers = NULL;
    }

    /* FIXME this will probably want the list that was just deleted */
    returnBuffersToWindow();

    if ( NULL != buflist )
    {
        delete [] buflist;
        mBuffers = NULL;
    }

    // Redundant in the normal path (mBuffers already nulled above), kept
    // as a belt-and-braces cleanup.
    if( mBuffers != NULL)
    {
        delete [] mBuffers;
        mBuffers = NULL;
    }

    if ( NULL != mOffsetsMap )
    {
        delete [] mOffsetsMap;
        mOffsetsMap = NULL;
    }

    if( mFD != -1)
    {
        close(mFD); // close duped handle
        mFD = -1;
    }

    mFramesType.clear();

    return NO_ERROR;
}
+
+
/**
 * External buffering is not supported by this display adapter; buffers
 * always come from the attached ANativeWindow.
 */
bool ANativeWindowDisplayAdapter::supportsExternalBuffering()
{
    return false;
}
+
/**
 * Main loop of the display thread. Waits (with DISPLAY_TIMEOUT) on two
 * queues: HAL commands (mDisplayThread->msgQ(), handled by processHalMsg())
 * and frame-availability notifications (mDisplayQ, dummy messages posted by
 * PostFrame()). A dummy frame message triggers handleFrameReturn() while
 * the state is DISPLAY_STARTED. The loop exits when processHalMsg()
 * returns false (DISPLAY_EXIT) or the state becomes DISPLAY_EXITED.
 */
void ANativeWindowDisplayAdapter::displayThread()
{
    bool shouldLive = true;
    int timeout = 0;
    status_t ret;

    LOG_FUNCTION_NAME;

    while(shouldLive)
    {
        ret = Utils::MessageQueue::waitForMsg(&mDisplayThread->msgQ()
                , &mDisplayQ
                , NULL
                , ANativeWindowDisplayAdapter::DISPLAY_TIMEOUT);

        if ( !mDisplayThread->msgQ().isEmpty() )
        {
            ///Received a message from CameraHal, process it
            shouldLive = processHalMsg();

        }
        else if( !mDisplayQ.isEmpty())
        {
            if ( mDisplayState== ANativeWindowDisplayAdapter::DISPLAY_INIT )
            {

                ///If display adapter is not started, continue
                continue;

            }
            else
            {
                Utils::Message msg;
                ///Get the dummy msg from the displayQ
                if(mDisplayQ.get(&msg)!=NO_ERROR)
                {
                    CAMHAL_LOGEA("Error in getting message from display Q");
                    continue;
                }

                // There is a frame from ANativeWindow for us to dequeue
                // We dequeue and return the frame back to Camera adapter
                if(mDisplayState == ANativeWindowDisplayAdapter::DISPLAY_STARTED)
                {
                    handleFrameReturn();
                }

                if (mDisplayState == ANativeWindowDisplayAdapter::DISPLAY_EXITED)
                {
                    ///we exit the thread even though there are frames still to dequeue. They will be dequeued
                    ///in disableDisplay
                    shouldLive = false;
                }
            }
        }
    }

    LOG_FUNCTION_NAME_EXIT;
}
+
+
/**
 * Handles one command from the CameraHal message queue and updates
 * mDisplayState accordingly (START/STOP/EXIT). If the message carries a
 * semaphore in arg1, it is signalled once the command is processed so the
 * sender's synchronous handshake (enableDisplay/disableDisplay/destructor)
 * can complete.
 *
 * @return false only for DISPLAY_EXIT (terminates displayThread()),
 *         true otherwise.
 */
bool ANativeWindowDisplayAdapter::processHalMsg()
{
    Utils::Message msg;

    LOG_FUNCTION_NAME;


    mDisplayThread->msgQ().get(&msg);
    bool ret = true, invalidCommand = false;

    switch ( msg.command )
    {

        case DisplayThread::DISPLAY_START:

            CAMHAL_LOGDA("Display thread received DISPLAY_START command from Camera HAL");
            mDisplayState = ANativeWindowDisplayAdapter::DISPLAY_STARTED;

            break;

        case DisplayThread::DISPLAY_STOP:

            ///@bug There is no API to disable SF without destroying it
            ///@bug Buffers might still be w/ display and will get displayed
            ///@remarks Ideal seqyence should be something like this
            ///mOverlay->setParameter("enabled", false);
            CAMHAL_LOGDA("Display thread received DISPLAY_STOP command from Camera HAL");
            mDisplayState = ANativeWindowDisplayAdapter::DISPLAY_STOPPED;

            // flush frame message queue so stale frame notifications are
            // not serviced after the stop
            while ( !mDisplayQ.isEmpty() ) {
                Utils::Message message;
                mDisplayQ.get(&message);
            }

            break;

        case DisplayThread::DISPLAY_EXIT:

            CAMHAL_LOGDA("Display thread received DISPLAY_EXIT command from Camera HAL.");
            CAMHAL_LOGDA("Stopping display thread...");
            mDisplayState = ANativeWindowDisplayAdapter::DISPLAY_EXITED;
            ///Note that the SF can have pending buffers when we disable the display
            ///This is normal and the expectation is that they may not be displayed.
            ///This is to ensure that the user experience is not impacted
            ret = false;
            break;

        default:

            CAMHAL_LOGEB("Invalid Display Thread Command 0x%x.", msg.command);
            invalidCommand = true;

            break;
    }

    ///Signal the semaphore if it is sent as part of the message
    if ( ( msg.arg1 ) && ( !invalidCommand ) )
    {

        CAMHAL_LOGDA("+Signalling display semaphore");
        Utils::Semaphore &sem = *((Utils::Semaphore*)msg.arg1);

        sem.Signal();

        CAMHAL_LOGDA("-Signalling display semaphore");
    }


    LOG_FUNCTION_NAME_EXIT;
    return ret;
}
+
+
+status_t ANativeWindowDisplayAdapter::PostFrame(ANativeWindowDisplayAdapter::DisplayFrame &dispFrame)
+{
+ status_t ret = NO_ERROR;
+ uint32_t actualFramesWithDisplay = 0;
+ android_native_buffer_t *buffer = NULL;
+ android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
+ int i;
+
+ ///@todo Do cropping based on the stabilized frame coordinates
+ ///@todo Insert logic to drop frames here based on refresh rate of
+ ///display or rendering rate whichever is lower
+ ///Queue the buffer to overlay
+
+ if ( NULL == mANativeWindow ) {
+ return NO_INIT;
+ }
+
+ if (!mBuffers || !dispFrame.mBuffer) {
+ CAMHAL_LOGEA("NULL sent to PostFrame");
+ return BAD_VALUE;
+ }
+
+ for ( i = 0; i < mBufferCount; i++ )
+ {
+ if ( dispFrame.mBuffer == &mBuffers[i] )
+ {
+ break;
+ }
+ }
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ if ( mMeasureStandby ) {
+ CameraHal::PPM("Standby to first shot: Sensor Change completed - ", &mStandbyToShot);
+ mMeasureStandby = false;
+ } else if (CameraFrame::CameraFrame::SNAPSHOT_FRAME == dispFrame.mType) {
+ CameraHal::PPM("Shot to snapshot: ", &mStartCapture);
+ mShotToShot = true;
+ } else if ( mShotToShot ) {
+ CameraHal::PPM("Shot to shot: ", &mStartCapture);
+ mShotToShot = false;
+ }
+
+#endif
+
+ android::AutoMutex lock(mLock);
+
+ mFramesType.add( (int)mBuffers[i].opaque, dispFrame.mType);
+
+ if ( mDisplayState == ANativeWindowDisplayAdapter::DISPLAY_STARTED &&
+ (!mPaused || CameraFrame::CameraFrame::SNAPSHOT_FRAME == dispFrame.mType) &&
+ !mSuspend)
+ {
+ uint32_t xOff, yOff;
+
+ CameraHal::getXYFromOffset(&xOff, &yOff, dispFrame.mOffset, PAGE_SIZE, mPixelFormat);
+
+ // Set crop only if current x and y offsets do not match with frame offsets
+ if ((mXOff != xOff) || (mYOff != yOff)) {
+ CAMHAL_LOGDB("offset = %u left = %d top = %d right = %d bottom = %d",
+ dispFrame.mOffset, xOff, yOff ,
+ xOff + mPreviewWidth, yOff + mPreviewHeight);
+
+ // We'll ignore any errors here, if the surface is
+ // already invalid, we'll know soon enough.
+ mANativeWindow->set_crop(mANativeWindow, xOff, yOff,
+ xOff + mPreviewWidth, yOff + mPreviewHeight);
+
+ // Update the current x and y offsets
+ mXOff = xOff;
+ mYOff = yOff;
+ }
+
+ {
+ buffer_handle_t *handle = (buffer_handle_t *) mBuffers[i].opaque;
+ if (!mUseExternalBufferLocking) {
+ // unlock buffer before sending to display
+ mapper.unlock(*handle);
+ }
+ ret = mANativeWindow->enqueue_buffer(mANativeWindow, handle);
+ }
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGE("Surface::queueBuffer returned error %d", ret);
+ }
+
+ mFramesWithCameraAdapterMap.removeItem((buffer_handle_t *) dispFrame.mBuffer->opaque);
+
+
+ // HWComposer has not minimum buffer requirement. We should be able to dequeue
+ // the buffer immediately
+ Utils::Message msg;
+ mDisplayQ.put(&msg);
+
+ }
+ else
+ {
+ buffer_handle_t *handle = (buffer_handle_t *) mBuffers[i].opaque;
+ if (!mUseExternalBufferLocking) {
+ // unlock buffer before giving it up
+ mapper.unlock(*handle);
+ }
+
+ // cancel buffer and dequeue another one
+ ret = mANativeWindow->cancel_buffer(mANativeWindow, handle);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGE("Surface::cancelBuffer returned error %d", ret);
+ }
+
+ mFramesWithCameraAdapterMap.removeItem((buffer_handle_t *) dispFrame.mBuffer->opaque);
+
+ Utils::Message msg;
+ mDisplayQ.put(&msg);
+ ret = NO_ERROR;
+ }
+
+ return ret;
+}
+
+
+bool ANativeWindowDisplayAdapter::handleFrameReturn()
+{
+ status_t err;
+ buffer_handle_t *buf;
+ int i = 0;
+ unsigned int k;
+ int stride; // dummy variable to get stride
+ android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
+ android::Rect bounds;
+ CameraFrame::FrameType frameType = CameraFrame::PREVIEW_FRAME_SYNC;
+
+ android_ycbcr ycbcr = android_ycbcr();
+
+ // TODO(XXX): Do we need to keep stride information in camera hal?
+
+ if ( NULL == mANativeWindow ) {
+ return false;
+ }
+
+ err = mANativeWindow->dequeue_buffer(mANativeWindow, &buf, &stride);
+ if (err != 0) {
+ CAMHAL_LOGE("Surface::dequeueBuffer failed: %s (%d)", strerror(-err), -err);
+
+ if ( NO_INIT == err ) {
+ CAMHAL_LOGEA("Preview surface abandoned!");
+ mANativeWindow = NULL;
+ }
+
+ return false;
+ }
+
+ err = mANativeWindow->lock_buffer(mANativeWindow, buf);
+ if ( NO_ERROR != err ) {
+ CAMHAL_LOGE("Surface::lockBuffer failed: %s (%d)", strerror(-err), -err);
+
+ if ( NO_INIT == err ) {
+ CAMHAL_LOGEA("Preview surface abandoned!");
+ mANativeWindow = NULL;
+ }
+
+ return false;
+ }
+
+ for(i = 0; i < mBufferCount; i++)
+ {
+ if (mBuffers[i].opaque == buf)
+ break;
+ }
+ if (i == mBufferCount) {
+ CAMHAL_LOGEB("Failed to find handle %p", buf);
+ }
+ if (!mUseExternalBufferLocking) {
+ // lock buffer before sending to FrameProvider for filling
+ bounds.left = 0;
+ bounds.top = 0;
+ bounds.right = mFrameWidth;
+ bounds.bottom = mFrameHeight;
+
+ int lock_try_count = 0;
+ while (mapper.lockYCbCr(*(buffer_handle_t *) mBuffers[i].opaque, CAMHAL_GRALLOC_USAGE, bounds, &ycbcr) < 0){
+ if (++lock_try_count > LOCK_BUFFER_TRIES){
+ if ( NULL != mErrorNotifier.get() ){
+ mErrorNotifier->errorNotify(CAMERA_ERROR_UNKNOWN);
+ }
+ return false;
+ }
+ CAMHAL_LOGEA("Gralloc Lock FrameReturn Error: Sleeping 15ms");
+ usleep(15000);
+ }
+ }
+
+ {
+ android::AutoMutex lock(mLock);
+ mFramesWithCameraAdapterMap.add((buffer_handle_t *) mBuffers[i].opaque, i);
+
+ for( k = 0; k < mFramesType.size() ; k++) {
+ if(mFramesType.keyAt(k) == (int)mBuffers[i].opaque)
+ break;
+ }
+
+ if ( k == mFramesType.size() ) {
+ CAMHAL_LOGE("Frame type for preview buffer 0%x not found!!", mBuffers[i].opaque);
+ return false;
+ }
+
+ frameType = (CameraFrame::FrameType) mFramesType.valueAt(k);
+ mFramesType.removeItem((int) mBuffers[i].opaque);
+ }
+
+ CAMHAL_LOGVB("handleFrameReturn: found graphic buffer %d of %d", i, mBufferCount-1);
+ mFrameProvider->returnFrame(&mBuffers[i], frameType);
+
+ return true;
+}
+
+void ANativeWindowDisplayAdapter::frameCallbackRelay(CameraFrame* caFrame)
+{
+
+ if ( NULL != caFrame )
+ {
+ if ( NULL != caFrame->mCookie )
+ {
+ ANativeWindowDisplayAdapter *da = (ANativeWindowDisplayAdapter*) caFrame->mCookie;
+ da->frameCallback(caFrame);
+ }
+ else
+ {
+ CAMHAL_LOGEB("Invalid Cookie in Camera Frame = %p, Cookie = %p", caFrame, caFrame->mCookie);
+ }
+ }
+ else
+ {
+ CAMHAL_LOGEB("Invalid Camera Frame = %p", caFrame);
+ }
+
+}
+
+void ANativeWindowDisplayAdapter::frameCallback(CameraFrame* caFrame)
+{
+ ///Call queueBuffer of overlay in the context of the callback thread
+
+ DisplayFrame df;
+ df.mBuffer = caFrame->mBuffer;
+ df.mType = (CameraFrame::FrameType) caFrame->mFrameType;
+ df.mOffset = caFrame->mOffset;
+ df.mWidthStride = caFrame->mAlignment;
+ df.mLength = caFrame->mLength;
+ df.mWidth = caFrame->mWidth;
+ df.mHeight = caFrame->mHeight;
+ PostFrame(df);
+}
+
/**
 * Toggles external buffer locking. When true, the adapter skips its own
 * gralloc unlock/lock calls (see the mUseExternalBufferLocking checks in
 * allocateBufferList/PostFrame/handleFrameReturn/returnBuffersToWindow)
 * and the buffer consumer is expected to manage locking instead.
 */
void ANativeWindowDisplayAdapter::setExternalLocking(bool extBuffLocking)
{
    mUseExternalBufferLocking = extBuffLocking;
}
+
+/*--------------------ANativeWindowDisplayAdapter Class ENDS here-----------------------------*/
+
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/Android.mk b/camera/Android.mk
new file mode 100644
index 0000000..1fb2780
--- /dev/null
+++ b/camera/Android.mk
@@ -0,0 +1,146 @@
+LOCAL_PATH:= $(call my-dir)
+# Builds the camera.<platform> HAL module for TI Jacinto6 (USB/V4L2 camera adapter).
+include $(LOCAL_PATH)/../android-api.mk
+
+ifeq ($(TARGET_BOARD_PLATFORM), $(filter $(TARGET_BOARD_PLATFORM), jacinto6))
+OMAP4_CAMERA_HAL_USES:= USB
+
+CAMERAHAL_CFLAGS += $(ANDROID_API_CFLAGS)
+
+#ifdef TI_CAMERAHAL_DEBUG_ENABLED -- NOTE: '#' comments out this conditional, so the flag below is ALWAYS set
+    # Enable CameraHAL debug logs
+    CAMERAHAL_CFLAGS += -DCAMERAHAL_DEBUG
+#endif
+
+#ifdef TI_CAMERAHAL_VERBOSE_DEBUG_ENABLED -- NOTE: '#' comments out this conditional, so the flag below is ALWAYS set
+    # Enable CameraHAL verbose debug logs
+    CAMERAHAL_CFLAGS += -DCAMERAHAL_DEBUG_VERBOSE
+#endif
+
+ifdef TI_CAMERAHAL_DEBUG_FUNCTION_NAMES
+    # Enable CameraHAL function enter/exit logging
+    CAMERAHAL_CFLAGS += -DTI_UTILS_FUNCTION_LOGGER_ENABLE
+endif
+
+ifdef TI_CAMERAHAL_DEBUG_TIMESTAMPS
+    # Enable timestamp logging
+    CAMERAHAL_CFLAGS += -DTI_UTILS_DEBUG_USE_TIMESTAMPS
+endif
+
+ifndef TI_CAMERAHAL_DONT_USE_RAW_IMAGE_SAVING
+    # Enable saving RAW images to file
+    CAMERAHAL_CFLAGS += -DCAMERAHAL_USE_RAW_IMAGE_SAVING
+endif
+
+ifdef TI_CAMERAHAL_PROFILING
+    # Enable OMX Camera component profiling
+    CAMERAHAL_CFLAGS += -DCAMERAHAL_OMX_PROFILING
+endif
+
+ifeq ($(ENHANCED_DOMX),true)
+    CAMERAHAL_CFLAGS += -DENHANCED_DOMX
+endif
+
+ifdef ARCH_ARM_HAVE_NEON
+    CAMERAHAL_CFLAGS += -DARCH_ARM_HAVE_NEON
+endif
+
+CAMERAHAL_CFLAGS += -DLOG_TAG=\"CameraHal\" -DSUPPORT_ANDROID_FRAMEBUFFER_HAL -DSUPPORT_ANDROID_MEMTRACK_HAL
+
+TI_CAMERAHAL_COMMON_INCLUDES := \
+    $(LOCAL_PATH)/../hwcomposer \
+    hardware/libhardware/include/hardware \
+    hardware/ti/dra7xx/camera/inc \
+    system/media/camera/include \
+    external/jpeg \
+    external/jhead \
+    external/libdrm/include/drm \
+    external/libdrm/omap \
+    external/libdrm \
+    $(LOCAL_PATH)/../libtiutils \
+
+TI_CAMERAHAL_COMMON_INCLUDES += \
+    frameworks/native/include/media/hardware \
+    frameworks/native/include/media/openmax
+
+TI_CAMERAHAL_COMMON_SRC := \
+    CameraHal_Module.cpp \
+    CameraHal.cpp \
+    CameraHalUtilClasses.cpp \
+    AppCallbackNotifier.cpp \
+    ANativeWindowDisplayAdapter.cpp \
+    BufferSourceAdapter.cpp \
+    CameraProperties.cpp \
+    BaseCameraAdapter.cpp \
+    DrmMemoryManager.cpp \
+    Encoder_libjpeg.cpp \
+    Decoder_libjpeg.cpp \
+    SensorListener.cpp \
+    NV12_resize.cpp \
+    CameraParameters.cpp \
+    TICameraParameters.cpp \
+    CameraHalCommon.cpp \
+    FrameDecoder.cpp \
+    SwFrameDecoder.cpp \
+    DecoderFactory.cpp
+
+TI_CAMERAHAL_USB_SRC := \
+    V4LCameraAdapter/V4LCameraAdapter.cpp \
+    V4LCameraAdapter/V4LCapabilities.cpp
+
+ifeq ($(TARGET_BOARD_PLATFORM), $(filter $(TARGET_BOARD_PLATFORM), jacinto6))
+TI_CAMERAHAL_USB_SRC += \
+    V4LCameraAdapter/V4LM2M.cpp
+CAMERAHAL_CFLAGS += -DUSE_V4LM2M
+endif
+
+ifdef OMAP_ENHANCEMENT_CPCAM
+TI_CAMERAHAL_COMMON_STATIC_LIBRARIES += \
+    libcpcamcamera_client
+endif
+
+
+ifeq ($(OMAP4_CAMERA_HAL_USES),USB)
+
+
+# ====================
+# USB Camera Adapter
+# --------------------
+
+include $(CLEAR_VARS)
+
+CAMERAHAL_CFLAGS += -DV4L_CAMERA_ADAPTER
+
+LOCAL_SRC_FILES:= \
+    $(TI_CAMERAHAL_COMMON_SRC) \
+    $(TI_CAMERAHAL_USB_SRC)
+
+LOCAL_C_INCLUDES += \
+    $(TI_CAMERAHAL_COMMON_INCLUDES) \
+    $(LOCAL_PATH)/inc/V4LCameraAdapter
+
+LOCAL_SHARED_LIBRARIES:= \
+    libui \
+    libbinder \
+    libutils \
+    libcutils \
+    libtiutils \
+    libdrm \
+    libdrm_omap \
+    libcamera_client \
+    libgui \
+    libjpeg \
+    libjhead
+
+LOCAL_STATIC_LIBRARIES := $(TI_CAMERAHAL_COMMON_STATIC_LIBRARIES)
+
+LOCAL_CFLAGS := -fno-short-enums -DCOPY_IMAGE_BUFFER $(CAMERAHAL_CFLAGS)
+
+LOCAL_MODULE_PATH := $(TARGET_OUT_SHARED_LIBRARIES)/hw
+LOCAL_MODULE:= camera.$(TARGET_BOARD_PLATFORM)
+LOCAL_MODULE_TAGS:= optional
+
+include $(BUILD_SHARED_LIBRARY)
+
+endif
+endif
diff --git a/camera/AppCallbackNotifier.cpp b/camera/AppCallbackNotifier.cpp
new file mode 100644
index 0000000..8bc10b1
--- /dev/null
+++ b/camera/AppCallbackNotifier.cpp
@@ -0,0 +1,2002 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "CameraHal.h"
+#include "VideoMetadata.h"
+#include "Encoder_libjpeg.h"
+#include <MetadataBufferType.h>
+#include <ui/GraphicBuffer.h>
+#include <ui/GraphicBufferMapper.h>
+#include "NV12_resize.h"
+#include "TICameraParameters.h"
+
+namespace Ti {
+namespace Camera {
+
+const int AppCallbackNotifier::NOTIFIER_TIMEOUT = -1;
+android::KeyedVector<void*, android::sp<Encoder_libjpeg> > gEncoderQueue;
+
+void AppCallbackNotifierEncoderCallback(void* main_jpeg,
+                                        void* thumb_jpeg,
+                                        CameraFrame::FrameType type,
+                                        void* cookie1,
+                                        void* cookie2,
+                                        void* cookie3,
+                                        void* cookie4,
+                                        bool canceled)
+{
+    // Encoder completion trampoline: cookie1 carries the AppCallbackNotifier instance.
+    if (cookie1 && !canceled) {
+        AppCallbackNotifier* cb = (AppCallbackNotifier*) cookie1;
+        cb->EncoderDoneCb(main_jpeg, thumb_jpeg, type, cookie2, cookie3, cookie4);
+    }
+    // This callback owns the Encoder_libjpeg::params structs; free them here.
+    if (main_jpeg) {
+        free(main_jpeg);
+    }
+    if (thumb_jpeg) {
+        if (((Encoder_libjpeg::params *) thumb_jpeg)->dst) {
+            free(((Encoder_libjpeg::params *) thumb_jpeg)->dst); // thumbnail dst was malloc'd in notifyFrame
+        }
+        free(thumb_jpeg);
+    }
+}
+
+/*--------------------NotificationHandler Class STARTS here-----------------------------*/
+
+void AppCallbackNotifier::EncoderDoneCb(void* main_jpeg, void* thumb_jpeg, CameraFrame::FrameType type, void* cookie1, void* cookie2, void *cookie3)
+{
+    camera_memory_t* encoded_mem = NULL;
+    Encoder_libjpeg::params *main_param = NULL, *thumb_param = NULL;
+    size_t jpeg_size;
+    uint8_t* src = NULL;
+    CameraBuffer *camera_buffer = NULL; // FIX: was uninitialized; the '!main_jpeg' path reaches 'exit' without setting it
+    android::sp<Encoder_libjpeg> encoder = NULL;
+    // Encoder completion handler: cookie1 = camera_memory_t with the encoded JPEG,
+    LOG_FUNCTION_NAME;
+    // cookie2 = optional ExifElementsTable, cookie3 = the source CameraBuffer.
+    camera_memory_t* picture = NULL;
+
+    {
+        android::AutoMutex lock(mLock);
+
+        if (!main_jpeg) {
+            goto exit;
+        }
+
+        encoded_mem = (camera_memory_t*) cookie1;
+        main_param = (Encoder_libjpeg::params *) main_jpeg;
+        jpeg_size = main_param->jpeg_size;
+        camera_buffer = (CameraBuffer *)cookie3;
+        src = main_param->src;
+
+        if(encoded_mem && encoded_mem->data && (jpeg_size > 0)) {
+            if (cookie2) {
+                ExifElementsTable* exif = (ExifElementsTable*) cookie2;
+                Section_t* exif_section = NULL;
+
+                exif->insertExifToJpeg((unsigned char*) encoded_mem->data, jpeg_size);
+
+                if(thumb_jpeg) {
+                    thumb_param = (Encoder_libjpeg::params *) thumb_jpeg;
+                    exif->insertExifThumbnailImage((const char*)thumb_param->dst,
+                                                   (int)thumb_param->jpeg_size);
+                }
+
+                exif_section = FindSection(M_EXIF);
+
+                if (exif_section) {
+                    picture = mRequestMemory(-1, jpeg_size + exif_section->Size, 1, NULL);
+                    if (picture && picture->data) {
+                        exif->saveJpeg((unsigned char*) picture->data, jpeg_size + exif_section->Size);
+                    }
+                }
+                delete exif;
+                cookie2 = NULL;
+            } else {
+                picture = mRequestMemory(-1, jpeg_size, 1, NULL);
+                if (picture && picture->data) {
+                    memcpy(picture->data, encoded_mem->data, jpeg_size);
+                }
+            }
+        }
+    } // scope for mutex lock
+
+    if (!mRawAvailable) {
+        dummyRaw();
+    } else {
+        mRawAvailable = false;
+    }
+
+    // Send the callback to the application only if the notifier is started and the message is enabled
+    if(picture && (mNotifierState==AppCallbackNotifier::NOTIFIER_STARTED) &&
+       (mCameraHal->msgTypeEnabled(CAMERA_MSG_COMPRESSED_IMAGE)))
+    {
+        android::AutoMutex lock(mBurstLock);
+
+#ifdef OMAP_ENHANCEMENT_BURST_CAPTURE
+        if ( mBurst )
+        {
+            mDataCb(CAMERA_MSG_COMPRESSED_BURST_IMAGE, picture, 0, NULL, mCallbackCookie);
+
+        }
+        else
+#endif
+        {
+            mDataCb(CAMERA_MSG_COMPRESSED_IMAGE, picture, 0, NULL, mCallbackCookie);
+        }
+    }
+
+    exit:
+
+    if (picture) {
+        picture->release(picture);
+    }
+
+    if (mNotifierState == AppCallbackNotifier::NOTIFIER_STARTED) {
+        if (encoded_mem) {
+            encoded_mem->release(encoded_mem);
+        }
+        if (cookie2) {
+            delete (ExifElementsTable*) cookie2;
+        }
+        encoder = gEncoderQueue.valueFor(src);
+        if (encoder.get()) {
+            gEncoderQueue.removeItem(src);
+            encoder.clear();
+        }
+        if (camera_buffer) mFrameProvider->returnFrame(camera_buffer, type); // FIX: guard the early-exit path where no buffer was attached
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ * NotificationHandler class
+ */
+
+///Initialization function for AppCallbackNotifier
+status_t AppCallbackNotifier::initialize()
+{
+    LOG_FUNCTION_NAME;
+    // Reset state, then spawn the notification thread that drains the msg/event/frame queues.
+    mPreviewMemory = 0;
+
+    mMeasurementEnabled = false;
+
+    mNotifierState = NOTIFIER_STOPPED;
+
+    ///Create the app notifier thread
+    mNotificationThread = new NotificationThread(this);
+    if(!mNotificationThread.get())
+    {
+        CAMHAL_LOGEA("Couldn't create Notification thread");
+        return NO_MEMORY;
+    }
+
+    ///Start the notification thread
+    status_t ret = mNotificationThread->run("NotificationThread", android::PRIORITY_URGENT_DISPLAY);
+    if(ret!=NO_ERROR)
+    {
+        CAMHAL_LOGEA("Couldn't run NotificationThread");
+        mNotificationThread.clear();
+        return ret;
+    }
+
+    mUseMetaDataBufferMode = true;
+    mRawAvailable = false;
+
+    mRecording = false;
+    mPreviewing = false;
+    mExternalLocking = false;
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+void AppCallbackNotifier::setCallbacks(CameraHal* cameraHal,
+                                       camera_notify_callback notify_cb,
+                                       camera_data_callback data_cb,
+                                       camera_data_timestamp_callback data_cb_timestamp,
+                                       camera_request_memory get_memory,
+                                       void *user)
+{
+    android::AutoMutex lock(mLock);
+    // Cache the framework's callback function pointers, allocator and cookie under mLock.
+    LOG_FUNCTION_NAME;
+
+    mCameraHal = cameraHal;
+    mNotifyCb = notify_cb;
+    mDataCb = data_cb;
+    mDataCbTimestamp = data_cb_timestamp;
+    mRequestMemory = get_memory;
+    mCallbackCookie = user;
+
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+void AppCallbackNotifier::setMeasurements(bool enable)
+{
+    android::AutoMutex lock(mLock);
+
+    LOG_FUNCTION_NAME;
+    // Record whether measurement (frame-data) mode is on.
+    mMeasurementEnabled = enable;
+    // NOTE(review): notification is only ever enabled here, never disabled when 'enable' is false - confirm intended.
+    if ( enable )
+    {
+        mFrameProvider->enableFrameNotification(CameraFrame::FRAME_DATA_SYNC);
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+
+//All sub-components of Camera HAL call this whenever any error happens
+void AppCallbackNotifier::errorNotify(int error)
+{
+    LOG_FUNCTION_NAME;
+
+    CAMHAL_LOGEB("AppCallbackNotifier received error %d", error);
+
+    // If it is a fatal error abort here!
+    // If TILER is Out of memory we notify Mediaserver so that Memory is cleared and we can restart usecase
+    if((error == CAMERA_ERROR_FATAL) || (error == CAMERA_ERROR_HARD) || (error == -ENOMEM))
+    {
+        //We kill media server if we encounter these errors as there is
+        //no point continuing and apps also don't handle errors other
+        //than media server death always.
+        abort();
+        return;
+    }
+    // Non-fatal: report a generic CAMERA_ERROR_UNKNOWN to the app (the specific code is only logged).
+    if ( ( NULL != mCameraHal ) &&
+         ( NULL != mNotifyCb ) &&
+         ( mCameraHal->msgTypeEnabled(CAMERA_MSG_ERROR) ) )
+    {
+        CAMHAL_LOGEB("AppCallbackNotifier mNotifyCb %d", error);
+        mNotifyCb(CAMERA_MSG_ERROR, CAMERA_ERROR_UNKNOWN, 0, mCallbackCookie);
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+bool AppCallbackNotifier::notificationThread()
+{
+    bool shouldLive = true;
+    status_t ret;
+    // Notification-thread loop body: block until any of the three queues has work, then drain.
+    LOG_FUNCTION_NAME;
+
+    //CAMHAL_LOGDA("Notification Thread waiting for message");
+    ret = Utils::MessageQueue::waitForMsg(&mNotificationThread->msgQ(),
+                                          &mEventQ,
+                                          &mFrameQ,
+                                          AppCallbackNotifier::NOTIFIER_TIMEOUT);
+    // NOTE(review): 'ret' is not checked; the queues are polled below regardless.
+    //CAMHAL_LOGDA("Notification Thread received message");
+
+    if (mNotificationThread->msgQ().hasMsg()) {
+        ///Received a message from CameraHal, process it
+        CAMHAL_LOGDA("Notification Thread received message from Camera HAL");
+        shouldLive = processMessage();
+        if(!shouldLive) {
+            CAMHAL_LOGDA("Notification Thread exiting.");
+            return shouldLive;
+        }
+    }
+
+    if(mEventQ.hasMsg()) {
+        ///Received an event from one of the event providers
+        CAMHAL_LOGDA("Notification Thread received an event from event provider (CameraAdapter)");
+        notifyEvent();
+    }
+
+    if(mFrameQ.hasMsg()) {
+        ///Received a frame from one of the frame providers
+        //CAMHAL_LOGDA("Notification Thread received a frame from frame provider (CameraAdapter)");
+        notifyFrame();
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+    return shouldLive;
+}
+
+void AppCallbackNotifier::notifyEvent()
+{
+    /// Drain one event from mEventQ and forward it to the app as the matching CAMERA_MSG_* notification.
+    Utils::Message msg;
+    LOG_FUNCTION_NAME;
+    {
+        android::AutoMutex lock(mLock);
+        if ( !mEventQ.hasMsg() ) {
+            return;
+        } else {
+            mEventQ.get(&msg);
+        }
+    }
+    bool ret = true; // NOTE(review): never read
+    CameraHalEvent *evt = NULL;
+    CameraHalEvent::FocusEventData *focusEvtData;
+    CameraHalEvent::ZoomEventData *zoomEvtData;
+    CameraHalEvent::MetaEventData metaEvtData;
+
+    if(mNotifierState != AppCallbackNotifier::NOTIFIER_STARTED)
+    {
+        return;
+    }
+
+    switch(msg.command)
+    {
+        case AppCallbackNotifier::NOTIFIER_CMD_PROCESS_EVENT:
+            // msg.arg1 carries ownership of a heap-allocated CameraHalEvent (deleted below).
+            evt = ( CameraHalEvent * ) msg.arg1;
+
+            if ( NULL == evt )
+            {
+                CAMHAL_LOGEA("Invalid CameraHalEvent");
+                return;
+            }
+
+            switch(evt->mEventType)
+            {
+                case CameraHalEvent::EVENT_SHUTTER:
+
+                    if ( ( NULL != mCameraHal ) &&
+                         ( NULL != mNotifyCb ) &&
+                         ( mCameraHal->msgTypeEnabled(CAMERA_MSG_SHUTTER) ) )
+                    {
+                        mNotifyCb(CAMERA_MSG_SHUTTER, 0, 0, mCallbackCookie);
+                    }
+                    mRawAvailable = false;
+
+                    break;
+
+                case CameraHalEvent::EVENT_FOCUS_LOCKED:
+                case CameraHalEvent::EVENT_FOCUS_ERROR:
+                    if ( mCameraHal && mNotifyCb ) {
+                        focusEvtData = &evt->mEventData->focusEvent;
+
+                        switch ( focusEvtData->focusStatus ) {
+                            case CameraHalEvent::FOCUS_STATUS_SUCCESS:
+                                if ( mCameraHal->msgTypeEnabled(CAMERA_MSG_FOCUS) ) {
+                                    mCameraHal->disableMsgType(CAMERA_MSG_FOCUS);
+                                    mNotifyCb(CAMERA_MSG_FOCUS, true, 0, mCallbackCookie);
+                                }
+                                break;
+
+                            case CameraHalEvent::FOCUS_STATUS_FAIL:
+                                if ( mCameraHal->msgTypeEnabled(CAMERA_MSG_FOCUS) ) {
+                                    mCameraHal->disableMsgType(CAMERA_MSG_FOCUS);
+                                    mNotifyCb(CAMERA_MSG_FOCUS, false, 0, mCallbackCookie);
+                                }
+                                break;
+
+#ifdef ANDROID_API_JB_OR_LATER
+                            case CameraHalEvent::FOCUS_STATUS_PENDING:
+                                if ( mCameraHal->msgTypeEnabled(CAMERA_MSG_FOCUS_MOVE) ) {
+                                    mNotifyCb(CAMERA_MSG_FOCUS_MOVE, true, 0, mCallbackCookie);
+                                }
+                                break;
+
+                            case CameraHalEvent::FOCUS_STATUS_DONE:
+                                if ( mCameraHal->msgTypeEnabled(CAMERA_MSG_FOCUS_MOVE) ) {
+                                    mNotifyCb(CAMERA_MSG_FOCUS_MOVE, false, 0, mCallbackCookie);
+                                }
+                                break;
+#endif
+                        }
+                    }
+
+                    break;
+
+                case CameraHalEvent::EVENT_ZOOM_INDEX_REACHED:
+
+                    zoomEvtData = &evt->mEventData->zoomEvent;
+
+                    if ( ( NULL != mCameraHal ) &&
+                         ( NULL != mNotifyCb) &&
+                         ( mCameraHal->msgTypeEnabled(CAMERA_MSG_ZOOM) ) )
+                    {
+                        mNotifyCb(CAMERA_MSG_ZOOM, zoomEvtData->currentZoomIndex, zoomEvtData->targetZoomIndexReached, mCallbackCookie);
+                    }
+
+                    break;
+
+                case CameraHalEvent::EVENT_METADATA:
+
+                    metaEvtData = evt->mEventData->metadataEvent;
+
+                    if ( ( NULL != mCameraHal ) &&
+                         ( NULL != mNotifyCb) &&
+                         ( mCameraHal->msgTypeEnabled(CAMERA_MSG_PREVIEW_METADATA) ) )
+                    {
+                        // WA for an issue inside CameraService
+                        camera_memory_t *tmpBuffer = mRequestMemory(-1, 1, 1, NULL);
+
+                        mDataCb(CAMERA_MSG_PREVIEW_METADATA,
+                                tmpBuffer,
+                                0,
+                                metaEvtData->getMetadataResult(),
+                                mCallbackCookie);
+
+                        metaEvtData.clear();
+
+                        if ( NULL != tmpBuffer ) {
+                            tmpBuffer->release(tmpBuffer);
+                        }
+
+                    }
+
+                    break;
+
+                case CameraHalEvent::ALL_EVENTS:
+                    break;
+                default:
+                    break;
+            }
+
+            break;
+    }
+
+    if ( NULL != evt )
+    {
+        delete evt;
+    }
+
+
+    LOG_FUNCTION_NAME_EXIT;
+
+}
+
+static void alignYV12(int width,
+                      int height,
+                      size_t &yStride,
+                      size_t &uvStride,
+                      size_t &ySize,
+                      size_t &uvSize,
+                      size_t &size)
+{
+    yStride = ( width + 0xF ) & ~0xF;        // luma row stride: width rounded up to a multiple of 16
+    uvStride = ( yStride / 2 + 0xF ) & ~0xF; // chroma row stride: half the luma stride, 16-aligned (Android YV12 layout)
+    ySize = yStride * height;
+    uvSize = uvStride * height / 2;
+    size = ySize + uvSize * 2;               // total = Y plane + two chroma planes (Cr, Cb)
+}
+
+static void copy2Dto1D(void *dst,
+                       void *src,
+                       int width,
+                       int height,
+                       size_t stride,
+                       uint32_t offset,
+                       unsigned int bytesPerPixel,
+                       size_t length,
+                       const char *pixelFormat)
+{
+    unsigned int alignedRow, row;
+    unsigned char *bufferDst, *bufferSrc;
+    unsigned char *bufferDstEnd, *bufferSrcEnd;
+    uint16_t *bufferSrc_UV;
+    // 'src' is really a pair of plane pointers: y_uv[0] = luma (Y), y_uv[1] = chroma (UV).
+    unsigned int *y_uv = (unsigned int *)src;
+
+    CAMHAL_LOGVB("copy2Dto1D() y= %p ; uv=%p.",y_uv[0], y_uv[1]);
+    CAMHAL_LOGVB("pixelFormat = %s; offset=%d",pixelFormat,offset);
+    // Each branch below copies the stride-padded 2D source into the packed 1D destination.
+    if (pixelFormat!=NULL) {
+        if (strcmp(pixelFormat, android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
+            bytesPerPixel = 2;
+            bufferSrc = ( unsigned char * ) y_uv[0] + offset;
+            uint32_t xOff = offset % stride;
+            uint32_t yOff = offset / stride;
+            uint8_t *bufferSrcUV = ((uint8_t*)y_uv[1] + (stride/2)*yOff + xOff);
+            uint8_t *bufferSrcUVEven = bufferSrcUV;
+
+            uint8_t *bufferDstY = ( uint8_t * ) dst;
+            uint8_t *bufferDstU = bufferDstY + 1;
+            uint8_t *bufferDstV = bufferDstY + 3;
+
+            // going to convert from NV12 here and return
+            for ( int i = 0 ; i < height; i ++ ) {
+                for ( int j = 0 ; j < width / 2 ; j++ ) {
+
+                    // Y
+                    *bufferDstY = *bufferSrc;
+                    bufferSrc++;
+                    bufferDstY += 2;
+
+                    *bufferDstY = *bufferSrc;
+                    bufferSrc++;
+                    bufferDstY += 2;
+
+                    // V
+                    *bufferDstV = *(bufferSrcUV + 1);
+                    bufferDstV += 4;
+
+                    // U
+                    *bufferDstU = *bufferSrcUV;
+                    bufferDstU += 4;
+
+                    bufferSrcUV += 2;
+                }
+                if ( i % 2 ) {
+                    bufferSrcUV += ( stride - width);
+                    bufferSrcUVEven = bufferSrcUV;
+                } else {
+                    bufferSrcUV = bufferSrcUVEven;
+                }
+                bufferSrc += ( stride - width);
+            }
+
+            return;
+        } else if (strcmp(pixelFormat, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0 ||
+                   strcmp(pixelFormat, android::CameraParameters::PIXEL_FORMAT_YUV420P) == 0) {
+            bytesPerPixel = 1;
+            bufferDst = ( unsigned char * ) dst;
+            bufferDstEnd = ( unsigned char * ) dst + width*height*bytesPerPixel;
+            bufferSrc = ( unsigned char * ) y_uv[0] + offset;
+            bufferSrcEnd = ( unsigned char * ) ( ( size_t ) y_uv[0] + length + offset);
+            row = width*bytesPerPixel;
+            alignedRow = stride-width;
+            int stride_bytes = stride / 8;
+            uint32_t xOff = offset % stride;
+            uint32_t yOff = offset / stride;
+
+            // going to convert from NV12 here and return
+            // Step 1: Y plane: iterate through each row and copy
+            for ( int i = 0 ; i < height ; i++) {
+                memcpy(bufferDst, bufferSrc, row);
+                bufferSrc += stride;
+                bufferDst += row;
+                if ( ( bufferSrc > bufferSrcEnd ) || ( bufferDst > bufferDstEnd ) ) {
+                    break;
+                }
+            }
+
+            bufferSrc_UV = ( uint16_t * ) ((uint8_t*)y_uv[1] + (stride/2)*yOff + xOff);
+
+            if (strcmp(pixelFormat, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
+                uint16_t *bufferDst_UV;
+
+                // Step 2: UV plane: convert NV12 to NV21 by swapping U & V
+                bufferDst_UV = (uint16_t *) (((uint8_t*)dst)+row*height);
+
+#ifdef ARCH_ARM_HAVE_NEON
+                for (int i = 0 ; i < height/2 ; i++, bufferSrc_UV += alignedRow/2) {
+                    int n = width;
+                    asm volatile (
+                        " pld [%[src], %[src_stride], lsl #2] \n\t"
+                        " cmp %[n], #32 \n\t"
+                        " blt 1f \n\t"
+                        "0: @ 32 byte swap \n\t"
+                        " sub %[n], %[n], #32 \n\t"
+                        " vld2.8 {q0, q1} , [%[src]]! \n\t"
+                        " vswp q0, q1 \n\t"
+                        " cmp %[n], #32 \n\t"
+                        " vst2.8 {q0,q1},[%[dst]]! \n\t"
+                        " bge 0b \n\t"
+                        "1: @ Is there enough data? \n\t"
+                        " cmp %[n], #16 \n\t"
+                        " blt 3f \n\t"
+                        "2: @ 16 byte swap \n\t"
+                        " sub %[n], %[n], #16 \n\t"
+                        " vld2.8 {d0, d1} , [%[src]]! \n\t"
+                        " vswp d0, d1 \n\t"
+                        " cmp %[n], #16 \n\t"
+                        " vst2.8 {d0,d1},[%[dst]]! \n\t"
+                        " bge 2b \n\t"
+                        "3: @ Is there enough data? \n\t"
+                        " cmp %[n], #8 \n\t"
+                        " blt 5f \n\t"
+                        "4: @ 8 byte swap \n\t"
+                        " sub %[n], %[n], #8 \n\t"
+                        " vld2.8 {d0, d1} , [%[src]]! \n\t"
+                        " vswp d0, d1 \n\t"
+                        " cmp %[n], #8 \n\t"
+                        " vst2.8 {d0[0],d1[0]},[%[dst]]! \n\t"
+                        " bge 4b \n\t"
+                        "5: @ end \n\t"
+#ifdef NEEDS_ARM_ERRATA_754319_754320
+                        " vmov s0,s0 @ add noop for errata item \n\t"
+#endif
+                        : [dst] "+r" (bufferDst_UV), [src] "+r" (bufferSrc_UV), [n] "+r" (n)
+                        : [src_stride] "r" (stride_bytes)
+                        : "cc", "memory", "q0", "q1"
+                    );
+                }
+#else
+                for ( int i = 0; i < height/2; ++i ) {
+                    for ( int j = 0; j < width/2; ++j ) {
+                        bufferDst_UV[j] = (bufferSrc_UV[j] >> 8) | (bufferSrc_UV[j] << 8);
+                    }
+                    bufferSrc_UV += stride/bytesPerPixel/2;
+                    bufferDst_UV += width/2;
+                }
+#endif
+            } else if (strcmp(pixelFormat, android::CameraParameters::PIXEL_FORMAT_YUV420P) == 0) {
+                // Step 2: UV plane: convert NV12 to YV12 by de-interleaving U & V
+                // TODO(XXX): This version of CameraHal assumes NV12 format it set at
+                //            camera adapter to support YV12. Need to address for
+                //            USBCamera
+
+                size_t yStride, uvStride, ySize, uvSize, size;
+                alignYV12(width, height, yStride, uvStride, ySize, uvSize, size);
+
+                uint16_t *bufferDst_V = (uint16_t *) (((uint8_t*)dst) + ySize);
+                uint16_t *bufferDst_U = (uint16_t *) (((uint8_t*)dst) + ySize + uvSize);
+
+#ifdef ARCH_ARM_HAVE_NEON
+                int inc = (uvStride - width/2)/2;
+
+                for (int i = 0 ; i < height/2 ; i++, bufferSrc_UV += alignedRow/2) {
+                    int n = width;
+                    asm volatile (
+                        " pld [%[src], %[src_stride], lsl #2] \n\t"
+                        " cmp %[n], #32 \n\t"
+                        " blt 1f \n\t"
+                        "0: @ 32 byte swap \n\t"
+                        " sub %[n], %[n], #32 \n\t"
+                        " vld2.8 {q0, q1} , [%[src]]! \n\t"
+                        " cmp %[n], #32 \n\t"
+                        " vst1.8 {q1},[%[dst_v]]! \n\t"
+                        " vst1.8 {q0},[%[dst_u]]! \n\t"
+                        " bge 0b \n\t"
+                        "1: @ Is there enough data? \n\t"
+                        " cmp %[n], #16 \n\t"
+                        " blt 3f \n\t"
+                        "2: @ 16 byte swap \n\t"
+                        " sub %[n], %[n], #16 \n\t"
+                        " vld2.8 {d0, d1} , [%[src]]! \n\t"
+                        " cmp %[n], #16 \n\t"
+                        " vst1.8 {d1},[%[dst_v]]! \n\t"
+                        " vst1.8 {d0},[%[dst_u]]! \n\t"
+                        " bge 2b \n\t"
+                        "3: @ Is there enough data? \n\t"
+                        " cmp %[n], #8 \n\t"
+                        " blt 5f \n\t"
+                        "4: @ 8 byte swap \n\t"
+                        " sub %[n], %[n], #8 \n\t"
+                        " vld2.8 {d0, d1} , [%[src]]! \n\t"
+                        " cmp %[n], #8 \n\t"
+                        " vst1.8 {d1[0]},[%[dst_v]]! \n\t"
+                        " vst1.8 {d0[0]},[%[dst_u]]! \n\t"
+                        " bge 4b \n\t"
+                        "5: @ end \n\t"
+#ifdef NEEDS_ARM_ERRATA_754319_754320
+                        " vmov s0,s0 @ add noop for errata item \n\t"
+#endif
+                        : [dst_u] "+r" (bufferDst_U), [dst_v] "+r" (bufferDst_V),
+                          [src] "+r" (bufferSrc_UV), [n] "+r" (n)
+                        : [src_stride] "r" (stride_bytes)
+                        : "cc", "memory", "q0", "q1"
+                    );
+
+                    bufferDst_U += inc;
+                    bufferDst_V += inc;
+                }
+#else
+                uint8_t * udst = reinterpret_cast<uint8_t*>(bufferDst_V);
+                uint8_t * vdst = reinterpret_cast<uint8_t*>(bufferDst_U);
+
+                for ( int i = 0; i < height/2; ++i ) {
+                    for ( int j = 0; j < width/2; ++j ) {
+                        udst[j] = bufferSrc_UV[j] >> 8;
+                        vdst[j] = bufferSrc_UV[j] & 0x00ff;
+                    }
+                    bufferSrc_UV += stride/bytesPerPixel/2;
+                    udst += width/2;
+                    vdst += width/2;
+                }
+#endif
+            }
+            return ;
+
+        } else if(strcmp(pixelFormat, android::CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
+            bytesPerPixel = 2;
+        }
+    }
+
+    bufferDst = ( unsigned char * ) dst;
+    bufferSrc = ( unsigned char * ) y_uv[0];
+    row = width*bytesPerPixel;
+    alignedRow = ( row + ( stride -1 ) ) & ( ~ ( stride -1 ) );
+
+    //iterate through each row
+    for ( int i = 0 ; i < height ; i++, bufferSrc += alignedRow, bufferDst += row) {
+        memcpy(bufferDst, bufferSrc, row);
+    }
+}
+
+static void copyCroppedNV12(CameraFrame* frame, unsigned char *dst)
+{
+    unsigned int stride, width, height;
+    uint32_t offset, uvoffset;
+    size_t size;
+    // Copies the valid (cropped) region of a stride-padded NV12 frame into a tightly packed buffer.
+    CAMHAL_ASSERT(frame && dst);
+
+    offset = frame->mOffset;
+    stride = frame->mAlignment;
+    width = frame->mWidth;
+    height = frame->mHeight;
+    size = frame->mLength;
+    unsigned const char *src = (unsigned char *) frame->mBuffer->mapped;
+
+    // offset to beginning of uv plane
+    uvoffset = (offset + size) * 2 / 3;
+    // offset to beginning of valid region of uv plane
+    uvoffset += (offset - (offset % stride)) / 2 + (offset % stride);
+
+    // start of valid luma region
+    unsigned const char *luma = src + offset;
+    // start of valid chroma region
+    unsigned const char *chroma = src + uvoffset;
+
+    // copy luma and chroma line x line
+    for (unsigned int i = 0; i < height; i++) {
+        memcpy(dst, luma, width);
+        luma += stride;
+        dst += width;
+    }
+    for (unsigned int i = 0; i < height / 2; i++) {
+        memcpy(dst, chroma, width);
+        chroma += stride;
+        dst += width;
+    }
+}
+
+void AppCallbackNotifier::copyAndSendPictureFrame(CameraFrame* frame, int32_t msgType)
+{
+    camera_memory_t* picture = NULL;
+    void *dest = NULL, *src = NULL;
+    // Copy a still-capture frame into app-requested memory and deliver it via mDataCb(msgType).
+    // scope for lock
+    if (mCameraHal->msgTypeEnabled(msgType)) {
+        android::AutoMutex lock(mLock);
+
+        if(mNotifierState != AppCallbackNotifier::NOTIFIER_STARTED) {
+            goto exit;
+        }
+
+        if (frame->mBuffer->format &&
+            (strcmp(frame->mBuffer->format, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) &&
+            (frame->mAlignment != frame->mWidth) &&
+            ( msgType == CAMERA_MSG_RAW_IMAGE )) {
+            size_t size;
+
+            size = CameraHal::calculateBufferSize(frame->mBuffer->format, frame->mWidth, frame->mHeight);
+            picture = mRequestMemory(-1, size, 1, NULL);
+            if (picture && picture->data) {
+                copyCroppedNV12(frame, (unsigned char*) picture->data);
+            }
+        } else {
+            picture = mRequestMemory(-1, frame->mLength, 1, NULL);
+
+            if (NULL != picture) {
+                dest = picture->data;
+                if (NULL != dest) {
+                    src = (void *) ((unsigned char *) frame->mBuffer->mapped + frame->mOffset); // FIX: byte arithmetic via char*; old '(unsigned int)' cast truncates pointers on 64-bit
+                    memcpy(dest, src, frame->mLength);
+                }
+            }
+        }
+    }
+
+    exit:
+    mFrameProvider->returnFrame(frame->mBuffer, (CameraFrame::FrameType) frame->mFrameType);
+
+    if(picture) {
+        if((mNotifierState == AppCallbackNotifier::NOTIFIER_STARTED) &&
+           mCameraHal->msgTypeEnabled(msgType)) {
+            mDataCb(msgType, picture, 0, NULL, mCallbackCookie);
+        }
+        picture->release(picture);
+    }
+}
+
+void AppCallbackNotifier::lockBufferAndUpdatePtrs(CameraFrame* frame)
+{
+    android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
+    android::Rect bounds;
+    // Gralloc-lock the frame's buffer handle for CPU access and patch the frame's CPU pointers.
+    bounds.left = 0;
+    bounds.top = 0;
+    bounds.right = frame->mWidth;
+    bounds.bottom = frame->mHeight;
+    void *y_uv[2];
+    buffer_handle_t *handle = reinterpret_cast<buffer_handle_t *>(frame->mBuffer->opaque);
+    mapper.lock(*handle, CAMHAL_GRALLOC_USAGE, bounds, y_uv); // NOTE(review): lock() status is ignored
+    frame->mBuffer->mapped = y_uv[0];
+    frame->mYuv[0] = reinterpret_cast<int>(frame->mBuffer->mapped); // NOTE(review): pointer stored in int - assumes a 32-bit build
+    frame->mYuv[1] = frame->mYuv[0] + (frame->mLength + frame->mOffset)*2/3; // UV plane begins at 2/3 of the NV12 buffer
+}
+
+void AppCallbackNotifier::unlockBufferAndUpdatePtrs(CameraFrame* frame)
+{
+    android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get(); // inverse of lockBufferAndUpdatePtrs()
+    buffer_handle_t *handle = reinterpret_cast<buffer_handle_t *>(frame->mBuffer->opaque);
+    mapper.unlock(*handle); // release the gralloc CPU lock
+    frame->mBuffer->mapped = NULL; // clear stale CPU pointers so nothing dereferences an unlocked buffer
+    frame->mYuv[0] = NULL;
+    frame->mYuv[1] = NULL;
+}
+
+void AppCallbackNotifier::setExternalLocking(bool extBuffLocking)
+{
+    mExternalLocking = extBuffLocking; // when true, preview frames are gralloc-locked/unlocked around the copy (see copyAndSendPreviewFrame)
+}
+
+void AppCallbackNotifier::copyAndSendPreviewFrame(CameraFrame* frame, int32_t msgType)
+{
+    camera_memory_t* picture = NULL;
+    CameraBuffer * dest = NULL; bool locked = false; // FIX: track gralloc-lock state for the early-exit paths
+    // Copy the preview frame into the app-visible heap (mPreviewMemory) and fire the data callback.
+    // scope for lock
+    {
+        android::AutoMutex lock(mLock);
+
+        if(mNotifierState != AppCallbackNotifier::NOTIFIER_STARTED) {
+            goto exit;
+        }
+
+        if (!mPreviewMemory || !frame->mBuffer) {
+            CAMHAL_LOGDA("Error! One of the buffer is NULL");
+            goto exit;
+        }
+
+        dest = &mPreviewBuffers[mPreviewBufCount];
+        if (mExternalLocking) {
+            lockBufferAndUpdatePtrs(frame); locked = true; // FIX: remember the lock so we only unlock below when this ran
+        }
+        CAMHAL_LOGVB("%d:copy2Dto1D(%p, %p, %d, %d, %d, %d, %d,%s)",
+                     __LINE__,
+                     dest,
+                     frame->mBuffer,
+                     mPreviewWidth,
+                     mPreviewHeight,
+                     mPreviewStride,
+                     2,
+                     frame->mLength,
+                     mPreviewPixelFormat);
+
+        /* FIXME map dest */
+        if ( NULL != dest && dest->mapped != NULL ) {
+            // data sync frames don't need conversion
+            if (CameraFrame::FRAME_DATA_SYNC == frame->mFrameType) {
+                if ( (mPreviewMemory->size / MAX_BUFFERS) >= frame->mLength ) {
+                    memcpy(dest->mapped, (void*) frame->mBuffer->mapped, frame->mLength);
+                } else {
+                    memset(dest->mapped, 0, (mPreviewMemory->size / MAX_BUFFERS));
+                }
+            } else {
+                if ((NULL == frame->mYuv[0]) || (NULL == frame->mYuv[1])){
+                    CAMHAL_LOGEA("Error! One of the YUV Pointer is NULL");
+                    goto exit;
+                }
+                else{
+                    copy2Dto1D(dest->mapped,
+                               frame->mYuv,
+                               mPreviewWidth,
+                               mPreviewHeight,
+                               mPreviewStride,
+                               frame->mOffset,
+                               2,
+                               frame->mLength,
+                               mPreviewPixelFormat);
+                }
+            }
+        }
+    }
+
+    exit:
+    mFrameProvider->returnFrame(frame->mBuffer, (CameraFrame::FrameType) frame->mFrameType);
+
+    if((mNotifierState == AppCallbackNotifier::NOTIFIER_STARTED) &&
+       mCameraHal->msgTypeEnabled(msgType) &&
+       (dest != NULL) && (dest->mapped != NULL)) {
+        android::AutoMutex locker(mLock);
+        if ( mPreviewMemory )
+            mDataCb(msgType, mPreviewMemory, mPreviewBufCount, NULL, mCallbackCookie);
+    }
+
+    if (locked) { // FIX: was 'mExternalLocking' - early 'goto exit' paths skipped the lock, yet the handle was still gralloc-unlocked
+        unlockBufferAndUpdatePtrs(frame);
+    }
+
+    // increment for next buffer
+    mPreviewBufCount = (mPreviewBufCount + 1) % AppCallbackNotifier::MAX_BUFFERS;
+}
+
+status_t AppCallbackNotifier::dummyRaw()
+{
+    LOG_FUNCTION_NAME;
+    // Deliver a 1-byte placeholder RAW callback (or just the notify) so the app's RAW message sequence completes.
+    if ( NULL == mRequestMemory ) {
+        CAMHAL_LOGEA("Can't allocate memory for dummy raw callback!");
+        return NO_INIT;
+    }
+
+    if ( ( NULL != mCameraHal ) &&
+         ( NULL != mDataCb) &&
+         ( NULL != mNotifyCb ) ){
+
+        if ( mCameraHal->msgTypeEnabled(CAMERA_MSG_RAW_IMAGE) ) {
+            camera_memory_t *dummyRaw = mRequestMemory(-1, 1, 1, NULL);
+
+            if ( NULL == dummyRaw ) {
+                CAMHAL_LOGEA("Dummy raw buffer allocation failed!");
+                return NO_MEMORY;
+            }
+
+            mDataCb(CAMERA_MSG_RAW_IMAGE, dummyRaw, 0, NULL, mCallbackCookie);
+
+            dummyRaw->release(dummyRaw);
+        } else if ( mCameraHal->msgTypeEnabled(CAMERA_MSG_RAW_IMAGE_NOTIFY) ) {
+            mNotifyCb(CAMERA_MSG_RAW_IMAGE_NOTIFY, 0, 0, mCallbackCookie);
+        }
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return NO_ERROR;
+}
+
+void AppCallbackNotifier::notifyFrame()
+{
+ ///Receive and send the frame notifications to app
+ Utils::Message msg;
+ CameraFrame *frame;
+ android::MemoryHeapBase *heap;
+ android::MemoryBase *buffer = NULL;
+ android::sp<android::MemoryBase> memBase;
+ void *buf = NULL;
+
+ LOG_FUNCTION_NAME;
+
+ {
+ android::AutoMutex lock(mLock);
+ if(!mFrameQ.isEmpty()) {
+ mFrameQ.get(&msg);
+ } else {
+ return;
+ }
+ }
+
+ bool ret = true;
+
+ frame = NULL;
+ switch(msg.command)
+ {
+ case AppCallbackNotifier::NOTIFIER_CMD_PROCESS_FRAME:
+
+ frame = (CameraFrame *) msg.arg1;
+ if(!frame)
+ {
+ break;
+ }
+
+ if ( (CameraFrame::RAW_FRAME == frame->mFrameType )&&
+ ( NULL != mCameraHal ) &&
+ ( NULL != mDataCb) &&
+ ( NULL != mNotifyCb ) )
+ {
+
+ if ( mCameraHal->msgTypeEnabled(CAMERA_MSG_RAW_IMAGE) )
+ {
+#ifdef COPY_IMAGE_BUFFER
+ copyAndSendPictureFrame(frame, CAMERA_MSG_RAW_IMAGE);
+#else
+ //TODO: Find a way to map a Tiler buffer to a MemoryHeapBase
+#endif
+ }
+ else {
+ if ( mCameraHal->msgTypeEnabled(CAMERA_MSG_RAW_IMAGE_NOTIFY) ) {
+ mNotifyCb(CAMERA_MSG_RAW_IMAGE_NOTIFY, 0, 0, mCallbackCookie);
+ }
+ mFrameProvider->returnFrame(frame->mBuffer,
+ (CameraFrame::FrameType) frame->mFrameType);
+ }
+
+ mRawAvailable = true;
+
+ }
+ else if ( (CameraFrame::IMAGE_FRAME == frame->mFrameType) &&
+ (NULL != mCameraHal) &&
+ (NULL != mDataCb) &&
+ (CameraFrame::ENCODE_RAW_YUV422I_TO_JPEG & frame->mQuirks) )
+ {
+
+ int encode_quality = 100, tn_quality = 100;
+ int tn_width, tn_height;
+ unsigned int current_snapshot = 0;
+ Encoder_libjpeg::params *main_jpeg = NULL, *tn_jpeg = NULL;
+ void* exif_data = NULL;
+ const char *previewFormat = NULL;
+ camera_memory_t* raw_picture = mRequestMemory(-1, frame->mLength, 1, NULL);
+
+ if(raw_picture) {
+ buf = raw_picture->data;
+ }
+
+ android::CameraParameters parameters;
+ char *params = mCameraHal->getParameters();
+ const android::String8 strParams(params);
+ parameters.unflatten(strParams);
+
+ encode_quality = parameters.getInt(android::CameraParameters::KEY_JPEG_QUALITY);
+ if (encode_quality < 0 || encode_quality > 100) {
+ encode_quality = 100;
+ }
+
+ tn_quality = parameters.getInt(android::CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY);
+ if (tn_quality < 0 || tn_quality > 100) {
+ tn_quality = 100;
+ }
+
+ if (CameraFrame::HAS_EXIF_DATA & frame->mQuirks) {
+ exif_data = frame->mCookie2;
+ }
+
+ main_jpeg = (Encoder_libjpeg::params*)
+ malloc(sizeof(Encoder_libjpeg::params));
+
+ // Video snapshot with LDCNSF on adds a few bytes start offset
+ // and a few bytes on every line. They must be skipped.
+ int rightCrop = frame->mAlignment/2 - frame->mWidth;
+
+ CAMHAL_LOGDB("Video snapshot right crop = %d", rightCrop);
+ CAMHAL_LOGDB("Video snapshot offset = %d", frame->mOffset);
+
+ if (main_jpeg) {
+ main_jpeg->src = (uint8_t *)frame->mBuffer->mapped;
+ main_jpeg->src_size = frame->mLength;
+ main_jpeg->dst = (uint8_t*) buf;
+ main_jpeg->dst_size = frame->mLength;
+ main_jpeg->quality = encode_quality;
+ main_jpeg->in_width = frame->mAlignment/2; // use stride here
+ main_jpeg->in_height = frame->mHeight;
+ main_jpeg->out_width = frame->mAlignment/2;
+ main_jpeg->out_height = frame->mHeight;
+ main_jpeg->right_crop = rightCrop;
+ main_jpeg->start_offset = frame->mOffset;
+ if ( CameraFrame::FORMAT_YUV422I_UYVY & frame->mQuirks) {
+ main_jpeg->format = TICameraParameters::PIXEL_FORMAT_YUV422I_UYVY;
+ }
+ else { //if ( CameraFrame::FORMAT_YUV422I_YUYV & frame->mQuirks)
+ main_jpeg->format = android::CameraParameters::PIXEL_FORMAT_YUV422I;
+ }
+ }
+
+ tn_width = parameters.getInt(android::CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH);
+ tn_height = parameters.getInt(android::CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT);
+ previewFormat = parameters.getPreviewFormat();
+
+ if ((tn_width > 0) && (tn_height > 0) && ( NULL != previewFormat )) {
+ tn_jpeg = (Encoder_libjpeg::params*)
+ malloc(sizeof(Encoder_libjpeg::params));
+ // if malloc fails just keep going and encode main jpeg
+ if (!tn_jpeg) {
+ tn_jpeg = NULL;
+ }
+ }
+
+ if (tn_jpeg) {
+ int width, height;
+ parameters.getPreviewSize(&width,&height);
+ current_snapshot = (mPreviewBufCount + MAX_BUFFERS - 1) % MAX_BUFFERS;
+ tn_jpeg->src = (uint8_t *)mPreviewBuffers[current_snapshot].mapped;
+ tn_jpeg->src_size = mPreviewMemory->size / MAX_BUFFERS;
+ tn_jpeg->dst_size = CameraHal::calculateBufferSize(previewFormat,
+ tn_width,
+ tn_height);
+ tn_jpeg->dst = (uint8_t*) malloc(tn_jpeg->dst_size);
+ tn_jpeg->quality = tn_quality;
+ tn_jpeg->in_width = width;
+ tn_jpeg->in_height = height;
+ tn_jpeg->out_width = tn_width;
+ tn_jpeg->out_height = tn_height;
+ tn_jpeg->right_crop = 0;
+ tn_jpeg->start_offset = 0;
+ tn_jpeg->format = android::CameraParameters::PIXEL_FORMAT_YUV420SP;;
+ }
+
+ android::sp<Encoder_libjpeg> encoder = new Encoder_libjpeg(main_jpeg,
+ tn_jpeg,
+ AppCallbackNotifierEncoderCallback,
+ (CameraFrame::FrameType)frame->mFrameType,
+ this,
+ raw_picture,
+ exif_data, frame->mBuffer);
+ gEncoderQueue.add(frame->mBuffer->mapped, encoder);
+ encoder->run();
+ encoder.clear();
+ if (params != NULL)
+ {
+ mCameraHal->putParameters(params);
+ }
+ }
+ else if ( ( CameraFrame::IMAGE_FRAME == frame->mFrameType ) &&
+ ( NULL != mCameraHal ) &&
+ ( NULL != mDataCb) )
+ {
+
+ // CTS, MTS requirements: Every 'takePicture()' call
+ // who registers a raw callback should receive one
+ // as well. This is not always the case with
+ // CameraAdapters though.
+ if (!mCameraHal->msgTypeEnabled(CAMERA_MSG_RAW_IMAGE)) {
+ dummyRaw();
+ } else {
+ mRawAvailable = false;
+ }
+
+#ifdef COPY_IMAGE_BUFFER
+ {
+ android::AutoMutex lock(mBurstLock);
+#ifdef OMAP_ENHANCEMENT_BURST_CAPTURE
+ if ( mBurst )
+ {
+ copyAndSendPictureFrame(frame, CAMERA_MSG_COMPRESSED_BURST_IMAGE);
+ }
+ else
+#endif
+ {
+ copyAndSendPictureFrame(frame, CAMERA_MSG_COMPRESSED_IMAGE);
+ }
+ }
+#else
+ //TODO: Find a way to map a Tiler buffer to a MemoryHeapBase
+#endif
+ }
+ else if ( ( CameraFrame::VIDEO_FRAME_SYNC == frame->mFrameType ) &&
+ ( NULL != mCameraHal ) &&
+ ( NULL != mDataCb) &&
+ ( mCameraHal->msgTypeEnabled(CAMERA_MSG_VIDEO_FRAME) ) )
+ {
+ android::AutoMutex locker(mRecordingLock);
+ if(mRecording)
+ {
+ if(mUseMetaDataBufferMode)
+ {
+ camera_memory_t *videoMedatadaBufferMemory =
+ mVideoMetadataBufferMemoryMap.valueFor(frame->mBuffer->opaque);
+ video_metadata_t *videoMetadataBuffer = (video_metadata_t *) videoMedatadaBufferMemory->data;
+
+ if( (NULL == videoMedatadaBufferMemory) || (NULL == videoMetadataBuffer) || (NULL == frame->mBuffer) )
+ {
+ CAMHAL_LOGEA("Error! One of the video buffers is NULL");
+ break;
+ }
+
+ if ( mUseVideoBuffers )
+ {
+ CameraBuffer *vBuf = mVideoMap.valueFor(frame->mBuffer->opaque);
+ android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
+ android::Rect bounds;
+ bounds.left = 0;
+ bounds.top = 0;
+ bounds.right = mVideoWidth;
+ bounds.bottom = mVideoHeight;
+ if (mExternalLocking) {
+ lockBufferAndUpdatePtrs(frame);
+ }
+ void *y_uv[2];
+ mapper.lock((buffer_handle_t)vBuf, CAMHAL_GRALLOC_USAGE, bounds, y_uv);
+ y_uv[1] = y_uv[0] + mVideoHeight*4096;
+
+ structConvImage input = {frame->mWidth,
+ frame->mHeight,
+ 4096,
+ IC_FORMAT_YCbCr420_lp,
+ (mmByte *)frame->mYuv[0],
+ (mmByte *)frame->mYuv[1],
+ frame->mOffset};
+
+ structConvImage output = {mVideoWidth,
+ mVideoHeight,
+ 4096,
+ IC_FORMAT_YCbCr420_lp,
+ (mmByte *)y_uv[0],
+ (mmByte *)y_uv[1],
+ 0};
+
+ VT_resizeFrame_Video_opt2_lp(&input, &output, NULL, 0);
+ mapper.unlock((buffer_handle_t)vBuf->opaque);
+ if (mExternalLocking) {
+ unlockBufferAndUpdatePtrs(frame);
+ }
+ videoMetadataBuffer->metadataBufferType = (int) android::kMetadataBufferTypeCameraSource;
+ /* FIXME remove cast */
+ videoMetadataBuffer->handle = (void *)vBuf->opaque;
+ videoMetadataBuffer->offset = 0;
+ }
+ else
+ {
+ videoMetadataBuffer->metadataBufferType = (int) android::kMetadataBufferTypeCameraSource;
+ videoMetadataBuffer->handle = camera_buffer_get_omx_ptr(frame->mBuffer);
+ videoMetadataBuffer->offset = frame->mOffset;
+ }
+
+ CAMHAL_LOGVB("mDataCbTimestamp : frame->mBuffer=0x%x, videoMetadataBuffer=0x%x, videoMedatadaBufferMemory=0x%x",
+ frame->mBuffer->opaque, videoMetadataBuffer, videoMedatadaBufferMemory);
+
+ mDataCbTimestamp(frame->mTimestamp, CAMERA_MSG_VIDEO_FRAME,
+ videoMedatadaBufferMemory, 0, mCallbackCookie);
+ }
+ else
+ {
+ //TODO: Need to revisit this, should ideally be mapping the TILER buffer using mRequestMemory
+ camera_memory_t* fakebuf = mRequestMemory(-1, sizeof(buffer_handle_t), 1, NULL);
+ if( (NULL == fakebuf) || ( NULL == fakebuf->data) || ( NULL == frame->mBuffer))
+ {
+ CAMHAL_LOGEA("Error! One of the video buffers is NULL");
+ break;
+ }
+ if (mExternalLocking) {
+ lockBufferAndUpdatePtrs(frame);
+ }
+ *reinterpret_cast<buffer_handle_t*>(fakebuf->data) = reinterpret_cast<buffer_handle_t>(frame->mBuffer->mapped);
+ mDataCbTimestamp(frame->mTimestamp, CAMERA_MSG_VIDEO_FRAME, fakebuf, 0, mCallbackCookie);
+ fakebuf->release(fakebuf);
+ if (mExternalLocking) {
+ unlockBufferAndUpdatePtrs(frame);
+ }
+ }
+ }
+ }
+ else if(( CameraFrame::SNAPSHOT_FRAME == frame->mFrameType ) &&
+ ( NULL != mCameraHal ) &&
+ ( NULL != mDataCb) &&
+ ( NULL != mNotifyCb)) {
+ //When enabled, measurement data is sent instead of video data
+ if ( !mMeasurementEnabled ) {
+ copyAndSendPreviewFrame(frame, CAMERA_MSG_POSTVIEW_FRAME);
+ } else {
+ mFrameProvider->returnFrame(frame->mBuffer,
+ (CameraFrame::FrameType) frame->mFrameType);
+ }
+ }
+ else if ( ( CameraFrame::PREVIEW_FRAME_SYNC== frame->mFrameType ) &&
+ ( NULL != mCameraHal ) &&
+ ( NULL != mDataCb) &&
+ ( mCameraHal->msgTypeEnabled(CAMERA_MSG_PREVIEW_FRAME)) ) {
+ //When enabled, measurement data is sent instead of video data
+ if ( !mMeasurementEnabled ) {
+ copyAndSendPreviewFrame(frame, CAMERA_MSG_PREVIEW_FRAME);
+ } else {
+ mFrameProvider->returnFrame(frame->mBuffer,
+ (CameraFrame::FrameType) frame->mFrameType);
+ }
+ }
+ else if ( ( CameraFrame::FRAME_DATA_SYNC == frame->mFrameType ) &&
+ ( NULL != mCameraHal ) &&
+ ( NULL != mDataCb) &&
+ ( mCameraHal->msgTypeEnabled(CAMERA_MSG_PREVIEW_FRAME)) ) {
+ copyAndSendPreviewFrame(frame, CAMERA_MSG_PREVIEW_FRAME);
+ } else {
+ mFrameProvider->returnFrame(frame->mBuffer,
+ ( CameraFrame::FrameType ) frame->mFrameType);
+ CAMHAL_LOGDB("Frame type 0x%x is still unsupported!", frame->mFrameType);
+ }
+
+ break;
+
+ default:
+
+ break;
+
+ };
+
+exit:
+
+ if ( NULL != frame )
+ {
+ delete frame;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ * Static trampoline registered with the FrameProvider.
+ * Recovers the AppCallbackNotifier instance from the frame's mCookie and
+ * forwards the frame to the non-static frameCallback().
+ * NOTE(review): caFrame is dereferenced without a NULL check — callers must
+ * guarantee a valid frame.
+ */
+void AppCallbackNotifier::frameCallbackRelay(CameraFrame* caFrame)
+{
+    LOG_FUNCTION_NAME;
+    AppCallbackNotifier *appcbn = (AppCallbackNotifier*) (caFrame->mCookie);
+    appcbn->frameCallback(caFrame);
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ * Queues an incoming frame for asynchronous processing on the notifier
+ * thread. A heap copy of the frame descriptor is made so the caller's
+ * CameraFrame may be reused immediately; the copy is consumed (and deleted)
+ * by the frame-processing path.
+ * NOTE(review): with a standard (throwing) operator new the NULL branch is
+ * dead code — presumably kept for -fno-exceptions builds; confirm.
+ */
+void AppCallbackNotifier::frameCallback(CameraFrame* caFrame)
+{
+    ///Post the event to the event queue of AppCallbackNotifier
+    Utils::Message msg;
+    CameraFrame *frame;
+
+    LOG_FUNCTION_NAME;
+
+    if ( NULL != caFrame )
+        {
+
+        // Copy the descriptor; ownership passes to the message queue consumer.
+        frame = new CameraFrame(*caFrame);
+        if ( NULL != frame )
+            {
+            msg.command = AppCallbackNotifier::NOTIFIER_CMD_PROCESS_FRAME;
+            msg.arg1 = frame;
+            mFrameQ.put(&msg);
+            }
+        else
+            {
+            CAMHAL_LOGEA("Not enough resources to allocate CameraFrame");
+            }
+
+        }
+
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ * Drains the pending frame queue, returning each queued buffer to the
+ * frame provider without delivering it to the application.
+ * NOTE(review): the heap-allocated CameraFrame popped from the queue is not
+ * deleted here, unlike the normal processing path which deletes it after
+ * handling — looks like a memory leak; verify.
+ */
+void AppCallbackNotifier::flushAndReturnFrames()
+{
+    LOG_FUNCTION_NAME;
+
+    Utils::Message msg;
+    CameraFrame *frame;
+
+    android::AutoMutex lock(mLock);
+    while (!mFrameQ.isEmpty()) {
+        mFrameQ.get(&msg);
+        frame = (CameraFrame*) msg.arg1;
+        if (frame) {
+            // Give the buffer back to its producer so the pipeline keeps flowing.
+            mFrameProvider->returnFrame(frame->mBuffer,
+                                        (CameraFrame::FrameType) frame->mFrameType);
+        }
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ * Static trampoline registered with the EventProvider.
+ * Recovers the AppCallbackNotifier instance from the event's mCookie and
+ * forwards the event to the non-static eventCallback().
+ */
+void AppCallbackNotifier::eventCallbackRelay(CameraHalEvent* chEvt)
+{
+    LOG_FUNCTION_NAME;
+    AppCallbackNotifier *appcbn = (AppCallbackNotifier*) (chEvt->mCookie);
+    appcbn->eventCallback(chEvt);
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ * Queues an incoming HAL event for asynchronous processing. A heap copy of
+ * the event is made (ownership passes to the queue consumer) and posted to
+ * mEventQ under mLock.
+ * NOTE(review): as with frameCallback, the NULL check on the result of new
+ * is dead code under a throwing operator new.
+ */
+void AppCallbackNotifier::eventCallback(CameraHalEvent* chEvt)
+{
+
+    ///Post the event to the event queue of AppCallbackNotifier
+    Utils::Message msg;
+    CameraHalEvent *event;
+
+
+    LOG_FUNCTION_NAME;
+
+    if ( NULL != chEvt )
+        {
+
+        event = new CameraHalEvent(*chEvt);
+        if ( NULL != event )
+            {
+            msg.command = AppCallbackNotifier::NOTIFIER_CMD_PROCESS_EVENT;
+            msg.arg1 = event;
+            {
+            // Only the queue insertion needs the lock.
+            android::AutoMutex lock(mLock);
+            mEventQ.put(&msg);
+            }
+            }
+        else
+            {
+            CAMHAL_LOGEA("Not enough resources to allocate CameraHalEvent");
+            }
+
+        }
+
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+
+/**
+ * Discards all pending (not yet processed) events under mLock.
+ * NOTE(review): clear() drops the messages but the heap-allocated
+ * CameraHalEvent objects they carry are never deleted — possible leak;
+ * verify against Utils::MessageQueue semantics.
+ */
+void AppCallbackNotifier::flushEventQueue()
+{
+
+    {
+    android::AutoMutex lock(mLock);
+    mEventQ.clear();
+    }
+}
+
+
+/**
+ * Blocking worker for the notification thread: waits for the next control
+ * message on the thread's message queue and handles it.
+ *
+ * @return false when NOTIFIER_EXIT was received (thread should terminate),
+ *         true otherwise (keep looping).
+ */
+bool AppCallbackNotifier::processMessage()
+{
+    ///Retrieve the command from the command queue and process it
+    Utils::Message msg;
+
+    LOG_FUNCTION_NAME;
+
+    CAMHAL_LOGDA("+Msg get...");
+    // Blocks until a message is available.
+    mNotificationThread->msgQ().get(&msg);
+    CAMHAL_LOGDA("-Msg get...");
+    bool ret = true;
+
+    switch(msg.command)
+      {
+        case NotificationThread::NOTIFIER_EXIT:
+          {
+            CAMHAL_LOGD("Received NOTIFIER_EXIT command from Camera HAL");
+            mNotifierState = AppCallbackNotifier::NOTIFIER_EXITED;
+            ret = false;
+            break;
+          }
+        default:
+          {
+            CAMHAL_LOGEA("Error: ProcessMsg() command from Camera HAL");
+            break;
+          }
+      }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+
+
+}
+
+/**
+ * Destructor. Stops the notifier, unregisters from the frame/event
+ * providers, shuts down and joins the notification thread, deletes the
+ * providers, and releases any shared video buffers.
+ */
+AppCallbackNotifier::~AppCallbackNotifier()
+{
+    LOG_FUNCTION_NAME;
+
+    ///Stop app callback notifier if not already stopped
+    stop();
+
+    ///Unregister with the frame provider
+    if ( NULL != mFrameProvider )
+        {
+        mFrameProvider->disableFrameNotification(CameraFrame::ALL_FRAMES);
+        }
+
+    //unregister with the event provider
+    if ( NULL != mEventProvider )
+        {
+        mEventProvider->disableEventNotification(CameraHalEvent::ALL_EVENTS);
+        }
+
+    Utils::Message msg = {0,0,0,0,0,0};
+    msg.command = NotificationThread::NOTIFIER_EXIT;
+
+    ///Post the message to display thread
+    mNotificationThread->msgQ().put(&msg);
+
+    //Exit and cleanup the thread
+    mNotificationThread->requestExit();
+    mNotificationThread->join();
+
+    //Delete the display thread
+    mNotificationThread.clear();
+
+
+    ///Free the event and frame providers
+    if ( NULL != mEventProvider )
+        {
+        ///Deleting the event provider
+        CAMHAL_LOGDA("Stopping Event Provider");
+        delete mEventProvider;
+        mEventProvider = NULL;
+        }
+
+    if ( NULL != mFrameProvider )
+        {
+        ///Deleting the frame provider
+        CAMHAL_LOGDA("Stopping Frame Provider");
+        delete mFrameProvider;
+        mFrameProvider = NULL;
+        }
+
+    releaseSharedVideoBuffers();
+
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+//Free all video heaps and buffers
+/**
+ * Releases every metadata buffer allocated for metadata-mode video
+ * recording and clears the lookup maps. No-op when metadata buffer mode
+ * is disabled.
+ */
+void AppCallbackNotifier::releaseSharedVideoBuffers()
+{
+    LOG_FUNCTION_NAME;
+
+    if(mUseMetaDataBufferMode)
+        {
+        camera_memory_t* videoMedatadaBufferMemory;
+        for (unsigned int i = 0; i < mVideoMetadataBufferMemoryMap.size(); i++)
+            {
+            videoMedatadaBufferMemory = mVideoMetadataBufferMemoryMap.valueAt(i);
+            if(NULL != videoMedatadaBufferMemory)
+                {
+                // camera_memory_t is freed through its own release hook.
+                videoMedatadaBufferMemory->release(videoMedatadaBufferMemory);
+                CAMHAL_LOGDB("Released videoMedatadaBufferMemory=%p", videoMedatadaBufferMemory);
+                }
+            }
+
+        mVideoMetadataBufferMemoryMap.clear();
+        mVideoMetadataBufferReverseMap.clear();
+        if (mUseVideoBuffers)
+            {
+            mVideoMap.clear();
+            }
+        }
+
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ * Creates an EventProvider wrapping the given notifier and subscribes for
+ * the requested event mask, with eventCallbackRelay as the entry point.
+ * NOTE(review): the NULL check on the result of new is dead under a
+ * throwing operator new; also a previously-set mEventProvider would be
+ * overwritten without being deleted.
+ */
+void AppCallbackNotifier::setEventProvider(int32_t eventMask, MessageNotifier * eventNotifier)
+{
+
+    LOG_FUNCTION_NAME;
+    ///@remarks There is no NULL check here. We will check
+    ///for NULL when we get start command from CameraHal
+    ///@Remarks Currently only one event provider (CameraAdapter) is supported
+    ///@todo Have an array of event providers for each event bitmask
+    mEventProvider = new EventProvider(eventNotifier, this, eventCallbackRelay);
+    if ( NULL == mEventProvider )
+        {
+        CAMHAL_LOGEA("Error in creating EventProvider");
+        }
+    else
+        {
+        mEventProvider->enableEventNotification(eventMask);
+        }
+
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ * Creates a FrameProvider wrapping the given notifier and subscribes for
+ * captured image and RAW frames, with frameCallbackRelay as entry point.
+ * Preview/snapshot/video subscriptions are enabled later by
+ * startPreviewCallbacks()/startRecording().
+ */
+void AppCallbackNotifier::setFrameProvider(FrameNotifier *frameNotifier)
+{
+    LOG_FUNCTION_NAME;
+    ///@remarks There is no NULL check here. We will check
+    ///for NULL when we get the start command from CameraAdapter
+    mFrameProvider = new FrameProvider(frameNotifier, this, frameCallbackRelay);
+    if ( NULL == mFrameProvider )
+        {
+        CAMHAL_LOGEA("Error in creating FrameProvider");
+        }
+    else
+        {
+        //Register only for captured images and RAW for now
+        //TODO: Register for and handle all types of frames
+        mFrameProvider->enableFrameNotification(CameraFrame::IMAGE_FRAME);
+        mFrameProvider->enableFrameNotification(CameraFrame::RAW_FRAME);
+        }
+
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ * Prepares preview-frame delivery to the application: caches the preview
+ * geometry/format from params, allocates a single camera_memory_t sliced
+ * into MAX_BUFFERS preview copy buffers, and enables preview/snapshot
+ * frame notifications according to the currently enabled message types.
+ *
+ * @param params preview parameters (size and pixel format are read).
+ * @param buffers/offsets/fd/length/count unused in this implementation.
+ * @return NO_ERROR on success, -EINVAL without a frame provider,
+ *         NO_INIT if already previewing, NO_MEMORY on allocation failure.
+ * NOTE(review): local bufArr is unused; mPreviewStride is hard-coded to
+ * 4096 — presumably the platform's fixed gralloc stride, confirm.
+ */
+status_t AppCallbackNotifier::startPreviewCallbacks(android::CameraParameters &params, CameraBuffer *buffers, uint32_t *offsets, int fd, size_t length, size_t count)
+{
+    unsigned int *bufArr;
+    int size = 0;
+
+    LOG_FUNCTION_NAME;
+
+    android::AutoMutex lock(mLock);
+
+    if ( NULL == mFrameProvider )
+        {
+        CAMHAL_LOGEA("Trying to start video recording without FrameProvider");
+        return -EINVAL;
+        }
+
+    if ( mPreviewing )
+        {
+        CAMHAL_LOGDA("+Already previewing");
+        return NO_INIT;
+        }
+
+    int w,h;
+    ///Get preview size
+    params.getPreviewSize(&w, &h);
+
+    // save preview pixel format, size and stride
+    mPreviewWidth = w;
+    mPreviewHeight = h;
+    mPreviewStride = 4096;
+    mPreviewPixelFormat = CameraHal::getPixelFormatConstant(params.getPreviewFormat());
+    size = CameraHal::calculateBufferSize(mPreviewPixelFormat, w, h);
+
+    // One contiguous allocation carved into MAX_BUFFERS slots.
+    mPreviewMemory = mRequestMemory(-1, size, AppCallbackNotifier::MAX_BUFFERS, NULL);
+    if (!mPreviewMemory) {
+        return NO_MEMORY;
+    }
+
+    for (int i=0; i < AppCallbackNotifier::MAX_BUFFERS; i++) {
+        mPreviewBuffers[i].type = CAMERA_BUFFER_MEMORY;
+        mPreviewBuffers[i].opaque = (unsigned char*) mPreviewMemory->data + (i*size);
+        mPreviewBuffers[i].mapped = mPreviewBuffers[i].opaque;
+    }
+
+    if ( mCameraHal->msgTypeEnabled(CAMERA_MSG_PREVIEW_FRAME ) ) {
+        mFrameProvider->enableFrameNotification(CameraFrame::PREVIEW_FRAME_SYNC);
+    }
+
+    if ( mCameraHal->msgTypeEnabled(CAMERA_MSG_POSTVIEW_FRAME) ) {
+        mFrameProvider->enableFrameNotification(CameraFrame::SNAPSHOT_FRAME);
+    }
+
+    mPreviewBufCount = 0;
+
+    mPreviewing = true;
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return NO_ERROR;
+}
+
+/**
+ * Enables/disables burst-capture mode for compressed image callbacks,
+ * guarded by mBurstLock (read in the image-frame delivery path).
+ */
+void AppCallbackNotifier::setBurst(bool burst)
+{
+    LOG_FUNCTION_NAME;
+
+    android::AutoMutex lock(mBurstLock);
+
+    mBurst = burst;
+
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ * Selects whether dedicated (gralloc) video buffers are used for
+ * recording (affects the metadata-mode copy path).
+ * NOTE(review): unlike setBurst, this write is not lock-protected —
+ * presumably only called while recording is stopped; confirm.
+ */
+void AppCallbackNotifier::useVideoBuffers(bool useVideoBuffers)
+{
+    LOG_FUNCTION_NAME;
+
+    mUseVideoBuffers = useVideoBuffers;
+
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+// Accessor for mUseVideoBuffers. The "Ues" typo is part of the public
+// interface (declared in the header) and must be kept for compatibility.
+bool AppCallbackNotifier::getUesVideoBuffers()
+{
+    return mUseVideoBuffers;
+}
+
+/**
+ * Stores the video recording resolution, used by the video-frame copy/
+ * resize path when dedicated video buffers are in use.
+ */
+void AppCallbackNotifier::setVideoRes(int width, int height)
+{
+    LOG_FUNCTION_NAME;
+
+    mVideoWidth = width;
+    mVideoHeight = height;
+
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ * Tears down preview-frame delivery: disables preview/snapshot frame
+ * notifications and releases the shared preview copy memory.
+ *
+ * @return NO_ERROR on success, -EINVAL without a frame provider,
+ *         NO_INIT if preview was not running.
+ */
+status_t AppCallbackNotifier::stopPreviewCallbacks()
+{
+    LOG_FUNCTION_NAME;
+
+    if ( NULL == mFrameProvider )
+        {
+        CAMHAL_LOGEA("Trying to stop preview callbacks without FrameProvider");
+        return -EINVAL;
+        }
+
+    if ( !mPreviewing )
+        {
+        return NO_INIT;
+        }
+
+    mFrameProvider->disableFrameNotification(CameraFrame::PREVIEW_FRAME_SYNC);
+    mFrameProvider->disableFrameNotification(CameraFrame::SNAPSHOT_FRAME);
+
+    {
+    // Serialize with frame delivery which reads mPreviewMemory under mLock.
+    android::AutoMutex lock(mLock);
+    mPreviewMemory->release(mPreviewMemory);
+    mPreviewMemory = 0;
+    }
+
+    mPreviewing = false;
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return NO_ERROR;
+
+}
+
+// Selects metadata-buffer mode for video recording (encoder receives
+// video_metadata_t descriptors instead of pixel data). Always succeeds.
+status_t AppCallbackNotifier::useMetaDataBufferMode(bool enable)
+{
+    mUseMetaDataBufferMode = enable;
+
+    return NO_ERROR;
+}
+
+
+/**
+ * Starts video recording: subscribes for VIDEO_FRAME_SYNC frames and marks
+ * the notifier as recording. Guarded by mRecordingLock.
+ *
+ * @return NO_ERROR on success, NO_INIT if already recording, -1 when no
+ *         frame provider is set.
+ * NOTE(review): when mFrameProvider is NULL, ret becomes -1 but mRecording
+ * is still set to true below — looks like a state-tracking bug; verify.
+ */
+status_t AppCallbackNotifier::startRecording()
+{
+    status_t ret = NO_ERROR;
+
+    LOG_FUNCTION_NAME;
+
+    android::AutoMutex lock(mRecordingLock);
+
+    if ( NULL == mFrameProvider )
+        {
+        CAMHAL_LOGEA("Trying to start video recording without FrameProvider");
+        ret = -1;
+        }
+
+    if(mRecording)
+        {
+        return NO_INIT;
+        }
+
+    if ( NO_ERROR == ret )
+        {
+         mFrameProvider->enableFrameNotification(CameraFrame::VIDEO_FRAME_SYNC);
+        }
+
+    mRecording = true;
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+//Allocate metadata buffers for video recording
+/**
+ * In metadata-buffer mode, allocates one video_metadata_t-sized
+ * camera_memory_t per recording buffer and builds the forward/reverse
+ * lookup maps keyed by buffer opaque handle. When vidBufs is non-NULL the
+ * gralloc video buffers are also mapped for the copy path.
+ *
+ * @return NO_ERROR on success, BAD_VALUE for NULL buffers, NO_MEMORY on
+ *         allocation failure (already-allocated entries are reclaimed
+ *         later by releaseSharedVideoBuffers()).
+ * NOTE(review): the exit label below is unused (no goto) — harmless but
+ * will trigger a compiler warning.
+ */
+status_t AppCallbackNotifier::initSharedVideoBuffers(CameraBuffer *buffers, uint32_t *offsets, int fd, size_t length, size_t count, CameraBuffer *vidBufs)
+{
+    status_t ret = NO_ERROR;
+    LOG_FUNCTION_NAME;
+
+    if(mUseMetaDataBufferMode)
+        {
+        camera_memory_t* videoMedatadaBufferMemory = NULL;
+
+        if(NULL == buffers)
+            {
+            CAMHAL_LOGEA("Error! Video buffers are NULL");
+            return BAD_VALUE;
+            }
+
+        for (uint32_t i = 0; i < count; i++)
+            {
+            videoMedatadaBufferMemory = mRequestMemory(-1, sizeof(video_metadata_t), 1, NULL);
+            if((NULL == videoMedatadaBufferMemory) || (NULL == videoMedatadaBufferMemory->data))
+                {
+                CAMHAL_LOGEA("Error! Could not allocate memory for Video Metadata Buffers");
+                return NO_MEMORY;
+                }
+
+            // FIXME remove cast
+            mVideoMetadataBufferMemoryMap.add((void *)buffers[i].opaque, videoMedatadaBufferMemory);
+            mVideoMetadataBufferReverseMap.add(videoMedatadaBufferMemory->data, &buffers[i]);
+            CAMHAL_LOGDB("buffers[%d]=%p, videoMedatadaBufferMemory=%p, videoMedatadaBufferMemory->data=%p",
+                    i, &buffers[i], videoMedatadaBufferMemory, videoMedatadaBufferMemory->data);
+
+            if (vidBufs != NULL)
+                {
+                //ASSERT(buffers[i].type == CAMERA_BUFFER_GRALLOC);
+                // FIXME remove cast
+                mVideoMap.add((void *)buffers[i].opaque, &vidBufs[i]);
+                CAMHAL_LOGVB("buffers[%d]=%p, vBuffArr[%d]=%p", i, &buffers[i], i, &vidBufs[i]);
+                }
+            }
+        }
+
+exit:
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+/**
+ * Stops video recording: unsubscribes from VIDEO_FRAME_SYNC frames,
+ * releases the shared metadata buffers, and clears the recording flag.
+ * Guarded by mRecordingLock.
+ *
+ * @return NO_ERROR on success, NO_INIT if not recording, -1 when no
+ *         frame provider is set.
+ */
+status_t AppCallbackNotifier::stopRecording()
+{
+    status_t ret = NO_ERROR;
+
+    LOG_FUNCTION_NAME;
+
+    android::AutoMutex lock(mRecordingLock);
+
+    if ( NULL == mFrameProvider )
+        {
+        CAMHAL_LOGEA("Trying to stop video recording without FrameProvider");
+        ret = -1;
+        }
+
+    if(!mRecording)
+        {
+        return NO_INIT;
+        }
+
+    if ( NO_ERROR == ret )
+        {
+         mFrameProvider->disableFrameNotification(CameraFrame::VIDEO_FRAME_SYNC);
+        }
+
+    ///Release the shared video buffers
+    releaseSharedVideoBuffers();
+
+    mRecording = false;
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+/**
+ * Called when the encoder/application is done with a recording frame:
+ * maps the returned memory back to the originating CameraBuffer and
+ * returns it to the frame provider.
+ *
+ * @param mem in metadata mode, the video_metadata_t pointer previously
+ *            handed out; otherwise assumed to hold the buffer pointer
+ *            (see FIXME — that path is suspect).
+ * @return NO_ERROR on success, -1 on invalid state/argument.
+ */
+status_t AppCallbackNotifier::releaseRecordingFrame(const void* mem)
+{
+    status_t ret = NO_ERROR;
+    CameraBuffer *frame = NULL;
+
+    LOG_FUNCTION_NAME;
+    if ( NULL == mFrameProvider )
+        {
+        CAMHAL_LOGEA("Trying to stop video recording without FrameProvider");
+        ret = -1;
+        }
+
+    if ( NULL == mem )
+        {
+        CAMHAL_LOGEA("Video Frame released is invalid");
+        ret = -1;
+        }
+
+    if( NO_ERROR != ret )
+        {
+        return ret;
+        }
+
+    if(mUseMetaDataBufferMode)
+        {
+        video_metadata_t *videoMetadataBuffer = (video_metadata_t *) mem ;
+        /* FIXME remove cast */
+        frame = mVideoMetadataBufferReverseMap.valueFor(videoMetadataBuffer);
+        CAMHAL_LOGVB("Releasing frame with videoMetadataBuffer=0x%x, videoMetadataBuffer->handle=0x%x & frame handle=0x%x\n",
+                videoMetadataBuffer, videoMetadataBuffer->handle, frame);
+        }
+    else
+        {
+        /* FIXME this won't work */
+        // NOTE(review): reads a 32-bit value out of mem and reinterprets it
+        // as a pointer — broken on 64-bit; matches the FIXME above.
+        frame = (CameraBuffer *)(void*)(*((uint32_t *)mem));
+        }
+
+    if ( NO_ERROR == ret )
+        {
+         ret = mFrameProvider->returnFrame(frame, CameraFrame::VIDEO_FRAME_SYNC);
+        }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+/**
+ * Enables the frame notifications corresponding to the requested callback
+ * message types (preview, postview, raw). Always returns NO_ERROR.
+ */
+status_t AppCallbackNotifier::enableMsgType(int32_t msgType)
+{
+    if( msgType & CAMERA_MSG_PREVIEW_FRAME ) {
+        mFrameProvider->enableFrameNotification(CameraFrame::PREVIEW_FRAME_SYNC);
+    }
+
+    if( msgType & CAMERA_MSG_POSTVIEW_FRAME ) {
+        mFrameProvider->enableFrameNotification(CameraFrame::SNAPSHOT_FRAME);
+    }
+
+    if(msgType & CAMERA_MSG_RAW_IMAGE) {
+        mFrameProvider->enableFrameNotification(CameraFrame::RAW_FRAME);
+    }
+
+    return NO_ERROR;
+}
+
+/**
+ * Disables the frame notifications corresponding to the requested callback
+ * message types (preview, postview, raw). Always returns NO_ERROR.
+ */
+status_t AppCallbackNotifier::disableMsgType(int32_t msgType)
+{
+    if( msgType & CAMERA_MSG_PREVIEW_FRAME ) {
+        mFrameProvider->disableFrameNotification(CameraFrame::PREVIEW_FRAME_SYNC);
+    }
+
+    if( msgType & CAMERA_MSG_POSTVIEW_FRAME ) {
+        mFrameProvider->disableFrameNotification(CameraFrame::SNAPSHOT_FRAME);
+    }
+
+    if(msgType & CAMERA_MSG_RAW_IMAGE) {
+        mFrameProvider->disableFrameNotification(CameraFrame::RAW_FRAME);
+    }
+
+    return NO_ERROR;
+
+}
+
+/**
+ * Transitions the notifier into the STARTED state after verifying that
+ * both a frame provider and an event provider have been set. Clears any
+ * stale JPEG encoder queue entries.
+ *
+ * @return NO_ERROR on success, ALREADY_EXISTS if already started,
+ *         NO_INIT when a provider is missing.
+ */
+status_t AppCallbackNotifier::start()
+{
+    LOG_FUNCTION_NAME;
+    if(mNotifierState==AppCallbackNotifier::NOTIFIER_STARTED)
+        {
+        CAMHAL_LOGDA("AppCallbackNotifier already running");
+        LOG_FUNCTION_NAME_EXIT;
+        return ALREADY_EXISTS;
+        }
+
+    ///Check whether initial conditions are met for us to start
+    ///A frame provider should be available, if not return error
+    if(!mFrameProvider)
+        {
+        ///AppCallbackNotifier not properly initialized
+        CAMHAL_LOGEA("AppCallbackNotifier not properly initialized - Frame provider is NULL");
+        LOG_FUNCTION_NAME_EXIT;
+        return NO_INIT;
+        }
+
+    ///At least one event notifier should be available, if not return error
+    ///@todo Modify here when there is an array of event providers
+    if(!mEventProvider)
+        {
+        CAMHAL_LOGEA("AppCallbackNotifier not properly initialized - Event provider is NULL");
+        LOG_FUNCTION_NAME_EXIT;
+        ///AppCallbackNotifier not properly initialized
+        return NO_INIT;
+        }
+
+    mNotifierState = AppCallbackNotifier::NOTIFIER_STARTED;
+    CAMHAL_LOGDA(" --> AppCallbackNotifier NOTIFIER_STARTED \n");
+
+    gEncoderQueue.clear();
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return NO_ERROR;
+
+}
+
+/**
+ * Transitions the notifier into the STOPPED state and drains the global
+ * JPEG encoder queue: each pending encoder is cancelled and its output
+ * memory / EXIF cookies are released.
+ *
+ * @return NO_ERROR on success, ALREADY_EXISTS if not currently started.
+ */
+status_t AppCallbackNotifier::stop()
+{
+    LOG_FUNCTION_NAME;
+
+    if(mNotifierState!=AppCallbackNotifier::NOTIFIER_STARTED)
+        {
+        CAMHAL_LOGDA("AppCallbackNotifier already in stopped state");
+        LOG_FUNCTION_NAME_EXIT;
+        return ALREADY_EXISTS;
+        }
+    {
+    android::AutoMutex lock(mLock);
+
+    mNotifierState = AppCallbackNotifier::NOTIFIER_STOPPED;
+    CAMHAL_LOGDA(" --> AppCallbackNotifier NOTIFIER_STOPPED \n");
+    }
+
+    while(!gEncoderQueue.isEmpty()) {
+        android::sp<Encoder_libjpeg> encoder = gEncoderQueue.valueAt(0);
+        camera_memory_t* encoded_mem = NULL;
+        ExifElementsTable* exif = NULL;
+
+        if(encoder.get()) {
+            encoder->cancel();
+
+            // Reclaim the output buffer and EXIF table the encoder was holding.
+            encoder->getCookies(NULL, (void**) &encoded_mem, (void**) &exif);
+            if (encoded_mem) {
+                encoded_mem->release(encoded_mem);
+            }
+            if (exif) {
+                delete exif;
+            }
+
+            encoder.clear();
+        }
+        gEncoderQueue.removeItemsAt(0);
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+    return NO_ERROR;
+}
+
+
+/*--------------------NotificationHandler Class ENDS here-----------------------------*/
+
+
+
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/BaseCameraAdapter.cpp b/camera/BaseCameraAdapter.cpp
new file mode 100644
index 0000000..bfcd74c
--- /dev/null
+++ b/camera/BaseCameraAdapter.cpp
@@ -0,0 +1,2724 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "BaseCameraAdapter.h"
+
+// Isolates the 16-bit frame/event id after shifting a packed message word.
+const int EVENT_MASK = 0xffff;
+
+namespace Ti {
+namespace Camera {
+
+// Lookup table mapping textual camera command names (for logging/debug
+// interfaces) to their CameraAdapter command enum values.
+const LUT cameraCommandsUserToHAL[] = {
+    { "CAMERA_START_PREVIEW",                   CameraAdapter::CAMERA_START_PREVIEW },
+    { "CAMERA_STOP_PREVIEW",                    CameraAdapter::CAMERA_STOP_PREVIEW },
+    { "CAMERA_START_VIDEO",                     CameraAdapter::CAMERA_START_VIDEO },
+    { "CAMERA_STOP_VIDEO",                      CameraAdapter::CAMERA_STOP_VIDEO },
+    { "CAMERA_START_IMAGE_CAPTURE",             CameraAdapter::CAMERA_START_IMAGE_CAPTURE },
+    { "CAMERA_STOP_IMAGE_CAPTURE",              CameraAdapter::CAMERA_STOP_IMAGE_CAPTURE },
+    { "CAMERA_PERFORM_AUTOFOCUS",               CameraAdapter::CAMERA_PERFORM_AUTOFOCUS },
+    { "CAMERA_CANCEL_AUTOFOCUS",                CameraAdapter::CAMERA_CANCEL_AUTOFOCUS },
+    { "CAMERA_PREVIEW_FLUSH_BUFFERS",           CameraAdapter::CAMERA_PREVIEW_FLUSH_BUFFERS },
+    { "CAMERA_START_SMOOTH_ZOOM",               CameraAdapter::CAMERA_START_SMOOTH_ZOOM },
+    { "CAMERA_STOP_SMOOTH_ZOOM",                CameraAdapter::CAMERA_STOP_SMOOTH_ZOOM },
+    { "CAMERA_USE_BUFFERS_PREVIEW",             CameraAdapter::CAMERA_USE_BUFFERS_PREVIEW },
+    { "CAMERA_SET_TIMEOUT",                     CameraAdapter::CAMERA_SET_TIMEOUT },
+    { "CAMERA_CANCEL_TIMEOUT",                  CameraAdapter::CAMERA_CANCEL_TIMEOUT },
+    { "CAMERA_START_BRACKET_CAPTURE",           CameraAdapter::CAMERA_START_BRACKET_CAPTURE },
+    { "CAMERA_STOP_BRACKET_CAPTURE",            CameraAdapter::CAMERA_STOP_BRACKET_CAPTURE },
+    { "CAMERA_QUERY_RESOLUTION_PREVIEW",        CameraAdapter::CAMERA_QUERY_RESOLUTION_PREVIEW },
+    { "CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE", CameraAdapter::CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE },
+    { "CAMERA_QUERY_BUFFER_SIZE_PREVIEW_DATA",  CameraAdapter::CAMERA_QUERY_BUFFER_SIZE_PREVIEW_DATA },
+    { "CAMERA_USE_BUFFERS_IMAGE_CAPTURE",       CameraAdapter::CAMERA_USE_BUFFERS_IMAGE_CAPTURE },
+    { "CAMERA_USE_BUFFERS_PREVIEW_DATA",        CameraAdapter::CAMERA_USE_BUFFERS_PREVIEW_DATA },
+    { "CAMERA_TIMEOUT_EXPIRED",                 CameraAdapter::CAMERA_TIMEOUT_EXPIRED },
+    { "CAMERA_START_FD",                        CameraAdapter::CAMERA_START_FD },
+    { "CAMERA_STOP_FD",                         CameraAdapter::CAMERA_STOP_FD },
+    { "CAMERA_SWITCH_TO_EXECUTING",             CameraAdapter::CAMERA_SWITCH_TO_EXECUTING },
+    { "CAMERA_USE_BUFFERS_VIDEO_CAPTURE",       CameraAdapter::CAMERA_USE_BUFFERS_VIDEO_CAPTURE },
+#ifdef OMAP_ENHANCEMENT_CPCAM
+    { "CAMERA_USE_BUFFERS_REPROCESS",           CameraAdapter::CAMERA_USE_BUFFERS_REPROCESS },
+    { "CAMERA_START_REPROCESS",                 CameraAdapter::CAMERA_START_REPROCESS },
+#endif
+};
+
+// Packaged command LUT (size + table) consumed by LUT lookup helpers.
+const LUTtypeHAL CamCommandsLUT = {
+    sizeof(cameraCommandsUserToHAL)/sizeof(cameraCommandsUserToHAL[0]),
+    cameraCommandsUserToHAL
+};
+
+/*--------------------Camera Adapter Class STARTS here-----------------------------*/
+
+/**
+ * Constructor: zero-initializes callback hooks, buffer bookkeeping and the
+ * adapter state machine (INTIALIZED_STATE — spelling matches the enum as
+ * declared in the header).
+ */
+BaseCameraAdapter::BaseCameraAdapter()
+{
+    mReleaseImageBuffersCallback = NULL;
+    mEndImageCaptureCallback = NULL;
+    mErrorNotifier = NULL;
+    mEndCaptureData = NULL;
+    mReleaseData = NULL;
+    mRecording = false;
+
+    mPreviewBuffers = NULL;
+    mPreviewBufferCount = 0;
+    mPreviewBuffersLength = 0;
+
+    mVideoBuffers = NULL;
+    mVideoBuffersCount = 0;
+    mVideoBuffersLength = 0;
+
+    mCaptureBuffers = NULL;
+    mCaptureBuffersCount = 0;
+    mCaptureBuffersLength = 0;
+
+    mPreviewDataBuffers = NULL;
+    mPreviewDataBuffersCount = 0;
+    mPreviewDataBuffersLength = 0;
+
+    mAdapterState = INTIALIZED_STATE;
+
+    mSharedAllocator = NULL;
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+    // Performance-measurement timestamps, only present in PPM builds.
+    mStartFocus.tv_sec = 0;
+    mStartFocus.tv_usec = 0;
+    mStartCapture.tv_sec = 0;
+    mStartCapture.tv_usec = 0;
+#endif
+
+}
+
+/**
+ * Destructor: clears every subscriber map under the subscriber lock so no
+ * further callbacks can be dispatched to stale cookies.
+ */
+BaseCameraAdapter::~BaseCameraAdapter()
+{
+    LOG_FUNCTION_NAME;
+
+    android::AutoMutex lock(mSubscriberLock);
+
+    mFrameSubscribers.clear();
+    mImageSubscribers.clear();
+    mRawSubscribers.clear();
+    mVideoSubscribers.clear();
+    mVideoInSubscribers.clear();
+    mFocusSubscribers.clear();
+    mShutterSubscribers.clear();
+    mZoomSubscribers.clear();
+    mSnapshotSubscribers.clear();
+    mMetadataSubscribers.clear();
+
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ * Registers the callback (plus opaque user data) invoked when image
+ * capture buffers can be released. Always returns NO_ERROR.
+ */
+status_t BaseCameraAdapter::registerImageReleaseCallback(release_image_buffers_callback callback, void *user_data)
+{
+    status_t ret = NO_ERROR;
+
+    LOG_FUNCTION_NAME;
+
+    mReleaseImageBuffersCallback = callback;
+    mReleaseData = user_data;
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+/**
+ * Registers the callback (plus opaque user data) invoked when an image
+ * capture sequence completes. Always returns NO_ERROR.
+ */
+status_t BaseCameraAdapter::registerEndCaptureCallback(end_image_capture_callback callback, void *user_data)
+{
+    status_t ret = NO_ERROR;
+
+    LOG_FUNCTION_NAME;
+
+    mEndImageCaptureCallback= callback;
+    mEndCaptureData = user_data;
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+/**
+ * Installs the error notifier used to report adapter errors upstream.
+ *
+ * @return NO_ERROR on success, -EINVAL for a NULL notifier.
+ */
+status_t BaseCameraAdapter::setErrorHandler(ErrorNotifier *errorNotifier)
+{
+    status_t ret = NO_ERROR;
+
+    LOG_FUNCTION_NAME;
+
+    if ( NULL == errorNotifier )
+        {
+        CAMHAL_LOGEA("Invalid Error Notifier reference");
+        ret = -EINVAL;
+        }
+
+    if ( NO_ERROR == ret )
+        {
+        mErrorNotifier = errorNotifier;
+        }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+/**
+ * Subscribes a callback for the frame type and/or event mask packed into
+ * msgs (frame id in FRAME_BIT_FIELD_POSITION bits, event id in
+ * EVENT_BIT_FIELD_POSITION bits). The cookie identifies the subscriber and
+ * is used as the map key.
+ * NOTE(review): the (int) cookie casts truncate the pointer on LP64 —
+ * collisions are possible if two cookies share the low 32 bits; verify.
+ */
+void BaseCameraAdapter::enableMsgType(int32_t msgs, frame_callback callback, event_callback eventCb, void* cookie)
+{
+    android::AutoMutex lock(mSubscriberLock);
+
+    LOG_FUNCTION_NAME;
+
+    int32_t frameMsg = ((msgs >> MessageNotifier::FRAME_BIT_FIELD_POSITION) & EVENT_MASK);
+    int32_t eventMsg = ((msgs >> MessageNotifier::EVENT_BIT_FIELD_POSITION) & EVENT_MASK);
+
+    if ( frameMsg != 0 )
+        {
+        CAMHAL_LOGVB("Frame message type id=0x%x subscription request", frameMsg);
+        switch ( frameMsg )
+            {
+            case CameraFrame::PREVIEW_FRAME_SYNC:
+                mFrameSubscribers.add((int) cookie, callback);
+                break;
+            case CameraFrame::FRAME_DATA_SYNC:
+                mFrameDataSubscribers.add((int) cookie, callback);
+                break;
+            case CameraFrame::SNAPSHOT_FRAME:
+                mSnapshotSubscribers.add((int) cookie, callback);
+                break;
+            case CameraFrame::IMAGE_FRAME:
+                mImageSubscribers.add((int) cookie, callback);
+                break;
+            case CameraFrame::RAW_FRAME:
+                mRawSubscribers.add((int) cookie, callback);
+                break;
+            case CameraFrame::VIDEO_FRAME_SYNC:
+                mVideoSubscribers.add((int) cookie, callback);
+                break;
+            case CameraFrame::REPROCESS_INPUT_FRAME:
+                mVideoInSubscribers.add((int) cookie, callback);
+                break;
+            default:
+                CAMHAL_LOGEA("Frame message type id=0x%x subscription no supported yet!", frameMsg);
+                break;
+            }
+        }
+
+    if ( eventMsg != 0)
+        {
+        CAMHAL_LOGVB("Event message type id=0x%x subscription request", eventMsg);
+        if ( CameraHalEvent::ALL_EVENTS == eventMsg )
+            {
+            mFocusSubscribers.add((int) cookie, eventCb);
+            mShutterSubscribers.add((int) cookie, eventCb);
+            mZoomSubscribers.add((int) cookie, eventCb);
+            mMetadataSubscribers.add((int) cookie, eventCb);
+            }
+        else
+            {
+            CAMHAL_LOGEA("Event message type id=0x%x subscription no supported yet!", eventMsg);
+            }
+        }
+
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ * Unsubscribes the given cookie from the frame type and/or event mask
+ * packed into msgs (inverse of enableMsgType). ALL_FRAMES removes the
+ * cookie from every frame subscriber map at once.
+ * NOTE(review): same (int) cookie truncation caveat as enableMsgType.
+ */
+void BaseCameraAdapter::disableMsgType(int32_t msgs, void* cookie)
+{
+    android::AutoMutex lock(mSubscriberLock);
+
+    LOG_FUNCTION_NAME;
+
+    int32_t frameMsg = ((msgs >> MessageNotifier::FRAME_BIT_FIELD_POSITION) & EVENT_MASK);
+    int32_t eventMsg = ((msgs >> MessageNotifier::EVENT_BIT_FIELD_POSITION) & EVENT_MASK);
+
+    if ( frameMsg != 0 )
+        {
+        CAMHAL_LOGVB("Frame message type id=0x%x remove subscription request", frameMsg);
+        switch ( frameMsg )
+            {
+            case CameraFrame::PREVIEW_FRAME_SYNC:
+                mFrameSubscribers.removeItem((int) cookie);
+                break;
+            case CameraFrame::FRAME_DATA_SYNC:
+                mFrameDataSubscribers.removeItem((int) cookie);
+                break;
+            case CameraFrame::SNAPSHOT_FRAME:
+                mSnapshotSubscribers.removeItem((int) cookie);
+                break;
+            case CameraFrame::IMAGE_FRAME:
+                mImageSubscribers.removeItem((int) cookie);
+                break;
+            case CameraFrame::RAW_FRAME:
+                mRawSubscribers.removeItem((int) cookie);
+                break;
+            case CameraFrame::VIDEO_FRAME_SYNC:
+                mVideoSubscribers.removeItem((int) cookie);
+                break;
+            case CameraFrame::REPROCESS_INPUT_FRAME:
+                mVideoInSubscribers.removeItem((int) cookie);
+                break;
+            case CameraFrame::ALL_FRAMES:
+                mFrameSubscribers.removeItem((int) cookie);
+                mFrameDataSubscribers.removeItem((int) cookie);
+                mSnapshotSubscribers.removeItem((int) cookie);
+                mImageSubscribers.removeItem((int) cookie);
+                mRawSubscribers.removeItem((int) cookie);
+                mVideoSubscribers.removeItem((int) cookie);
+                mVideoInSubscribers.removeItem((int) cookie);
+                break;
+            default:
+                CAMHAL_LOGEA("Frame message type id=0x%x subscription remove not supported yet!", frameMsg);
+                break;
+            }
+        }
+
+    if ( eventMsg != 0 )
+        {
+        CAMHAL_LOGVB("Event message type id=0x%x remove subscription request", eventMsg);
+        if ( CameraHalEvent::ALL_EVENTS == eventMsg)
+            {
+            //TODO: Process case by case
+            mFocusSubscribers.removeItem((int) cookie);
+            mShutterSubscribers.removeItem((int) cookie);
+            mZoomSubscribers.removeItem((int) cookie);
+            mMetadataSubscribers.removeItem((int) cookie);
+            }
+        else
+            {
+            CAMHAL_LOGEA("Event message type id=0x%x subscription remove not supported yet!", eventMsg);
+            }
+        }
+
+    LOG_FUNCTION_NAME_EXIT;
+}
+
/**
 * Record the luma/chroma plane addresses of a camera buffer so later frame
 * dispatch (__sendFrameToSubscribers) can patch CameraFrame::mYuv for preview,
 * video and snapshot frames. The entry is keyed by the buffer pointer in
 * mFrameQueue; removeFramePointers() frees it.
 *
 * NOTE(review): mYuv[1] is initialized from ycbcr->cr rather than ycbcr->cb —
 * presumably the start of interleaved NV21/NV12 chroma; confirm against the
 * gralloc lock_ycbcr layout in use.
 * NOTE(review): plane pointers are narrowed to unsigned int — truncates on
 * 64-bit builds; verify mYuv's type before porting.
 */
void BaseCameraAdapter::addFramePointers(CameraBuffer *frameBuf, android_ycbcr* ycbcr)
{
    android::AutoMutex lock(mSubscriberLock);

    if ((frameBuf != NULL) && ( ycbcr != NULL) )
        {
        CameraFrame *frame = new CameraFrame;
        frame->mBuffer = frameBuf;
        frame->mYuv[0] = (unsigned int)ycbcr->y;
        frame->mYuv[1] = (unsigned int)ycbcr->cr;
        mFrameQueue.add(frameBuf, frame);

        CAMHAL_LOGVB("Adding Frame=0x%x Y=0x%x UV=0x%x", frame->mBuffer, frame->mYuv[0], frame->mYuv[1]);
        }
}
+
+void BaseCameraAdapter::removeFramePointers()
+{
+ android::AutoMutex lock(mSubscriberLock);
+
+ int size = mFrameQueue.size();
+ CAMHAL_LOGVB("Removing %d Frames = ", size);
+ for (int i = 0; i < size; i++)
+ {
+ CameraFrame *frame = (CameraFrame *)mFrameQueue.valueAt(i);
+ CAMHAL_LOGVB("Free Frame=0x%x Y=0x%x UV=0x%x", frame->mBuffer, frame->mYuv[0], frame->mYuv[1]);
+ delete frame;
+ }
+ mFrameQueue.clear();
+}
+
/**
 * Return one reference on a frame buffer of the given type. When the last
 * reference is dropped (refCount reaches 0), the buffer is handed back to the
 * adapter via fillThisBuffer() so it can be re-queued for capture.
 *
 * @param frameBuf  Buffer being returned; ignored (with an error log) if NULL.
 * @param frameType Which per-type ref-count table to decrement.
 *
 * Ref-count bookkeeping runs under mReturnFrameLock; the per-type tables have
 * their own locks inside get/setFrameRefCountByType.
 */
void BaseCameraAdapter::returnFrame(CameraBuffer * frameBuf, CameraFrame::FrameType frameType)
{
    status_t res = NO_ERROR;
    size_t subscriberCount = 0;
    int refCount = -1;

    if ( NULL == frameBuf )
        {
        CAMHAL_LOGEA("Invalid frameBuf");
        return;
        }

    if ( NO_ERROR == res)
        {
        android::AutoMutex lock(mReturnFrameLock);

        refCount = getFrameRefCountByType(frameBuf, frameType);

        // Track how many buffers are currently held by the display / encoder.
        if(frameType == CameraFrame::PREVIEW_FRAME_SYNC)
            {
            mFramesWithDisplay--;
            }
        else if(frameType == CameraFrame::VIDEO_FRAME_SYNC)
            {
            mFramesWithEncoder--;
            }

        if ( 0 < refCount )
            {

            refCount--;
            setFrameRefCountByType(frameBuf, frameType, refCount);

            // While recording, the same buffer may be referenced under other
            // frame types (e.g. video); include those so it is not re-queued
            // while another consumer still holds it.
            if (mRecording) {
                refCount += getFrameRefCount(frameBuf);
            }

            }
        else
            {
            // Double return or return of a never-dispatched buffer.
            CAMHAL_LOGDA("Frame returned when ref count is already zero!!");
            return;
            }
        }

    CAMHAL_LOGVB("REFCOUNT 0x%x %d", frameBuf, refCount);

    if ( NO_ERROR == res )
        {
        //check if someone is holding this buffer
        if ( 0 == refCount )
            {
#ifdef CAMERAHAL_DEBUG
            // Debug-only sanity check: flag buffers handed to Ducati twice.
            {
                android::AutoMutex locker(mBuffersWithDucatiLock);
                if((mBuffersWithDucati.indexOfKey((int)camera_buffer_get_omx_ptr(frameBuf)) >= 0) &&
                        ((CameraFrame::PREVIEW_FRAME_SYNC == frameType) ||
                        (CameraFrame::SNAPSHOT_FRAME == frameType)))
                    {
                    CAMHAL_LOGE("Buffer already with Ducati!! 0x%x", frameBuf);
                    for(int i=0;i<mBuffersWithDucati.size();i++) CAMHAL_LOGE("0x%x", mBuffersWithDucati.keyAt(i));
                    }
                mBuffersWithDucati.add((int)camera_buffer_get_omx_ptr(frameBuf),1);
            }
#endif
            res = fillThisBuffer(frameBuf, frameType);
            }
        }

}
+
/**
 * Central command dispatcher for the camera adapter.
 *
 * Most commands follow the same state-machine protocol:
 *   setState(operation) -> do the work -> commitState() on success,
 *   rollbackState() on any failure (ORed into ret so the original error code
 *   is preserved).
 *
 * @param operation Command to execute (CameraAdapter::CameraCommands).
 * @param value1..value4 Command-specific arguments; commonly value1 carries a
 *        BuffersDescriptor*, a CameraFrame*, or a struct timeval* reference
 *        timestamp for PPM instrumentation.
 * @return NO_ERROR on success, -EINVAL for bad arguments, or the error from
 *         the underlying operation / state transition.
 */
status_t BaseCameraAdapter::sendCommand(CameraCommands operation, int value1, int value2, int value3, int value4) {
    status_t ret = NO_ERROR;
    struct timeval *refTimestamp;
    BuffersDescriptor *desc = NULL;
    CameraFrame *frame = NULL;

    LOG_FUNCTION_NAME;

    switch ( operation ) {
        case CameraAdapter::CAMERA_USE_BUFFERS_PREVIEW:
                CAMHAL_LOGDA("Use buffers for preview");
                desc = ( BuffersDescriptor * ) value1;

                if ( NULL == desc )
                    {
                    CAMHAL_LOGEA("Invalid preview buffers!");
                    return -EINVAL;
                    }

                if ( ret == NO_ERROR )
                    {
                    ret = setState(operation);
                    }

                if ( ret == NO_ERROR )
                    {
                    android::AutoMutex lock(mPreviewBufferLock);
                    mPreviewBuffers = desc->mBuffers;
                    mPreviewBuffersLength = desc->mLength;
                    mPreviewBuffersAvailable.clear();
                    mSnapshotBuffersAvailable.clear();
                    // Queueable buffers start with ref count 0 (free).
                    for ( uint32_t i = 0 ; i < desc->mMaxQueueable ; i++ )
                        {
                        mPreviewBuffersAvailable.add(&mPreviewBuffers[i], 0);
                        }
                    // initial ref count for undeqeueued buffers is 1 since buffer provider
                    // is still holding on to it
                    for ( uint32_t i = desc->mMaxQueueable ; i < desc->mCount ; i++ )
                        {
                        mPreviewBuffersAvailable.add(&mPreviewBuffers[i], 1);
                        }
                    }

                if ( NULL != desc )
                    {
                    ret = useBuffers(CameraAdapter::CAMERA_PREVIEW,
                                     desc->mBuffers,
                                     desc->mCount,
                                     desc->mLength,
                                     desc->mMaxQueueable);
                    }

                if ( ret == NO_ERROR )
                    {
                    ret = commitState();
                    }
                else
                    {
                    ret |= rollbackState();
                    }

                break;

        case CameraAdapter::CAMERA_USE_BUFFERS_PREVIEW_DATA:
                    CAMHAL_LOGDA("Use buffers for preview data");
                    desc = ( BuffersDescriptor * ) value1;

                    if ( NULL == desc )
                        {
                        CAMHAL_LOGEA("Invalid preview data buffers!");
                        return -EINVAL;
                        }

                    if ( ret == NO_ERROR )
                        {
                        ret = setState(operation);
                        }

                    if ( ret == NO_ERROR )
                        {
                        android::AutoMutex lock(mPreviewDataBufferLock);
                        mPreviewDataBuffers = desc->mBuffers;
                        mPreviewDataBuffersLength = desc->mLength;
                        mPreviewDataBuffersAvailable.clear();
                        for ( uint32_t i = 0 ; i < desc->mMaxQueueable ; i++ )
                            {
                            mPreviewDataBuffersAvailable.add(&mPreviewDataBuffers[i], 0);
                            }
                        // initial ref count for undeqeueued buffers is 1 since buffer provider
                        // is still holding on to it
                        for ( uint32_t i = desc->mMaxQueueable ; i < desc->mCount ; i++ )
                            {
                            mPreviewDataBuffersAvailable.add(&mPreviewDataBuffers[i], 1);
                            }
                        }

                    if ( NULL != desc )
                        {
                        ret = useBuffers(CameraAdapter::CAMERA_MEASUREMENT,
                                         desc->mBuffers,
                                         desc->mCount,
                                         desc->mLength,
                                         desc->mMaxQueueable);
                        }

                    if ( ret == NO_ERROR )
                        {
                        ret = commitState();
                        }
                    else
                        {
                        ret |= rollbackState();
                        }

                    break;

        case CameraAdapter::CAMERA_USE_BUFFERS_IMAGE_CAPTURE:
                    CAMHAL_LOGDA("Use buffers for image capture");
                    desc = ( BuffersDescriptor * ) value1;

                    if ( NULL == desc )
                        {
                        CAMHAL_LOGEA("Invalid capture buffers!");
                        return -EINVAL;
                        }

                    if ( ret == NO_ERROR )
                        {
                        ret = setState(operation);
                        }

                    if ( ret == NO_ERROR )
                        {
                        android::AutoMutex lock(mCaptureBufferLock);
                        mCaptureBuffers = desc->mBuffers;
                        mCaptureBuffersLength = desc->mLength;
                        }

                    if ( NULL != desc )
                        {
                        ret = useBuffers(CameraAdapter::CAMERA_IMAGE_CAPTURE,
                                         desc->mBuffers,
                                         desc->mCount,
                                         desc->mLength,
                                         desc->mMaxQueueable);
                        }

                    if ( ret == NO_ERROR )
                        {
                        ret = commitState();
                        }
                    else
                        {
                        ret |= rollbackState();
                        }

                    break;

#ifdef OMAP_ENHANCEMENT_CPCAM
        case CameraAdapter::CAMERA_USE_BUFFERS_REPROCESS:
            CAMHAL_LOGDA("Use buffers for reprocessing");
            desc = (BuffersDescriptor *) value1;

            if (NULL == desc) {
                CAMHAL_LOGEA("Invalid capture buffers!");
                return -EINVAL;
            }

            if (ret == NO_ERROR) {
                ret = setState(operation);
            }

            if (ret == NO_ERROR) {
                android::AutoMutex lock(mVideoInBufferLock);
                mVideoInBuffers = desc->mBuffers;
                mVideoInBuffersAvailable.clear();
                for (uint32_t i = 0 ; i < desc->mMaxQueueable ; i++) {
                    mVideoInBuffersAvailable.add(&mVideoInBuffers[i], 0);
                }
                // initial ref count for undeqeueued buffers is 1 since buffer provider
                // is still holding on to it
                for ( uint32_t i = desc->mMaxQueueable ; i < desc->mCount ; i++ ) {
                    mVideoInBuffersAvailable.add(&mVideoInBuffers[i], 1);
                }
                ret = useBuffers(CameraAdapter::CAMERA_REPROCESS,
                                 desc->mBuffers,
                                 desc->mCount,
                                 desc->mLength,
                                 desc->mMaxQueueable);
            }

            if ( ret == NO_ERROR ) {
                ret = commitState();
            } else {
                ret |= rollbackState();
            }

            break;
#endif

        case CameraAdapter::CAMERA_START_SMOOTH_ZOOM:
            {

            if ( ret == NO_ERROR )
                {
                ret = setState(operation);
                }

            if ( ret == NO_ERROR )
                {
                // value1 carries the target zoom index.
                ret = startSmoothZoom(value1);
                }

            if ( ret == NO_ERROR )
                {
                ret = commitState();
                }
            else
                {
                ret |= rollbackState();
                }

            break;

            }

        case CameraAdapter::CAMERA_STOP_SMOOTH_ZOOM:
            {

            if ( ret == NO_ERROR )
                {
                ret = setState(operation);
                }

            if ( ret == NO_ERROR )
                {
                ret = stopSmoothZoom();
                }

            if ( ret == NO_ERROR )
                {
                ret = commitState();
                }
            else
                {
                ret |= rollbackState();
                }

            break;

            }

        case CameraAdapter::CAMERA_START_PREVIEW:
            {

            CAMHAL_LOGDA("Start Preview");

            if ( ret == NO_ERROR )
                {
                ret = setState(operation);
                }

            if ( ret == NO_ERROR )
                {
                ret = startPreview();
                }

            if ( ret == NO_ERROR )
                {
                ret = commitState();
                }
            else
                {
                ret |= rollbackState();
                }

            break;

            }

        case CameraAdapter::CAMERA_STOP_PREVIEW:
            {

            CAMHAL_LOGDA("Stop Preview");

            if ( ret == NO_ERROR )
                {
                ret = setState(operation);
                }

            if ( ret == NO_ERROR )
                {
                ret = stopPreview();
                }

            if ( ret == NO_ERROR )
                {
                ret = commitState();
                }
            else
                {
                ret |= rollbackState();
                }

            break;

            }

        case CameraAdapter::CAMERA_START_VIDEO:
            {

            CAMHAL_LOGDA("Start video recording");

            if ( ret == NO_ERROR )
                {
                ret = setState(operation);
                }

            if ( ret == NO_ERROR )
                {
                ret = startVideoCapture();
                }

            if ( ret == NO_ERROR )
                {
                ret = commitState();
                }
            else
                {
                ret |= rollbackState();
                }

            break;

            }

        case CameraAdapter::CAMERA_STOP_VIDEO:
            {

            CAMHAL_LOGDA("Stop video recording");

            if ( ret == NO_ERROR )
                {
                ret = setState(operation);
                }

            if ( ret == NO_ERROR )
                {
                ret = stopVideoCapture();
                }

            if ( ret == NO_ERROR )
                {
                ret = commitState();
                }
            else
                {
                ret |= rollbackState();
                }

            break;

            }

        case CameraAdapter::CAMERA_START_IMAGE_CAPTURE:
            {

#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS

            // value1 optionally carries the shot reference timestamp for
            // shot-to-X latency measurements.
            refTimestamp = ( struct timeval * ) value1;
            if ( NULL != refTimestamp )
                {
                memcpy( &mStartCapture, refTimestamp, sizeof( struct timeval ));
                }

#endif

            if ( ret == NO_ERROR )
                {
                ret = setState(operation);
                }

            if ( ret == NO_ERROR )
                {
                ret = takePicture();
                }

            if ( ret == NO_ERROR )
                {
                ret = commitState();
                }
            else
                {
                ret |= rollbackState();
                }

            break;

            }

        case CameraAdapter::CAMERA_STOP_IMAGE_CAPTURE:
            {

            if ( ret == NO_ERROR )
                {
                ret = setState(operation);
                }

            if ( ret == NO_ERROR )
                {
                ret = stopImageCapture();
                }

            if ( ret == NO_ERROR )
                {
                ret = commitState();
                }
            else
                {
                ret |= rollbackState();
                }

            break;

            }

        case CameraAdapter::CAMERA_START_BRACKET_CAPTURE:
            {

#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS

            // Note: for bracketing the reference timestamp arrives in value2
            // (value1 is the bracket range).
            refTimestamp = ( struct timeval * ) value2;
            if ( NULL != refTimestamp )
                {
                memcpy( &mStartCapture, refTimestamp, sizeof( struct timeval ));
                }

#endif

            if ( ret == NO_ERROR )
                {
                ret = setState(operation);
                }

            if ( ret == NO_ERROR )
                {
                ret = startBracketing(value1);
                }

            if ( ret == NO_ERROR )
                {
                ret = commitState();
                }
            else
                {
                ret |= rollbackState();
                }

            break;

            }

        case CameraAdapter::CAMERA_STOP_BRACKET_CAPTURE:
            {

            if ( ret == NO_ERROR )
                {
                ret = setState(operation);
                }

            if ( ret == NO_ERROR )
                {
                ret = stopBracketing();
                }

            if ( ret == NO_ERROR )
                {
                ret = commitState();
                }
            else
                {
                ret |= rollbackState();
                }

            break;

            }

        case CameraAdapter::CAMERA_PERFORM_AUTOFOCUS:

#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS

            refTimestamp = ( struct timeval * ) value1;
            if ( NULL != refTimestamp )
                {
                memcpy( &mStartFocus, refTimestamp, sizeof( struct timeval ));
                }

#endif

            if ( ret == NO_ERROR )
                {
                ret = setState(operation);
                }

            if ( ret == NO_ERROR )
                {
                ret = autoFocus();
                }

            if ( ret == NO_ERROR )
                {
                ret = commitState();
                }
            else
                {
                ret |= rollbackState();
                }

            break;

        case CameraAdapter::CAMERA_CANCEL_AUTOFOCUS:

            if ( ret == NO_ERROR )
                {
                ret = setState(operation);
                }

            if ( ret == NO_ERROR )
                {
                ret = cancelAutoFocus();
                }

            if ( ret == NO_ERROR )
                {
                ret = commitState();
                }
            else
                {
                ret |= rollbackState();
                }

            break;

        case CameraAdapter::CAMERA_QUERY_RESOLUTION_PREVIEW:

            if ( ret == NO_ERROR )
                {
                ret = setState(operation);
                }

            if ( ret == NO_ERROR )
                {
                // Result is written into the caller-provided CameraFrame.
                frame = ( CameraFrame * ) value1;

                if ( NULL != frame )
                    {
                    ret = getFrameSize(frame->mWidth, frame->mHeight);
                    }
                else
                    {
                    ret = -EINVAL;
                    }
                }

            if ( ret == NO_ERROR )
                {
                ret = commitState();
                }
            else
                {
                ret |= rollbackState();
                }

            break;

        case CameraAdapter::CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE:

            if ( ret == NO_ERROR )
                {
                ret = setState(operation);
                }

            if ( ret == NO_ERROR )
                {
                frame = ( CameraFrame * ) value1;

                if ( NULL != frame )
                    {
                    ret = getPictureBufferSize(*frame, value2);
                    }
                else
                    {
                    ret = -EINVAL;
                    }
                }

            if ( ret == NO_ERROR )
                {
                ret = commitState();
                }
            else
                {
                ret |= rollbackState();
                }

            break;

        case CameraAdapter::CAMERA_QUERY_BUFFER_SIZE_PREVIEW_DATA:

            if ( ret == NO_ERROR )
                {
                ret = setState(operation);
                }

            if ( ret == NO_ERROR )
                {
                frame = ( CameraFrame * ) value1;

                if ( NULL != frame )
                    {
                    ret = getFrameDataSize(frame->mLength, value2);
                    }
                else
                    {
                    ret = -EINVAL;
                    }
                }

            if ( ret == NO_ERROR )
                {
                ret = commitState();
                }
            else
                {
                ret |= rollbackState();
                }

            break;

        // Face detection start/stop bypass the state machine entirely.
        case CameraAdapter::CAMERA_START_FD:

            ret = startFaceDetection();

            break;

        case CameraAdapter::CAMERA_STOP_FD:

            ret = stopFaceDetection();

            break;

        case CameraAdapter::CAMERA_USE_BUFFERS_VIDEO_CAPTURE:

            CAMHAL_LOGDA("Use buffers for video (RAW + JPEG) capture");
            desc = ( BuffersDescriptor * ) value1;

            if ( NULL == desc ) {
                CAMHAL_LOGEA("Invalid capture buffers!");
                return -EINVAL;
            }

            if ( ret == NO_ERROR ) {
                ret = setState(operation);
            }

            if ( ret == NO_ERROR ) {
                android::AutoMutex lock(mVideoBufferLock);
                mVideoBuffers = desc->mBuffers;
                mVideoBuffersLength = desc->mLength;
                mVideoBuffersAvailable.clear();
                // NOTE: unlike the preview path, queueable video buffers also
                // start with ref count 1 here.
                for ( uint32_t i = 0 ; i < desc->mMaxQueueable ; i++ ) {
                    mVideoBuffersAvailable.add(&mVideoBuffers[i], 1);
                }
                // initial ref count for undeqeueued buffers is 1 since buffer provider
                // is still holding on to it
                for ( uint32_t i = desc->mMaxQueueable ; i < desc->mCount ; i++ ) {
                    mVideoBuffersAvailable.add(&mVideoBuffers[i], 1);
                }
            }

            if ( NULL != desc ) {
                ret = useBuffers(CameraAdapter::CAMERA_VIDEO,
                                 desc->mBuffers,
                                 desc->mCount,
                                 desc->mLength,
                                 desc->mMaxQueueable);
            }

            if ( ret == NO_ERROR ) {
                ret = commitState();
            } else {
                ret |= rollbackState();
            }

            break;

        case CameraAdapter::CAMERA_SWITCH_TO_EXECUTING:
            ret = switchToExecuting();
            break;

#ifdef OMAP_ENHANCEMENT_VTC
        case CameraAdapter::CAMERA_SETUP_TUNNEL:
            ret = setupTunnel(value1, value2, value3, value4);
            break;

        case CameraAdapter::CAMERA_DESTROY_TUNNEL:
            ret = destroyTunnel();
            break;
#endif

        case CameraAdapter::CAMERA_PREVIEW_INITIALIZATION:
            ret = cameraPreviewInitialization();
            break;

        default:
            CAMHAL_LOGEB("Command 0x%x unsupported!", operation);
            break;
    };

    LOG_FUNCTION_NAME_EXIT;
    return ret;
}
+
/**
 * Deliver an EVENT_FOCUS_LOCKED notification with the given focus status to
 * every registered focus subscriber.
 *
 * @param status Focus result to report (e.g. pending / success / fail).
 * @return NO_ERROR on success, NO_INIT when there are no subscribers,
 *         -ENOMEM if the event payload cannot be allocated.
 */
status_t BaseCameraAdapter::notifyFocusSubscribers(CameraHalEvent::FocusStatus status)
{
    event_callback eventCb;
    CameraHalEvent focusEvent;
    status_t ret = NO_ERROR;

    LOG_FUNCTION_NAME;

    if ( mFocusSubscribers.size() == 0 ) {
        CAMHAL_LOGDA("No Focus Subscribers!");
        return NO_INIT;
    }

#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
    if (status == CameraHalEvent::FOCUS_STATUS_PENDING) {
        gettimeofday(&mStartFocus, NULL);
    } else {
        //dump the AF latency
        CameraHal::PPM("Focus finished in: ", &mStartFocus);
    }
#endif

    focusEvent.mEventData = new CameraHalEvent::CameraHalEventData();
    if ( NULL == focusEvent.mEventData.get() ) {
        return -ENOMEM;
    }

    focusEvent.mEventType = CameraHalEvent::EVENT_FOCUS_LOCKED;
    focusEvent.mEventData->focusEvent.focusStatus = status;

    // Each subscriber receives the same event; mCookie identifies the
    // subscriber so the callback can route it to the right client.
    for (unsigned int i = 0 ; i < mFocusSubscribers.size(); i++ )
        {
        focusEvent.mCookie = (void *) mFocusSubscribers.keyAt(i);
        eventCb = (event_callback) mFocusSubscribers.valueAt(i);
        eventCb ( &focusEvent );
        }

    focusEvent.mEventData.clear();

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
+
/**
 * Deliver an EVENT_SHUTTER notification (shutterClosed = true) to every
 * registered shutter subscriber.
 *
 * @return NO_ERROR on success, NO_INIT when there are no subscribers,
 *         -ENOMEM if the event payload cannot be allocated.
 */
status_t BaseCameraAdapter::notifyShutterSubscribers()
{
    CameraHalEvent shutterEvent;
    event_callback eventCb;
    status_t ret = NO_ERROR;

    LOG_FUNCTION_NAME;

    if ( mShutterSubscribers.size() == 0 )
        {
        CAMHAL_LOGEA("No shutter Subscribers!");
        return NO_INIT;
        }

    shutterEvent.mEventData = new CameraHalEvent::CameraHalEventData();
    if ( NULL == shutterEvent.mEventData.get() ) {
        return -ENOMEM;
    }

    shutterEvent.mEventType = CameraHalEvent::EVENT_SHUTTER;
    shutterEvent.mEventData->shutterEvent.shutterClosed = true;

    for (unsigned int i = 0 ; i < mShutterSubscribers.size() ; i++ ) {
        // mCookie identifies the subscriber receiving this delivery.
        shutterEvent.mCookie = ( void * ) mShutterSubscribers.keyAt(i);
        eventCb = ( event_callback ) mShutterSubscribers.valueAt(i);

        CAMHAL_LOGD("Sending shutter callback");

        eventCb ( &shutterEvent );
    }

    shutterEvent.mEventData.clear();

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
+
/**
 * Deliver an EVENT_ZOOM_INDEX_REACHED notification to every registered zoom
 * subscriber.
 *
 * @param zoomIdx       Zoom index just reached.
 * @param targetReached True when the smooth-zoom target index was reached.
 * @return NO_ERROR on success, NO_INIT when there are no subscribers,
 *         -ENOMEM if the event payload cannot be allocated.
 */
status_t BaseCameraAdapter::notifyZoomSubscribers(int zoomIdx, bool targetReached)
{
    event_callback eventCb;
    CameraHalEvent zoomEvent;
    status_t ret = NO_ERROR;

    LOG_FUNCTION_NAME;

    if ( mZoomSubscribers.size() == 0 ) {
        CAMHAL_LOGDA("No zoom Subscribers!");
        return NO_INIT;
    }

    zoomEvent.mEventData = new CameraHalEvent::CameraHalEventData();
    if ( NULL == zoomEvent.mEventData.get() ) {
        return -ENOMEM;
    }

    zoomEvent.mEventType = CameraHalEvent::EVENT_ZOOM_INDEX_REACHED;
    zoomEvent.mEventData->zoomEvent.currentZoomIndex = zoomIdx;
    zoomEvent.mEventData->zoomEvent.targetZoomIndexReached = targetReached;

    for (unsigned int i = 0 ; i < mZoomSubscribers.size(); i++ ) {
        zoomEvent.mCookie = (void *) mZoomSubscribers.keyAt(i);
        eventCb = (event_callback) mZoomSubscribers.valueAt(i);

        eventCb ( &zoomEvent );
    }

    zoomEvent.mEventData.clear();

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
+
/**
 * Deliver an EVENT_METADATA notification carrying the given preview-metadata
 * result to every registered metadata subscriber.
 *
 * @param meta Shared pointer to the metadata result; stored by reference in
 *             the event payload (subscribers share the same sp).
 * @return NO_ERROR on success, NO_INIT when there are no subscribers,
 *         -ENOMEM if the event payload cannot be allocated.
 */
status_t BaseCameraAdapter::notifyMetadataSubscribers(android::sp<CameraMetadataResult> &meta)
{
    event_callback eventCb;
    CameraHalEvent metaEvent;
    status_t ret = NO_ERROR;

    LOG_FUNCTION_NAME;

    if ( mMetadataSubscribers.size() == 0 ) {
        CAMHAL_LOGDA("No preview metadata subscribers!");
        return NO_INIT;
    }

    metaEvent.mEventData = new CameraHalEvent::CameraHalEventData();
    if ( NULL == metaEvent.mEventData.get() ) {
        return -ENOMEM;
    }

    metaEvent.mEventType = CameraHalEvent::EVENT_METADATA;
    metaEvent.mEventData->metadataEvent = meta;

    for (unsigned int i = 0 ; i < mMetadataSubscribers.size(); i++ ) {
        metaEvent.mCookie = (void *) mMetadataSubscribers.keyAt(i);
        eventCb = (event_callback) mMetadataSubscribers.valueAt(i);

        eventCb ( &metaEvent );
    }

    metaEvent.mEventData.clear();

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
+
/**
 * Fan a frame out to the subscriber list of every frame type set in
 * frame->mFrameMask. Bits are cleared from the mask as each type is
 * dispatched; on the first dispatch error the remaining bits are left set and
 * the error is returned.
 *
 * @param frame Frame to dispatch; mFrameMask selects the target lists.
 * @return NO_ERROR, -EINVAL for a NULL frame, or the first dispatch error.
 */
status_t BaseCameraAdapter::sendFrameToSubscribers(CameraFrame *frame)
{
    status_t ret = NO_ERROR;
    unsigned int mask;

    if ( NULL == frame )
        {
        CAMHAL_LOGEA("Invalid CameraFrame");
        return -EINVAL;
        }

    // Walk each single-type bit below ALL_FRAMES.
    for( mask = 1; mask < CameraFrame::ALL_FRAMES; mask <<= 1){
        if( mask & frame->mFrameMask ){
            switch( mask ){

            case CameraFrame::IMAGE_FRAME:
                {
#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
                // Shot-to-JPEG latency instrumentation point.
                CameraHal::PPM("Shot to Jpeg: ", &mStartCapture);
#endif
                ret = __sendFrameToSubscribers(frame, &mImageSubscribers, CameraFrame::IMAGE_FRAME);
                }
                break;
            case CameraFrame::RAW_FRAME:
                {
                ret = __sendFrameToSubscribers(frame, &mRawSubscribers, CameraFrame::RAW_FRAME);
                }
                break;
            case CameraFrame::PREVIEW_FRAME_SYNC:
                {
                ret = __sendFrameToSubscribers(frame, &mFrameSubscribers, CameraFrame::PREVIEW_FRAME_SYNC);
                }
                break;
            case CameraFrame::SNAPSHOT_FRAME:
                {
                ret = __sendFrameToSubscribers(frame, &mSnapshotSubscribers, CameraFrame::SNAPSHOT_FRAME);
                }
                break;
            case CameraFrame::VIDEO_FRAME_SYNC:
                {
                ret = __sendFrameToSubscribers(frame, &mVideoSubscribers, CameraFrame::VIDEO_FRAME_SYNC);
                }
                break;
            case CameraFrame::FRAME_DATA_SYNC:
                {
                ret = __sendFrameToSubscribers(frame, &mFrameDataSubscribers, CameraFrame::FRAME_DATA_SYNC);
                }
                break;
            case CameraFrame::REPROCESS_INPUT_FRAME:
                {
                ret = __sendFrameToSubscribers(frame, &mVideoInSubscribers, CameraFrame::REPROCESS_INPUT_FRAME);
                }
                break;
            default:
                CAMHAL_LOGEB("FRAMETYPE NOT SUPPORTED 0x%x", mask);
                break;
            }//SWITCH
            // Mark this type as handled regardless of the switch outcome.
            frame->mFrameMask &= ~mask;

            if (ret != NO_ERROR) {
                goto EXIT;
            }
        }//IF
    }//FOR

 EXIT:
    return ret;
}
+
/**
 * Deliver one frame of a single type to its subscriber list.
 *
 * The frame's ref count for @p frameType (set earlier via
 * setInitFrameRefCount) bounds how many subscribers are invoked: the first
 * refCount entries of @p subscribers receive the frame.
 * NOTE(review): this assumes the ref count matches the leading entries of the
 * subscriber map — confirm subscribers cannot be removed between ref-count
 * initialization and dispatch.
 *
 * @param frame       Frame to deliver; mFrameType/mYuv/mCookie are updated.
 * @param subscribers Key(cookie) -> frame_callback map for this frame type.
 * @param frameType   Type being dispatched.
 * @return NO_ERROR, or -EINVAL on missing queue entry / bad ref count /
 *         NULL callback or subscriber list.
 */
status_t BaseCameraAdapter::__sendFrameToSubscribers(CameraFrame* frame,
                                                     android::KeyedVector<int, frame_callback> *subscribers,
                                                     CameraFrame::FrameType frameType)
{
    size_t refCount = 0;
    status_t ret = NO_ERROR;
    frame_callback callback = NULL;

    frame->mFrameType = frameType;

    if ( (frameType == CameraFrame::PREVIEW_FRAME_SYNC) ||
         (frameType == CameraFrame::VIDEO_FRAME_SYNC) ||
         (frameType == CameraFrame::SNAPSHOT_FRAME) ){
        // Patch plane addresses from the bookkeeping added in addFramePointers().
        if (mFrameQueue.size() > 0){
            CameraFrame *lframe = (CameraFrame *)mFrameQueue.valueFor(frame->mBuffer);
            frame->mYuv[0] = lframe->mYuv[0];
            // Chroma offset derived from length+offset; assumes an NV12-style
            // 4:2:0 layout where luma is 2/3 of the buffer — TODO confirm.
            frame->mYuv[1] = frame->mYuv[0] + (frame->mLength + frame->mOffset)*2/3;
        }
        else{
            CAMHAL_LOGDA("Empty Frame Queue");
            return -EINVAL;
        }
    }

    if (NULL != subscribers) {
        refCount = getFrameRefCountByType(frame->mBuffer, frameType);

        if (refCount == 0) {
            CAMHAL_LOGDA("Invalid ref count of 0");
            return -EINVAL;
        }

        if (refCount > subscribers->size()) {
            CAMHAL_LOGEB("Invalid ref count for frame type: 0x%x", frameType);
            return -EINVAL;
        }

        CAMHAL_LOGVB("Type of Frame: 0x%x address: 0x%x refCount start %d",
                     frame->mFrameType,
                     ( uint32_t ) frame->mBuffer,
                     refCount);

        for ( unsigned int i = 0 ; i < refCount; i++ ) {
            frame->mCookie = ( void * ) subscribers->keyAt(i);
            callback = (frame_callback) subscribers->valueAt(i);

            if (!callback) {
                CAMHAL_LOGEB("callback not set for frame type: 0x%x", frameType);
                return -EINVAL;
            }

            callback(frame);
        }
    } else {
        CAMHAL_LOGEA("Subscribers is null??");
        return -EINVAL;
    }

    return ret;
}
+
/**
 * Initialize a buffer's per-type reference count to the current number of
 * subscribers for each frame type present in @p mask. Called before a frame
 * is dispatched so that returnFrame() can count deliveries back down to zero.
 *
 * @param buf  Buffer whose ref counts are initialized; -EINVAL if NULL.
 * @param mask Bit-OR of CameraFrame frame-type bits to initialize.
 * @return NO_ERROR, or -EINVAL for a NULL buffer.
 */
int BaseCameraAdapter::setInitFrameRefCount(CameraBuffer * buf, unsigned int mask)
{
    int ret = NO_ERROR;
    unsigned int lmask;

    LOG_FUNCTION_NAME;

    if (buf == NULL)
        {
        return -EINVAL;
        }

    // Visit each single-type bit below ALL_FRAMES that is set in mask.
    for( lmask = 1; lmask < CameraFrame::ALL_FRAMES; lmask <<= 1){
        if( lmask & mask ){
            switch( lmask ){

            case CameraFrame::IMAGE_FRAME:
                {
                setFrameRefCountByType(buf, CameraFrame::IMAGE_FRAME, (int) mImageSubscribers.size());
                }
                break;
            case CameraFrame::RAW_FRAME:
                {
                setFrameRefCountByType(buf, CameraFrame::RAW_FRAME, mRawSubscribers.size());
                }
                break;
            case CameraFrame::PREVIEW_FRAME_SYNC:
                {
                setFrameRefCountByType(buf, CameraFrame::PREVIEW_FRAME_SYNC, mFrameSubscribers.size());
                }
                break;
            case CameraFrame::SNAPSHOT_FRAME:
                {
                setFrameRefCountByType(buf, CameraFrame::SNAPSHOT_FRAME, mSnapshotSubscribers.size());
                }
                break;
            case CameraFrame::VIDEO_FRAME_SYNC:
                {
                setFrameRefCountByType(buf,CameraFrame::VIDEO_FRAME_SYNC, mVideoSubscribers.size());
                }
                break;
            case CameraFrame::FRAME_DATA_SYNC:
                {
                setFrameRefCountByType(buf, CameraFrame::FRAME_DATA_SYNC, mFrameDataSubscribers.size());
                }
                break;
            case CameraFrame::REPROCESS_INPUT_FRAME:
                {
                setFrameRefCountByType(buf,CameraFrame::REPROCESS_INPUT_FRAME, mVideoInSubscribers.size());
                }
                break;
            default:
                CAMHAL_LOGEB("FRAMETYPE NOT SUPPORTED 0x%x", lmask);
                break;
            }//SWITCH
            // Clear the bit we just handled.
            mask &= ~lmask;
        }//IF
    }//FOR
    LOG_FUNCTION_NAME_EXIT;
    return ret;
}
+
+int BaseCameraAdapter::getFrameRefCount(CameraBuffer * frameBuf)
+{
+ int res = 0, refCnt = 0;
+
+ for (unsigned int frameType = 1; frameType < CameraFrame::ALL_FRAMES; frameType <<= 1) {
+ refCnt = getFrameRefCountByType(frameBuf, static_cast<CameraFrame::FrameType>(frameType));
+ if (refCnt > 0) res += refCnt;
+ }
+ return res;
+}
+
/**
 * Look up a buffer's reference count for one frame type, taking the lock that
 * guards that type's availability table.
 *
 * @param frameBuf  Buffer to look up.
 * @param frameType Frame type selecting the table (IMAGE and RAW share the
 *                  capture table).
 * @return The stored ref count, or -1 when the buffer has no entry for this
 *         type (or the type is unknown).
 */
int BaseCameraAdapter::getFrameRefCountByType(CameraBuffer * frameBuf, CameraFrame::FrameType frameType)
{
    int res = -1;
    ssize_t index = NAME_NOT_FOUND;

    LOG_FUNCTION_NAME;

    switch (frameType) {
        case CameraFrame::IMAGE_FRAME:
        case CameraFrame::RAW_FRAME:
        {
            android::AutoMutex lock(mCaptureBufferLock);
            index = mCaptureBuffersAvailable.indexOfKey(frameBuf);
            if (index != NAME_NOT_FOUND) {
                res = mCaptureBuffersAvailable[index];
            }
            break;
        }
        case CameraFrame::SNAPSHOT_FRAME:
        {
            android::AutoMutex lock(mSnapshotBufferLock);
            index = mSnapshotBuffersAvailable.indexOfKey(frameBuf);
            if (index != NAME_NOT_FOUND) {
                res = mSnapshotBuffersAvailable[index];
            }
            break;
        }
        case CameraFrame::PREVIEW_FRAME_SYNC:
        {
            android::AutoMutex lock(mPreviewBufferLock);
            index = mPreviewBuffersAvailable.indexOfKey(frameBuf);
            if (index != NAME_NOT_FOUND) {
                res = mPreviewBuffersAvailable[index];
            }
            break;
        }
        case CameraFrame::FRAME_DATA_SYNC:
        {
            android::AutoMutex lock(mPreviewDataBufferLock);
            index = mPreviewDataBuffersAvailable.indexOfKey(frameBuf);
            if (index != NAME_NOT_FOUND) {
                res = mPreviewDataBuffersAvailable[index];
            }
            break;
        }
        case CameraFrame::VIDEO_FRAME_SYNC:
        {
            android::AutoMutex lock(mVideoBufferLock);
            index = mVideoBuffersAvailable.indexOfKey(frameBuf);
            if (index != NAME_NOT_FOUND) {
                res = mVideoBuffersAvailable[index];
            }
            break;
        }
        case CameraFrame::REPROCESS_INPUT_FRAME:
        {
            android::AutoMutex lock(mVideoInBufferLock);
            index = mVideoInBuffersAvailable.indexOfKey(frameBuf);
            if (index != NAME_NOT_FOUND) {
                res = mVideoInBuffersAvailable[index];
            }
            break;
        }
        default:
            break;
    }

    LOG_FUNCTION_NAME_EXIT;

    return res;
}
+
/**
 * Store a buffer's reference count for one frame type, taking the lock that
 * guards that type's availability table. replaceValueFor inserts the entry if
 * it did not exist yet. Unknown frame types are ignored silently.
 *
 * @param frameBuf  Buffer whose count is updated.
 * @param frameType Frame type selecting the table (IMAGE and RAW share the
 *                  capture table).
 * @param refCount  New reference count to store.
 */
void BaseCameraAdapter::setFrameRefCountByType(CameraBuffer * frameBuf, CameraFrame::FrameType frameType, int refCount)
{

    LOG_FUNCTION_NAME;

    switch ( frameType )
        {
        case CameraFrame::IMAGE_FRAME:
        case CameraFrame::RAW_FRAME:
            {
            android::AutoMutex lock(mCaptureBufferLock);
            mCaptureBuffersAvailable.replaceValueFor(frameBuf, refCount);
            }
            break;
        case CameraFrame::SNAPSHOT_FRAME:
            {
            android::AutoMutex lock(mSnapshotBufferLock);
            mSnapshotBuffersAvailable.replaceValueFor(frameBuf, refCount);
            }
            break;
        case CameraFrame::PREVIEW_FRAME_SYNC:
            {
            android::AutoMutex lock(mPreviewBufferLock);
            mPreviewBuffersAvailable.replaceValueFor(frameBuf, refCount);
            }
            break;
        case CameraFrame::FRAME_DATA_SYNC:
            {
            android::AutoMutex lock(mPreviewDataBufferLock);
            mPreviewDataBuffersAvailable.replaceValueFor(frameBuf, refCount);
            }
            break;
        case CameraFrame::VIDEO_FRAME_SYNC:
            {
            android::AutoMutex lock(mVideoBufferLock);
            mVideoBuffersAvailable.replaceValueFor(frameBuf, refCount);
            }
            break;
        case CameraFrame::REPROCESS_INPUT_FRAME: {
            android::AutoMutex lock(mVideoInBufferLock);
            mVideoInBuffersAvailable.replaceValueFor(frameBuf, refCount);
        }
            break;
        default:
            break;
        };

    LOG_FUNCTION_NAME_EXIT;

}
+
/**
 * Begin video capture: seed the video buffer table with every current preview
 * buffer (ref count 0 — not yet handed to the encoder) and set mRecording.
 *
 * @return NO_ERROR, or NO_INIT if recording is already in progress.
 *
 * NOTE(review): mPreviewBuffersAvailable is read while holding only
 * mVideoBufferLock (not mPreviewBufferLock) — presumably safe because preview
 * buffer registration cannot race with start of recording; confirm.
 */
status_t BaseCameraAdapter::startVideoCapture()
{
    status_t ret = NO_ERROR;

    LOG_FUNCTION_NAME;

    android::AutoMutex lock(mVideoBufferLock);

    //If the capture is already ongoing, return from here.
    if ( mRecording )
        {
        ret = NO_INIT;
        }


    if ( NO_ERROR == ret )
        {

        mVideoBuffersAvailable.clear();

        // Video frames are delivered from the preview stream, so every
        // preview buffer becomes a potential video buffer.
        for ( unsigned int i = 0 ; i < mPreviewBuffersAvailable.size() ; i++ )
            {
            mVideoBuffersAvailable.add(mPreviewBuffersAvailable.keyAt(i), 0);
            }

        mRecording = true;
        }

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
+
/**
 * Stop video capture: release any video-frame references still outstanding
 * (buffers the encoder never returned) and clear mRecording.
 *
 * @return NO_ERROR, or NO_INIT if recording was not in progress.
 *
 * NOTE(review): mVideoBuffersAvailable is iterated without mVideoBufferLock;
 * presumably frame delivery has already stopped by the time this runs —
 * confirm against the caller (sendCommand CAMERA_STOP_VIDEO).
 */
status_t BaseCameraAdapter::stopVideoCapture()
{
    status_t ret = NO_ERROR;

    LOG_FUNCTION_NAME;

    if ( !mRecording )
        {
        ret = NO_INIT;
        }

    if ( NO_ERROR == ret )
        {
        for ( unsigned int i = 0 ; i < mVideoBuffersAvailable.size() ; i++ )
            {
            CameraBuffer *frameBuf = mVideoBuffersAvailable.keyAt(i);
            // Force-return buffers still counted against the encoder.
            if( getFrameRefCountByType(frameBuf, CameraFrame::VIDEO_FRAME_SYNC) > 0)
                {
                returnFrame(frameBuf, CameraFrame::VIDEO_FRAME_SYNC);
                }
            }

        mRecording = false;
        }

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
+
+//-----------------Stub implementation of the interface ------------------------------
+
+status_t BaseCameraAdapter::takePicture()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::stopImageCapture()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::startBracketing(int range)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::stopBracketing()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::autoFocus()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ notifyFocusSubscribers(CameraHalEvent::FOCUS_STATUS_FAIL);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::cancelAutoFocus()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::startSmoothZoom(int targetIdx)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::stopSmoothZoom()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::startPreview()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::stopPreview()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::useBuffers(CameraMode mode, CameraBuffer* bufArr, int num, size_t length, unsigned int queueable)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::fillThisBuffer(CameraBuffer * frameBuf, CameraFrame::FrameType frameType)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::getFrameSize(size_t &width, size_t &height)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::getFrameDataSize(size_t &dataFrameSize, size_t bufferCount)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::getPictureBufferSize(CameraFrame &frame, size_t bufferCount)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::startFaceDetection()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::stopFaceDetection()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::switchToExecuting()
+{
+ status_t ret = NO_ERROR;
+ LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+const char* BaseCameraAdapter::getLUTvalue_translateHAL(int Value, LUTtypeHAL LUT) {
+ int LUTsize = LUT.size;
+ for(int i = 0; i < LUTsize; i++)
+ if( LUT.Table[i].halDefinition == Value )
+ return LUT.Table[i].userDefinition;
+
+ return NULL;
+}
+
+status_t BaseCameraAdapter::setupTunnel(uint32_t SliceHeight, uint32_t EncoderHandle, uint32_t width, uint32_t height) {
+ status_t ret = NO_ERROR;
+ LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+status_t BaseCameraAdapter::destroyTunnel() {
+ status_t ret = NO_ERROR;
+ LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+status_t BaseCameraAdapter::cameraPreviewInitialization() {
+ status_t ret = NO_ERROR;
+ LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+status_t BaseCameraAdapter::setState(CameraCommands operation)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ const char *printState = getLUTvalue_translateHAL(operation, CamCommandsLUT);
+
+ mLock.lock();
+
+ switch ( mAdapterState )
+ {
+
+ case INTIALIZED_STATE:
+
+ switch ( operation )
+ {
+
+ case CAMERA_USE_BUFFERS_PREVIEW:
+ CAMHAL_LOGDB("Adapter state switch INTIALIZED_STATE->LOADED_PREVIEW_STATE event = %s",
+ printState);
+ mNextState = LOADED_PREVIEW_STATE;
+ break;
+
+ //These events don't change the current state
+ case CAMERA_QUERY_RESOLUTION_PREVIEW:
+ case CAMERA_QUERY_BUFFER_SIZE_PREVIEW_DATA:
+ CAMHAL_LOGDB("Adapter state switch INTIALIZED_STATE->INTIALIZED_STATE event = %s",
+ printState);
+ mNextState = INTIALIZED_STATE;
+ break;
+ case CAMERA_STOP_BRACKET_CAPTURE:
+ case CAMERA_STOP_IMAGE_CAPTURE:
+ ret = INVALID_OPERATION;
+ break;
+ case CAMERA_CANCEL_AUTOFOCUS:
+ ret = INVALID_OPERATION;
+ break;
+
+ default:
+ CAMHAL_LOGEB("Adapter state switch INTIALIZED_STATE Invalid Op! event = %s",
+ printState);
+ ret = INVALID_OPERATION;
+ break;
+
+ }
+
+ break;
+
+ case LOADED_PREVIEW_STATE:
+
+ switch ( operation )
+ {
+
+ case CAMERA_START_PREVIEW:
+ CAMHAL_LOGDB("Adapter state switch LOADED_PREVIEW_STATE->PREVIEW_STATE event = %s",
+ printState);
+ mNextState = PREVIEW_STATE;
+ break;
+
+ case CAMERA_STOP_PREVIEW:
+ CAMHAL_LOGDB("Adapter state switch LOADED_PREVIEW_STATE->INTIALIZED_STATE event = 0x%x",
+ operation);
+ mNextState = INTIALIZED_STATE;
+ break;
+
+ //These events don't change the current state
+ case CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE:
+ case CAMERA_QUERY_BUFFER_SIZE_PREVIEW_DATA:
+ case CAMERA_USE_BUFFERS_PREVIEW_DATA:
+ CAMHAL_LOGDB("Adapter state switch LOADED_PREVIEW_STATE->LOADED_PREVIEW_STATE event = %s",
+ printState);
+ mNextState = LOADED_PREVIEW_STATE;
+ break;
+
+ default:
+ CAMHAL_LOGDB("Adapter state switch LOADED_PREVIEW Invalid Op! event = %s",
+ printState);
+ ret = INVALID_OPERATION;
+ break;
+
+ }
+
+ break;
+
+ case PREVIEW_STATE:
+
+ switch ( operation )
+ {
+
+ case CAMERA_STOP_PREVIEW:
+ CAMHAL_LOGDB("Adapter state switch PREVIEW_STATE->INTIALIZED_STATE event = %s",
+ printState);
+ mNextState = INTIALIZED_STATE;
+ break;
+
+ case CAMERA_PERFORM_AUTOFOCUS:
+ CAMHAL_LOGDB("Adapter state switch PREVIEW_STATE->AF_STATE event = %s",
+ printState);
+ mNextState = AF_STATE;
+ break;
+
+ case CAMERA_START_IMAGE_CAPTURE:
+ CAMHAL_LOGDB("Adapter state switch AF_STATE->CAPTURE_STATE event = 0x%x",
+ operation);
+ mNextState = CAPTURE_STATE;
+ break;
+
+ case CAMERA_START_SMOOTH_ZOOM:
+ CAMHAL_LOGDB("Adapter state switch PREVIEW_STATE->ZOOM_STATE event = %s",
+ printState);
+ mNextState = ZOOM_STATE;
+ break;
+
+ case CAMERA_USE_BUFFERS_IMAGE_CAPTURE:
+ CAMHAL_LOGDB("Adapter state switch PREVIEW_STATE->LOADED_CAPTURE_STATE event = %s",
+ printState);
+ mNextState = LOADED_CAPTURE_STATE;
+ break;
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ case CAMERA_USE_BUFFERS_REPROCESS:
+ CAMHAL_LOGDB("Adapter state switch PREVIEW_STATE->LOADED_REPROCESS_STATE event = %s",
+ printState);
+ mNextState = LOADED_REPROCESS_STATE;
+ break;
+#endif
+
+ case CAMERA_START_VIDEO:
+ CAMHAL_LOGDB("Adapter state switch PREVIEW_STATE->VIDEO_STATE event = %s",
+ printState);
+ mNextState = VIDEO_STATE;
+ break;
+
+ case CAMERA_CANCEL_AUTOFOCUS:
+ case CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE:
+ case CAMERA_STOP_SMOOTH_ZOOM:
+ CAMHAL_LOGDB("Adapter state switch PREVIEW_ACTIVE->PREVIEW_ACTIVE event = %s",
+ printState);
+ mNextState = PREVIEW_STATE;
+ break;
+
+ case CAMERA_STOP_IMAGE_CAPTURE:
+ case CAMERA_STOP_BRACKET_CAPTURE:
+ CAMHAL_LOGDB("Adapter state switch PREVIEW_ACTIVE->PREVIEW_ACTIVE event = %s",
+ printState);
+ ret = INVALID_OPERATION;
+ break;
+
+ default:
+ CAMHAL_LOGEB("Adapter state switch PREVIEW_ACTIVE Invalid Op! event = %s",
+ printState);
+ ret = INVALID_OPERATION;
+ break;
+
+ }
+
+ break;
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ case LOADED_REPROCESS_STATE:
+ switch (operation) {
+ case CAMERA_USE_BUFFERS_IMAGE_CAPTURE:
+ CAMHAL_LOGDB("Adapter state switch LOADED_REPROCESS_STATE->LOADED_REPROCESS_CAPTURE_STATE event = %s",
+ printState);
+ mNextState = LOADED_REPROCESS_CAPTURE_STATE;
+ break;
+ case CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE:
+ CAMHAL_LOGDB("Adapter state switch LOADED_REPROCESS_STATE->LOADED_REPROCESS_STATE event = %s",
+ printState);
+ mNextState = LOADED_REPROCESS_STATE;
+ break;
+ default:
+ CAMHAL_LOGEB("Adapter state switch LOADED_REPROCESS_STATE Invalid Op! event = %s",
+ printState);
+ ret = INVALID_OPERATION;
+ break;
+ }
+
+ break;
+
+ case LOADED_REPROCESS_CAPTURE_STATE:
+ switch (operation) {
+ case CAMERA_START_IMAGE_CAPTURE:
+ CAMHAL_LOGDB("Adapter state switch LOADED_REPROCESS_CAPTURE_STATE->REPROCESS_STATE event = %s",
+ printState);
+ mNextState = REPROCESS_STATE;
+ break;
+ default:
+ CAMHAL_LOGEB("Adapter state switch LOADED_REPROCESS_CAPTURE_STATE Invalid Op! event = %s",
+ printState);
+ ret = INVALID_OPERATION;
+ break;
+ }
+ break;
+#endif
+
+ case LOADED_CAPTURE_STATE:
+
+ switch ( operation )
+ {
+
+ case CAMERA_START_IMAGE_CAPTURE:
+ CAMHAL_LOGDB("Adapter state switch LOADED_CAPTURE_STATE->CAPTURE_STATE event = %s",
+ printState);
+ mNextState = CAPTURE_STATE;
+ break;
+
+ case CAMERA_START_BRACKET_CAPTURE:
+ CAMHAL_LOGDB("Adapter state switch LOADED_CAPTURE_STATE->BRACKETING_STATE event = %s",
+ printState);
+ mNextState = BRACKETING_STATE;
+ break;
+
+ case CAMERA_USE_BUFFERS_VIDEO_CAPTURE:
+ //Hadnle this state for raw capture path.
+ //Just need to keep the same state.
+ //The next CAMERA_START_IMAGE_CAPTURE command will assign the mNextState.
+ CAMHAL_LOGDB("Adapter state switch LOADED_CAPTURE_STATE->LOADED_CAPTURE_STATE event = %s",
+ printState);
+ break;
+
+ default:
+ CAMHAL_LOGEB("Adapter state switch LOADED_CAPTURE_STATE Invalid Op! event = %s",
+ printState);
+ ret = INVALID_OPERATION;
+ break;
+
+ }
+
+ break;
+
+ case CAPTURE_STATE:
+
+ switch ( operation )
+ {
+ case CAMERA_STOP_IMAGE_CAPTURE:
+ case CAMERA_STOP_BRACKET_CAPTURE:
+ CAMHAL_LOGDB("Adapter state switch CAPTURE_STATE->PREVIEW_STATE event = %s",
+ printState);
+ mNextState = PREVIEW_STATE;
+ break;
+
+ case CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE:
+ case CAMERA_START_IMAGE_CAPTURE:
+ CAMHAL_LOGDB("Adapter state switch CAPTURE_STATE->CAPTURE_STATE event = %s",
+ printState);
+ mNextState = CAPTURE_STATE;
+ break;
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ case CAMERA_USE_BUFFERS_REPROCESS:
+ CAMHAL_LOGDB("Adapter state switch CAPTURE_STATE->->LOADED_REPROCESS_STATE event = %s",
+ printState);
+ mNextState = LOADED_REPROCESS_STATE;
+ break;
+#endif
+
+ default:
+ CAMHAL_LOGEB("Adapter state switch CAPTURE_STATE Invalid Op! event = %s",
+ printState);
+ ret = INVALID_OPERATION;
+ break;
+
+ }
+
+ break;
+
+ case BRACKETING_STATE:
+
+ switch ( operation )
+ {
+
+ case CAMERA_STOP_IMAGE_CAPTURE:
+ case CAMERA_STOP_BRACKET_CAPTURE:
+ CAMHAL_LOGDB("Adapter state switch BRACKETING_STATE->PREVIEW_STATE event = %s",
+ printState);
+ mNextState = PREVIEW_STATE;
+ break;
+
+ case CAMERA_START_IMAGE_CAPTURE:
+ CAMHAL_LOGDB("Adapter state switch BRACKETING_STATE->CAPTURE_STATE event = %s",
+ printState);
+ mNextState = CAPTURE_STATE;
+ break;
+
+ default:
+ CAMHAL_LOGEB("Adapter state switch BRACKETING_STATE Invalid Op! event = %s",
+ printState);
+ ret = INVALID_OPERATION;
+ break;
+
+ }
+
+ break;
+
+ case AF_STATE:
+
+ switch ( operation )
+ {
+
+ case CAMERA_CANCEL_AUTOFOCUS:
+ CAMHAL_LOGDB("Adapter state switch AF_STATE->PREVIEW_STATE event = %s",
+ printState);
+ mNextState = PREVIEW_STATE;
+ break;
+
+ case CAMERA_START_SMOOTH_ZOOM:
+ CAMHAL_LOGDB("Adapter state switch AF_STATE->AF_ZOOM_STATE event = %s",
+ printState);
+ mNextState = AF_ZOOM_STATE;
+ break;
+
+ default:
+ CAMHAL_LOGEB("Adapter state switch AF_STATE Invalid Op! event = %s",
+ printState);
+ ret = INVALID_OPERATION;
+ break;
+
+ }
+
+ break;
+
+ case ZOOM_STATE:
+
+ switch ( operation )
+ {
+
+ case CAMERA_CANCEL_AUTOFOCUS:
+ CAMHAL_LOGDB("Adapter state switch AF_STATE->PREVIEW_STATE event = %s",
+ printState);
+ mNextState = ZOOM_STATE;
+ break;
+
+ case CAMERA_STOP_SMOOTH_ZOOM:
+ CAMHAL_LOGDB("Adapter state switch ZOOM_STATE->PREVIEW_STATE event = %s",
+ printState);
+ mNextState = PREVIEW_STATE;
+ break;
+
+ case CAMERA_PERFORM_AUTOFOCUS:
+ CAMHAL_LOGDB("Adapter state switch ZOOM_STATE->AF_ZOOM_STATE event = %s",
+ printState);
+ mNextState = AF_ZOOM_STATE;
+ break;
+
+ case CAMERA_START_VIDEO:
+ CAMHAL_LOGDB("Adapter state switch ZOOM_STATE->VIDEO_ZOOM_STATE event = %s",
+ printState);
+ mNextState = VIDEO_ZOOM_STATE;
+ break;
+
+ default:
+ CAMHAL_LOGEB("Adapter state switch ZOOM_STATE Invalid Op! event = %s",
+ printState);
+ ret = INVALID_OPERATION;
+ break;
+
+ }
+
+ break;
+
+ case VIDEO_STATE:
+
+ switch ( operation )
+ {
+
+ case CAMERA_STOP_VIDEO:
+ CAMHAL_LOGDB("Adapter state switch VIDEO_STATE->PREVIEW_STATE event = %s",
+ printState);
+ mNextState = PREVIEW_STATE;
+ break;
+
+ case CAMERA_PERFORM_AUTOFOCUS:
+ CAMHAL_LOGDB("Adapter state switch VIDEO_STATE->VIDEO_AF_STATE event = %s",
+ printState);
+ mNextState = VIDEO_AF_STATE;
+ break;
+
+ case CAMERA_START_SMOOTH_ZOOM:
+ CAMHAL_LOGDB("Adapter state switch VIDEO_STATE->VIDEO_ZOOM_STATE event = %s",
+ printState);
+ mNextState = VIDEO_ZOOM_STATE;
+ break;
+
+ case CAMERA_USE_BUFFERS_IMAGE_CAPTURE:
+ CAMHAL_LOGDB("Adapter state switch VIDEO_STATE->VIDEO_LOADED_CAPTURE_STATE event = %s",
+ printState);
+ mNextState = VIDEO_LOADED_CAPTURE_STATE;
+ break;
+
+ case CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE:
+ CAMHAL_LOGDB("Adapter state switch VIDEO_STATE->VIDEO_STATE event = %s",
+ printState);
+ mNextState = VIDEO_STATE;
+ break;
+
+ default:
+ CAMHAL_LOGEB("Adapter state switch VIDEO_STATE Invalid Op! event = %s",
+ printState);
+ ret = INVALID_OPERATION;
+ break;
+
+ }
+
+ break;
+
+ case VIDEO_AF_STATE:
+
+ switch ( operation )
+ {
+
+ case CAMERA_CANCEL_AUTOFOCUS:
+ CAMHAL_LOGDB("Adapter state switch VIDEO_AF_STATE->VIDEO_STATE event = %s",
+ printState);
+ mNextState = VIDEO_STATE;
+ break;
+
+ default:
+ CAMHAL_LOGEB("Adapter state switch VIDEO_AF_STATE Invalid Op! event = %s",
+ printState);
+ ret = INVALID_OPERATION;
+ break;
+
+ }
+
+ break;
+
+ case VIDEO_LOADED_CAPTURE_STATE:
+
+ switch ( operation )
+ {
+
+ case CAMERA_START_IMAGE_CAPTURE:
+ CAMHAL_LOGDB("Adapter state switch LOADED_CAPTURE_STATE->CAPTURE_STATE event = %s",
+ printState);
+ mNextState = VIDEO_CAPTURE_STATE;
+ break;
+
+ default:
+ CAMHAL_LOGEB("Adapter state switch LOADED_CAPTURE_STATE Invalid Op! event = %s",
+ printState);
+ ret = INVALID_OPERATION;
+ break;
+
+ }
+
+ break;
+
+ case VIDEO_CAPTURE_STATE:
+
+ switch ( operation )
+ {
+ case CAMERA_STOP_IMAGE_CAPTURE:
+ CAMHAL_LOGDB("Adapter state switch CAPTURE_STATE->PREVIEW_STATE event = %s",
+ printState);
+ mNextState = VIDEO_STATE;
+ break;
+
+ default:
+ CAMHAL_LOGEB("Adapter state switch CAPTURE_STATE Invalid Op! event = %s",
+ printState);
+ ret = INVALID_OPERATION;
+ break;
+
+ }
+
+ break;
+
+ case AF_ZOOM_STATE:
+
+ switch ( operation )
+ {
+
+ case CAMERA_STOP_SMOOTH_ZOOM:
+ CAMHAL_LOGDB("Adapter state switch AF_ZOOM_STATE->AF_STATE event = %s",
+ printState);
+ mNextState = AF_STATE;
+ break;
+
+ case CAMERA_CANCEL_AUTOFOCUS:
+ CAMHAL_LOGDB("Adapter state switch AF_ZOOM_STATE->ZOOM_STATE event = %s",
+ printState);
+ mNextState = ZOOM_STATE;
+ break;
+
+ default:
+ CAMHAL_LOGEB("Adapter state switch AF_ZOOM_STATE Invalid Op! event = %s",
+ printState);
+ ret = INVALID_OPERATION;
+ break;
+
+ }
+
+ break;
+
+ case VIDEO_ZOOM_STATE:
+
+ switch ( operation )
+ {
+
+ case CAMERA_STOP_SMOOTH_ZOOM:
+ CAMHAL_LOGDB("Adapter state switch VIDEO_ZOOM_STATE->VIDEO_STATE event = %s",
+ printState);
+ mNextState = VIDEO_STATE;
+ break;
+
+ case CAMERA_STOP_VIDEO:
+ CAMHAL_LOGDB("Adapter state switch VIDEO_ZOOM_STATE->ZOOM_STATE event = %s",
+ printState);
+ mNextState = ZOOM_STATE;
+ break;
+
+ default:
+ CAMHAL_LOGEB("Adapter state switch VIDEO_ZOOM_STATE Invalid Op! event = %s",
+ printState);
+ ret = INVALID_OPERATION;
+ break;
+
+ }
+
+ break;
+
+ case BRACKETING_ZOOM_STATE:
+
+ switch ( operation )
+ {
+
+ case CAMERA_STOP_SMOOTH_ZOOM:
+ CAMHAL_LOGDB("Adapter state switch BRACKETING_ZOOM_STATE->BRACKETING_STATE event = %s",
+ printState);
+ mNextState = BRACKETING_STATE;
+ break;
+
+ default:
+ CAMHAL_LOGEB("Adapter state switch BRACKETING_ZOOM_STATE Invalid Op! event = %s",
+ printState);
+ ret = INVALID_OPERATION;
+ break;
+
+ }
+
+ break;
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ case REPROCESS_STATE:
+ switch (operation) {
+ case CAMERA_STOP_IMAGE_CAPTURE:
+ CAMHAL_LOGDB("Adapter state switch REPROCESS_STATE->PREVIEW_STATE event = %s",
+ printState);
+ mNextState = PREVIEW_STATE;
+ break;
+ case CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE:
+ case CAMERA_START_IMAGE_CAPTURE:
+ CAMHAL_LOGDB("Adapter state switch REPROCESS_STATE->REPROCESS_STATE event = %s",
+ printState);
+ mNextState = REPROCESS_STATE;
+ break;
+ case CAMERA_USE_BUFFERS_REPROCESS:
+ CAMHAL_LOGDB("Adapter state switch REPROCESS_STATE->REPROCESS_STATE event = %s",
+ printState);
+ mNextState = LOADED_REPROCESS_STATE;
+ break;
+
+ case CAMERA_USE_BUFFERS_IMAGE_CAPTURE:
+ CAMHAL_LOGDB("Adapter state switch REPROCESS_STATE->LOADED_CAPTURE_STATE event = %s",
+ printState);
+ mNextState = LOADED_CAPTURE_STATE;
+ break;
+ default:
+ CAMHAL_LOGEB("Adapter state switch REPROCESS_STATE Invalid Op! event = %s",
+ printState);
+ ret = INVALID_OPERATION;
+ break;
+
+ }
+
+ break;
+#endif
+
+
+ default:
+ CAMHAL_LOGEA("Invalid Adapter state!");
+ ret = INVALID_OPERATION;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::rollbackToInitializedState()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ while ((getState() != INTIALIZED_STATE) && (ret == NO_ERROR)) {
+ ret = rollbackToPreviousState();
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::rollbackToPreviousState()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ CameraAdapter::AdapterState currentState = getState();
+
+ switch (currentState) {
+ case INTIALIZED_STATE:
+ return NO_ERROR;
+
+ case PREVIEW_STATE:
+ ret = sendCommand(CAMERA_STOP_PREVIEW);
+ break;
+
+ case CAPTURE_STATE:
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ case REPROCESS_STATE:
+#endif
+ ret = sendCommand(CAMERA_STOP_IMAGE_CAPTURE);
+ break;
+
+ case BRACKETING_STATE:
+ ret = sendCommand(CAMERA_STOP_BRACKET_CAPTURE);
+ break;
+
+ case AF_STATE:
+ ret = sendCommand(CAMERA_CANCEL_AUTOFOCUS);
+ break;
+
+ case ZOOM_STATE:
+ ret = sendCommand(CAMERA_STOP_SMOOTH_ZOOM);
+ break;
+
+ case VIDEO_STATE:
+ ret = sendCommand(CAMERA_STOP_VIDEO);
+ break;
+
+ case VIDEO_AF_STATE:
+ ret = sendCommand(CAMERA_CANCEL_AUTOFOCUS);
+ break;
+
+ case VIDEO_CAPTURE_STATE:
+ ret = sendCommand(CAMERA_STOP_IMAGE_CAPTURE);
+ break;
+
+ case AF_ZOOM_STATE:
+ ret = sendCommand(CAMERA_STOP_SMOOTH_ZOOM);
+ break;
+
+ case VIDEO_ZOOM_STATE:
+ ret = sendCommand(CAMERA_STOP_SMOOTH_ZOOM);
+ break;
+
+ case BRACKETING_ZOOM_STATE:
+ ret = sendCommand(CAMERA_STOP_SMOOTH_ZOOM);
+ break;
+
+ default:
+ CAMHAL_LOGEA("Invalid Adapter state!");
+ ret = INVALID_OPERATION;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
//State transition finished successfully.
//Commit the state and unlock the adapter state.
// NOTE: must only be called after a successful setState(); setState()
// acquires mLock and this function releases it.
status_t BaseCameraAdapter::commitState()
{
    status_t ret = NO_ERROR;

    LOG_FUNCTION_NAME;

    // Promote the staged state to the current state.
    mAdapterState = mNextState;

    // Release the lock taken by setState(), ending the transition.
    mLock.unlock();

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
+
// Abort a transition staged by setState(): discard the staged next state
// and release the adapter lock taken by setState().
status_t BaseCameraAdapter::rollbackState()
{
    status_t ret = NO_ERROR;

    LOG_FUNCTION_NAME;

    // Reset the staged state back to the (unchanged) current state.
    mNextState = mAdapterState;

    // Release the lock taken by setState(), ending the transition.
    mLock.unlock();

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
+
+// getNextState() and getState()
+// publicly exposed functions to retrieve the adapter states
+// please notice that these functions are locked
+CameraAdapter::AdapterState BaseCameraAdapter::getState()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mLock);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return mAdapterState;
+}
+
+CameraAdapter::AdapterState BaseCameraAdapter::getNextState()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mLock);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return mNextState;
+}
+
+// getNextState() and getState()
+// internal protected functions to retrieve the adapter states
+// please notice that these functions are NOT locked to help
+// internal functions query state in the middle of state
+// transition
+status_t BaseCameraAdapter::getState(AdapterState &state)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ state = mAdapterState;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::getNextState(AdapterState &state)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ state = mNextState;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+void BaseCameraAdapter::onOrientationEvent(uint32_t orientation, uint32_t tilt)
+{
+ LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+//-----------------------------------------------------------------------------
+
+extern "C" status_t OMXCameraAdapter_Capabilities(
+ CameraProperties::Properties * const properties_array,
+ const int starting_camera, const int max_camera, int & supportedCameras);
+extern "C" status_t V4LCameraAdapter_Capabilities(
+ CameraProperties::Properties * const properties_array,
+ const int starting_camera, const int max_camera, int & supportedCameras);
+
// Enumerates all supported cameras by querying every compiled-in adapter
// (OMX and/or V4L) and filling properties_array. On return,
// supportedCameras holds the total camera count across adapters.
// Returns NO_ERROR, or UNKNOWN_ERROR if any adapter query failed.
extern "C" status_t CameraAdapter_Capabilities(
    CameraProperties::Properties * const properties_array,
    const int starting_camera, const int max_camera, int & supportedCameras)
{

    status_t ret = NO_ERROR;
    status_t err = NO_ERROR;
    int num_cameras_supported = 0;

    LOG_FUNCTION_NAME;

    supportedCameras = 0;
#ifdef OMX_CAMERA_ADAPTER
    //Query OMX cameras
    // OMX adapter writes its camera count directly into supportedCameras.
    err = OMXCameraAdapter_Capabilities( properties_array, starting_camera,
                                         max_camera, supportedCameras);
    if(err != NO_ERROR) {
        CAMHAL_LOGEA("error while getting OMXCameraAdapter capabilities");
        ret = UNKNOWN_ERROR;
    }
#endif
#ifdef V4L_CAMERA_ADAPTER
    //Query V4L cameras
    // V4L cameras are appended after the OMX ones: they start at index
    // supportedCameras and report their count in num_cameras_supported.
    err = V4LCameraAdapter_Capabilities( properties_array, (const int) supportedCameras,
                                         max_camera, num_cameras_supported);
    if(err != NO_ERROR) {
        CAMHAL_LOGEA("error while getting V4LCameraAdapter capabilities");
        ret = UNKNOWN_ERROR;
    }
#endif

    supportedCameras += num_cameras_supported;
    // NOTE(review): informational message logged at error level (LOGEB),
    // presumably to keep it visible in release builds — confirm intent.
    CAMHAL_LOGEB("supportedCameras= %d\n", supportedCameras);
    LOG_FUNCTION_NAME_EXIT;
    return ret;
}
+
+//-----------------------------------------------------------------------------
+
+} // namespace Camera
+} // namespace Ti
+
+/*--------------------Camera Adapter Class ENDS here-----------------------------*/
+
diff --git a/camera/BufferSourceAdapter.cpp b/camera/BufferSourceAdapter.cpp
new file mode 100644
index 0000000..3c4e698
--- /dev/null
+++ b/camera/BufferSourceAdapter.cpp
@@ -0,0 +1,1007 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
+
+#include "BufferSourceAdapter.h"
+#include <ui/GraphicBuffer.h>
+#include <ui/GraphicBufferMapper.h>
+#include <hal_public.h>
+
+namespace Ti {
+namespace Camera {
+
+static int getANWFormat(const char* parameters_format)
+{
+ int format = HAL_PIXEL_FORMAT_TI_NV12;
+
+ if (parameters_format != NULL) {
+ if (strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
+ CAMHAL_LOGDA("CbYCrY format selected");
+ format = HAL_PIXEL_FORMAT_TI_UYVY;
+ } else if (strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
+ CAMHAL_LOGDA("YUV420SP format selected");
+ format = HAL_PIXEL_FORMAT_TI_NV12;
+ } else if (strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
+ CAMHAL_LOGDA("RGB565 format selected");
+ // TODO(XXX): not defined yet
+ format = -1;
+ } else if (strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB) == 0) {
+ format = HAL_PIXEL_FORMAT_TI_Y16;
+ } else {
+ CAMHAL_LOGDA("Invalid format, NV12 format selected as default");
+ format = HAL_PIXEL_FORMAT_TI_NV12;
+ }
+ }
+
+ return format;
+}
+
+static int getUsageFromANW(int format)
+{
+ int usage = GRALLOC_USAGE_SW_READ_RARELY |
+ GRALLOC_USAGE_SW_WRITE_NEVER;
+
+ switch (format) {
+ case HAL_PIXEL_FORMAT_TI_NV12:
+ case HAL_PIXEL_FORMAT_TI_Y16:
+ case HAL_PIXEL_FORMAT_TI_UYVY:
+ // This usage flag indicates to gralloc we want the
+ // buffers to come from system heap
+ usage |= GRALLOC_USAGE_PRIVATE_0;
+ break;
+ default:
+ // No special flags needed
+ break;
+ }
+ return usage;
+}
+
+static const char* getFormatFromANW(int format)
+{
+ switch (format) {
+ case HAL_PIXEL_FORMAT_TI_NV12:
+ // Assuming NV12 1D is RAW or Image frame
+ return android::CameraParameters::PIXEL_FORMAT_YUV420SP;
+ case HAL_PIXEL_FORMAT_TI_Y16:
+ return android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB;
+ case HAL_PIXEL_FORMAT_TI_UYVY:
+ return android::CameraParameters::PIXEL_FORMAT_YUV422I;
+ default:
+ break;
+ }
+ return android::CameraParameters::PIXEL_FORMAT_YUV420SP;
+}
+
// Maps a pixel-format string to the frame type requested from the frame
// provider for tap-out sources.
// NOTE(review): every arm currently returns RAW_FRAME, so the switch is
// effectively a placeholder — possibly kept for future per-format types;
// confirm before simplifying.
static CameraFrame::FrameType formatToOutputFrameType(const char* format) {
    switch (getANWFormat(format)) {
        case HAL_PIXEL_FORMAT_TI_NV12:
        case HAL_PIXEL_FORMAT_TI_Y16:
        case HAL_PIXEL_FORMAT_TI_UYVY:
            // Assuming NV12 1D is RAW or Image frame
            return CameraFrame::RAW_FRAME;
        default:
            break;
    }
    return CameraFrame::RAW_FRAME;
}
+
+static int getHeightFromFormat(const char* format, int stride, int size) {
+ CAMHAL_ASSERT((NULL != format) && (0 <= stride) && (0 <= size));
+ switch (getANWFormat(format)) {
+ case HAL_PIXEL_FORMAT_TI_NV12:
+ return (size / (3 * stride)) * 2;
+ case HAL_PIXEL_FORMAT_TI_Y16:
+ case HAL_PIXEL_FORMAT_TI_UYVY:
+ return (size / stride) / 2;
+ default:
+ break;
+ }
+ return 0;
+}
+
+/*--------------------BufferSourceAdapter Class STARTS here-----------------------------*/
+
+
+///Constant definitions
+// TODO(XXX): Temporarily increase number of buffers we can allocate from ANW
+// until faux-NPA mode is implemented
+const int BufferSourceAdapter::NO_BUFFERS_IMAGE_CAPTURE_SYSTEM_HEAP = 15;
+
+/**
+ * Display Adapter class STARTS here..
+ */
+BufferSourceAdapter::BufferSourceAdapter() : mBufferCount(0)
+{
+ LOG_FUNCTION_NAME;
+
+ mPixelFormat = NULL;
+ mBuffers = NULL;
+ mFrameProvider = NULL;
+ mBufferSource = NULL;
+
+ mFrameWidth = 0;
+ mFrameHeight = 0;
+ mPreviewWidth = 0;
+ mPreviewHeight = 0;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
// Tears down the adapter: returns buffers, detaches the frame provider and
// stops the worker threads. Teardown order is deliberate — buffers are
// freed before taking mLock, and threads are stopped last.
BufferSourceAdapter::~BufferSourceAdapter()
{
    LOG_FUNCTION_NAME;

    // Free/cancel the ANW buffers first (freeBufferList takes its own
    // locking path).
    freeBufferList(mBuffers);

    android::AutoMutex lock(mLock);

    destroy();

    if (mFrameProvider) {
        // Unregister with the frame provider
        mFrameProvider->disableFrameNotification(CameraFrame::ALL_FRAMES);
        delete mFrameProvider;
        mFrameProvider = NULL;
    }

    // Stop and release the frame-queue worker thread.
    if (mQueueFrame.get()) {
        mQueueFrame->requestExit();
        mQueueFrame.clear();
    }

    // Stop and release the buffer-return worker thread.
    if (mReturnFrame.get()) {
        mReturnFrame->requestExit();
        mReturnFrame.clear();
    }

    LOG_FUNCTION_NAME_EXIT;
}
+
+status_t BufferSourceAdapter::initialize()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ mReturnFrame.clear();
+ mReturnFrame = new ReturnFrame(this);
+ mReturnFrame->run();
+
+ mQueueFrame.clear();
+ mQueueFrame = new QueueFrame(this);
+ mQueueFrame->run();
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
// Attaches a buffer source (tap-in/tap-out surface). Fails if a different
// source is already attached; returns ALREADY_EXISTS when the same source
// (matched by surface id) is set again.
int BufferSourceAdapter::setPreviewWindow(preview_stream_ops_t *source)
{
    LOG_FUNCTION_NAME;

    if (!source) {
        CAMHAL_LOGEA("NULL window object passed to DisplayAdapter");
        LOG_FUNCTION_NAME_EXIT;
        return BAD_VALUE;
    }

    if (mBufferSource) {
        // A source is already attached: compare surface ids to decide
        // between "same source" and "conflicting source".
        char id1[OP_STR_SIZE], id2[OP_STR_SIZE];
        status_t ret;

        ret = extendedOps()->get_id(mBufferSource, id1, sizeof(id1));
        if (ret != 0) {
            CAMHAL_LOGE("Surface::getId returned error %d", ret);
            return ret;
        }

        ret = extendedOps()->get_id(source, id2, sizeof(id2));
        if (ret != 0) {
            CAMHAL_LOGE("Surface::getId returned error %d", ret);
            return ret;
        }
        // Both surfaces must be named for the comparison to be meaningful.
        if ((0 >= strlen(id1)) || (0 >= strlen(id2))) {
            CAMHAL_LOGE("Cannot set ST without name: id1:\"%s\" id2:\"%s\"",
                         id1, id2);
            return NOT_ENOUGH_DATA;
        }
        if (0 == strcmp(id1, id2)) {
            // Same surface set twice: treat as a benign no-op.
            return ALREADY_EXISTS;
        }

        // client has to unset mBufferSource before being able to set a new one
        return BAD_VALUE;
    }

    // Move to new source obj
    mBufferSource = source;

    LOG_FUNCTION_NAME_EXIT;

    return NO_ERROR;
}
+
+bool BufferSourceAdapter::match(const char * str) {
+ char id1[OP_STR_SIZE];
+ status_t ret;
+
+ ret = extendedOps()->get_id(mBufferSource, id1, sizeof(id1));
+
+ if (ret != 0) {
+ CAMHAL_LOGE("Surface::getId returned error %d", ret);
+ }
+
+ return strcmp(id1, str) == 0;
+}
+
+int BufferSourceAdapter::setFrameProvider(FrameNotifier *frameProvider)
+{
+ LOG_FUNCTION_NAME;
+
+ if ( !frameProvider ) {
+ CAMHAL_LOGEA("NULL passed for frame provider");
+ LOG_FUNCTION_NAME_EXIT;
+ return BAD_VALUE;
+ }
+
+ if ( NULL != mFrameProvider ) {
+ delete mFrameProvider;
+ }
+
+ mFrameProvider = new FrameProvider(frameProvider, this, frameCallback);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+}
+
+int BufferSourceAdapter::setErrorHandler(ErrorNotifier *errorNotifier)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL == errorNotifier ) {
+ CAMHAL_LOGEA("Invalid Error Notifier reference");
+ return -EINVAL;
+ }
+
+ mErrorNotifier = errorNotifier;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+int BufferSourceAdapter::enableDisplay(int width, int height,
+ struct timeval *refTime)
+{
+ LOG_FUNCTION_NAME;
+ CameraFrame::FrameType frameType;
+
+ if (mFrameProvider == NULL) {
+ // no-op frame provider not set yet
+ return NO_ERROR;
+ }
+
+ if (mBufferSourceDirection == BUFFER_SOURCE_TAP_IN) {
+ // only supporting one type of input frame
+ frameType = CameraFrame::REPROCESS_INPUT_FRAME;
+ } else {
+ frameType = formatToOutputFrameType(mPixelFormat);
+ }
+
+ mFrameProvider->enableFrameNotification(frameType);
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+}
+
+int BufferSourceAdapter::disableDisplay(bool cancel_buffer)
+{
+ LOG_FUNCTION_NAME;
+
+ if (mFrameProvider) mFrameProvider->disableFrameNotification(CameraFrame::ALL_FRAMES);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+}
+
+status_t BufferSourceAdapter::pauseDisplay(bool pause)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ // no-op for BufferSourceAdapter
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+
+void BufferSourceAdapter::destroy()
+{
+ LOG_FUNCTION_NAME;
+
+ mBufferCount = 0;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
// Allocates a tap-out buffer list from the attached ANativeWindow source:
// configures usage/count/geometry, dequeues and maps every buffer, then
// cancels the minimum-undequeued quota back to the window. Returns the
// CameraBuffer array, or NULL on failure (after notifying mErrorNotifier).
// `bytes` is in/out: total buffer size in, recomputed size out.
CameraBuffer* BufferSourceAdapter::allocateBufferList(int width, int dummyHeight, const char* format,
                                                      int &bytes, int numBufs)
{
    LOG_FUNCTION_NAME;
    status_t err;
    int i = -1;
    const int lnumBufs = numBufs;
    int undequeued = 0;
    android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();

    mBuffers = new CameraBuffer [lnumBufs];
    memset (mBuffers, 0, sizeof(CameraBuffer) * lnumBufs);

    if ( NULL == mBufferSource ) {
        return NULL;
    }

    int pixFormat = getANWFormat(format);
    int usage = getUsageFromANW(pixFormat);
    mPixelFormat = CameraHal::getPixelFormatConstant(format);

    // Set gralloc usage bits for window.
    err = mBufferSource->set_usage(mBufferSource, usage);
    if (err != 0) {
        CAMHAL_LOGE("native_window_set_usage failed: %s (%d)", strerror(-err), -err);

        if ( ENODEV == err ) {
            CAMHAL_LOGEA("Preview surface abandoned!");
            mBufferSource = NULL;
        }

        return NULL;
    }

    CAMHAL_LOGDB("Number of buffers set to BufferSourceAdapter %d", numBufs);
    // Set the number of buffers needed for this buffer source
    err = mBufferSource->set_buffer_count(mBufferSource, numBufs);
    if (err != 0) {
        CAMHAL_LOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err), -err);

        if ( ENODEV == err ) {
            CAMHAL_LOGEA("Preview surface abandoned!");
            mBufferSource = NULL;
        }

        return NULL;
    }

    CAMHAL_LOGDB("Configuring %d buffers for ANativeWindow", numBufs);
    mBufferCount = numBufs;

    // re-calculate height depending on stride and size
    int height = getHeightFromFormat(format, width, bytes);

    // Set window geometry
    err = mBufferSource->set_buffers_geometry(mBufferSource,
                                              width, height,
                                              pixFormat);

    if (err != 0) {
        CAMHAL_LOGE("native_window_set_buffers_geometry failed: %s (%d)", strerror(-err), -err);
        if ( ENODEV == err ) {
            CAMHAL_LOGEA("Preview surface abandoned!");
            mBufferSource = NULL;
        }
        return NULL;
    }

    // NOTE(review): this check is after mBuffers has already been written
    // by memset above, and operator new throws rather than returning NULL
    // — the check appears to be dead code; confirm before removing.
    if ( mBuffers == NULL ) {
        CAMHAL_LOGEA("Couldn't create array for ANativeWindow buffers");
        LOG_FUNCTION_NAME_EXIT;
        return NULL;
    }

    mBufferSource->get_min_undequeued_buffer_count(mBufferSource, &undequeued);

    // Dequeue every buffer once so we can record its handle and format.
    for (i = 0; i < mBufferCount; i++ ) {
        buffer_handle_t *handle;
        int stride; // dummy variable to get stride
        // TODO(XXX): Do we need to keep stride information in camera hal?

        err = mBufferSource->dequeue_buffer(mBufferSource, &handle, &stride);

        if (err != 0) {
            CAMHAL_LOGEB("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
            if ( ENODEV == err ) {
                CAMHAL_LOGEA("Preview surface abandoned!");
                mBufferSource = NULL;
            }
            goto fail;
        }

        CAMHAL_LOGDB("got handle %p", handle);
        mBuffers[i].opaque = (void *)handle;
        mBuffers[i].type = CAMERA_BUFFER_ANW;
        mBuffers[i].format = mPixelFormat;
        mFramesWithCameraAdapterMap.add(handle, i);

        bytes = CameraHal::calculateBufferSize(format, width, height);
    }

    // Keep the first (count - undequeued) buffers: lock them for CPU
    // access and record their mapped addresses.
    for( i = 0; i < mBufferCount-undequeued; i++ ) {
        void *y_uv[2];
        android::Rect bounds(width, height);

        buffer_handle_t *handle = (buffer_handle_t *) mBuffers[i].opaque;
        mBufferSource->lock_buffer(mBufferSource, handle);
        mapper.lock(*handle, CAMHAL_GRALLOC_USAGE, bounds, y_uv);
        mBuffers[i].mapped = y_uv[0];
    }

    // return the rest of the buffers back to ANativeWindow
    // (they are mapped once to learn their address, then cancelled).
    for(i = (mBufferCount-undequeued); i >= 0 && i < mBufferCount; i++) {
        buffer_handle_t *handle = (buffer_handle_t *) mBuffers[i].opaque;
        void *y_uv[2];
        android::Rect bounds(width, height);

        mapper.lock(*handle, CAMHAL_GRALLOC_USAGE, bounds, y_uv);
        mBuffers[i].mapped = y_uv[0];
        mapper.unlock(*handle);

        err = mBufferSource->cancel_buffer(mBufferSource, handle);
        if (err != 0) {
            CAMHAL_LOGEB("cancel_buffer failed: %s (%d)", strerror(-err), -err);
            if ( ENODEV == err ) {
                CAMHAL_LOGEA("Preview surface abandoned!");
                mBufferSource = NULL;
            }
            goto fail;
        }
        mFramesWithCameraAdapterMap.removeItem((buffer_handle_t *) mBuffers[i].opaque);
    }

    mFrameWidth = width;
    mFrameHeight = height;
    mBufferSourceDirection = BUFFER_SOURCE_TAP_OUT;

    return mBuffers;

 fail:
    // need to cancel buffers if any were dequeued
    // NOTE(review): cancels indices [0, i) — buffers already cancelled in
    // the loop above may be cancelled a second time here; confirm the
    // window tolerates that.
    for (int start = 0; start < i && i > 0; start++) {
        int err = mBufferSource->cancel_buffer(mBufferSource,
                                               (buffer_handle_t *) mBuffers[start].opaque);
        if (err != 0) {
            CAMHAL_LOGEB("cancelBuffer failed w/ error 0x%08x", err);
            break;
        }
        mFramesWithCameraAdapterMap.removeItem((buffer_handle_t *) mBuffers[start].opaque);
    }

    freeBufferList(mBuffers);

    CAMHAL_LOGEA("Error occurred, performing cleanup");

    if (NULL != mErrorNotifier.get()) {
        mErrorNotifier->errorNotify(-ENOMEM);
    }

    LOG_FUNCTION_NAME_EXIT;
    return NULL;

}
+
+/**
+   @brief Return the adapter's buffer list, optionally rebuilding it.
+
+   When @p reset is true the surface state may have changed, so a new list is
+   assembled from (a) buffers we already hold, (b) freshly dequeued buffers,
+   and (c) buffers still owned by the consumer that could not be dequeued.
+
+   @param reset true to resynchronize the list with the surface
+   @return the (possibly new) buffer list, or NULL on failure
+ */
+CameraBuffer *BufferSourceAdapter::getBuffers(bool reset) {
+    int undequeued = 0;
+    status_t err;
+    android::Mutex::Autolock lock(mLock);
+
+    if (!mBufferSource || !mBuffers) {
+        CAMHAL_LOGE("Adapter is not set up properly: "
+                "mBufferSource:%p mBuffers:%p",
+                 mBufferSource, mBuffers);
+        goto fail;
+    }
+
+    // CameraHal is indicating to us that the state of the mBuffer
+    // might have changed. We might need to check the state of the
+    // buffer list and pass a new one depending on the state of our
+    // surface
+    if (reset) {
+        const int lnumBufs = mBufferCount;
+        android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
+        android::Rect bounds(mFrameWidth, mFrameHeight);
+        void *y_uv[2];
+        CameraBuffer * newBuffers = NULL;
+        unsigned int index = 0;
+        android::KeyedVector<void*, int> missingIndices;
+
+        newBuffers = new CameraBuffer [lnumBufs];
+        memset (newBuffers, 0, sizeof(CameraBuffer) * lnumBufs);
+
+        // Use this vector to figure out missing indices
+        for (int i = 0; i < mBufferCount; i++) {
+            missingIndices.add(mBuffers[i].opaque, i);
+        }
+
+        // assign buffers that we have already dequeued
+        for (index = 0; index < mFramesWithCameraAdapterMap.size(); index++) {
+            int value = mFramesWithCameraAdapterMap.valueAt(index);
+            newBuffers[index].opaque = mBuffers[value].opaque;
+            newBuffers[index].type = mBuffers[value].type;
+            newBuffers[index].format = mBuffers[value].format;
+            newBuffers[index].mapped = mBuffers[value].mapped;
+            mFramesWithCameraAdapterMap.replaceValueAt(index, index);
+            missingIndices.removeItem(newBuffers[index].opaque);
+        }
+
+        mBufferSource->get_min_undequeued_buffer_count(mBufferSource, &undequeued);
+
+        // dequeue the rest of the buffers
+        for ( ; index < (unsigned int)(mBufferCount-undequeued); index++) {
+            buffer_handle_t *handle;
+            int stride; // dummy variable to get stride
+
+            err = mBufferSource->dequeue_buffer(mBufferSource, &handle, &stride);
+            if (err != 0) {
+                CAMHAL_LOGEB("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
+                if ( ENODEV == err ) {
+                    CAMHAL_LOGEA("Preview surface abandoned!");
+                    mBufferSource = NULL;
+                }
+                // BUGFIX: newBuffers was previously leaked on this error path.
+                delete [] newBuffers;
+                goto fail;
+            }
+            newBuffers[index].opaque = (void *)handle;
+            newBuffers[index].type = CAMERA_BUFFER_ANW;
+            newBuffers[index].format = mPixelFormat;
+            mFramesWithCameraAdapterMap.add(handle, index);
+
+            mBufferSource->lock_buffer(mBufferSource, handle);
+            mapper.lock(*handle, CAMHAL_GRALLOC_USAGE, bounds, y_uv);
+            newBuffers[index].mapped = y_uv[0];
+            CAMHAL_LOGDB("got handle %p", handle);
+
+            missingIndices.removeItem(newBuffers[index].opaque);
+        }
+
+        // now we need to figure out which buffers aren't dequeued
+        // which are in mBuffers but not newBuffers yet
+        if ((mBufferCount - index) != missingIndices.size()) {
+            CAMHAL_LOGD("Hrmm somethings gone awry. We are missing a different number"
+                        " of buffers than we can fill");
+        }
+        // BUGFIX: advance 'index' for each filled entry. The original wrote
+        // every missing buffer into the same newBuffers[index] slot,
+        // overwriting previous entries and leaving the tail zeroed.
+        for (unsigned int i = 0; i < missingIndices.size(); i++, index++) {
+            int j = missingIndices.valueAt(i);
+
+            CAMHAL_LOGD("Filling at %d", j);
+            newBuffers[index].opaque = mBuffers[j].opaque;
+            newBuffers[index].type = mBuffers[j].type;
+            newBuffers[index].format = mBuffers[j].format;
+            newBuffers[index].mapped = mBuffers[j].mapped;
+        }
+
+        delete [] mBuffers;
+        mBuffers = newBuffers;
+    }
+
+    return mBuffers;
+
+ fail:
+    return NULL;
+}
+
+// Byte size of one frame for the current format and dimensions.
+unsigned int BufferSourceAdapter::getSize() {
+    android::Mutex::Autolock guard(mLock);
+    const unsigned int frameSize =
+            CameraHal::calculateBufferSize(mPixelFormat, mFrameWidth, mFrameHeight);
+    return frameSize;
+}
+
+// Ask the surface (via extended ops) how many buffers it holds.
+// Returns -1 when no buffer source is attached.
+int BufferSourceAdapter::getBufferCount() {
+    android::Mutex::Autolock guard(mLock);
+
+    int bufferCount = -1;
+    if (mBufferSource != NULL) {
+        extendedOps()->get_buffer_count(mBufferSource, &bufferCount);
+    }
+    return bufferCount;
+}
+
+/**
+   @brief Acquire the single tap-in buffer from the associated surface.
+
+   Queries the surface for its current buffer, maps it, and fills in the
+   adapter's one-element buffer list (only one input buffer is supported).
+
+   @param[out] num receives the number of buffers (always 1 on success)
+   @return the buffer list, or NULL on failure
+ */
+CameraBuffer* BufferSourceAdapter::getBufferList(int *num) {
+    LOG_FUNCTION_NAME;
+    status_t err;
+    const int lnumBufs = 1;
+    int formatSource;
+    android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
+    buffer_handle_t *handle;
+
+    // BUGFIX: bail out before allocating. The original allocated mBuffers
+    // (and mutated *num / mBufferCount) first, leaking the array and leaving
+    // stale state when mBufferSource was NULL.
+    if ( NULL == mBufferSource ) {
+        return NULL;
+    }
+
+    // TODO(XXX): Only supporting one input buffer at a time right now
+    *num = 1;
+    mBufferCount = *num;
+    mBuffers = new CameraBuffer [lnumBufs];
+    memset (mBuffers, 0, sizeof(CameraBuffer) * lnumBufs);
+
+    err = extendedOps()->update_and_get_buffer(mBufferSource,
+                                               &handle,
+                                               &mBuffers[0].stride,
+                                               &mBuffers[0].privateData);
+    if (err != 0) {
+        CAMHAL_LOGEB("update and get buffer failed: %s (%d)", strerror(-err), -err);
+        if ( ENODEV == err ) {
+            CAMHAL_LOGEA("Preview surface abandoned!");
+            mBufferSource = NULL;
+        }
+        goto fail;
+    }
+
+    CAMHAL_LOGD("got handle %p", handle);
+    mBuffers[0].opaque = (void *)handle;
+    mBuffers[0].type = CAMERA_BUFFER_ANW;
+    mFramesWithCameraAdapterMap.add(handle, 0);
+
+    // NOTE(review): return codes of the four queries below are ignored, as in
+    // the original flow — confirm the surface guarantees their success here.
+    err = extendedOps()->get_buffer_dimension(mBufferSource, &mBuffers[0].width, &mBuffers[0].height);
+    err = extendedOps()->get_buffer_format(mBufferSource, &formatSource);
+
+    int t, l, r, b, w, h;
+    err = extendedOps()->get_crop(mBufferSource, &l, &t, &r, &b);
+    err = extendedOps()->get_current_size(mBufferSource, &w, &h);
+
+    // lock buffer so the CPU can address its pixels via 'mapped'
+    {
+        void *y_uv[2];
+        android::Rect bounds(mBuffers[0].width, mBuffers[0].height);
+        mapper.lock(*handle, CAMHAL_GRALLOC_USAGE, bounds, y_uv);
+        mBuffers[0].mapped = y_uv[0];
+    }
+
+    mFrameWidth = mBuffers[0].width;
+    mFrameHeight = mBuffers[0].height;
+    mPixelFormat = getFormatFromANW(formatSource);
+
+    mBuffers[0].format = mPixelFormat;
+    mBuffers[0].actual_size = CameraHal::calculateBufferSize(mPixelFormat, w, h);
+    mBuffers[0].offset = t * w + l * CameraHal::getBPP(mPixelFormat);
+    mBufferSourceDirection = BUFFER_SOURCE_TAP_IN;
+
+    return mBuffers;
+
+ fail:
+    // need to cancel buffers if any were dequeued
+    freeBufferList(mBuffers);
+
+    if (NULL != mErrorNotifier.get()) {
+        mErrorNotifier->errorNotify(-ENOMEM);
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+    return NULL;
+}
+
+// Buffer offsets are not applicable to ANativeWindow-backed buffers,
+// so this adapter never provides an offset table.
+uint32_t * BufferSourceAdapter::getOffsets()
+{
+    LOG_FUNCTION_NAME;
+
+    uint32_t *offsets = NULL;
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return offsets;
+}
+
+/**
+   @brief Query the minimum number of buffers the surface keeps undequeued.
+
+   @param[out] undequeueable receives the count on success
+   @return NO_ERROR on success, INVALID_OPERATION when no surface is attached,
+           or the negated surface error code on failure
+ */
+int BufferSourceAdapter::minUndequeueableBuffers(int& undequeueable) {
+    LOG_FUNCTION_NAME;
+    int ret = NO_ERROR;
+
+    if(!mBufferSource)
+    {
+        ret = INVALID_OPERATION;
+        goto end;
+    }
+
+    ret = mBufferSource->get_min_undequeued_buffer_count(mBufferSource, &undequeueable);
+    if ( NO_ERROR != ret ) {
+        CAMHAL_LOGEB("get_min_undequeued_buffer_count failed: %s (%d)", strerror(-ret), -ret);
+        if ( ENODEV == ret ) {
+            CAMHAL_LOGEA("Preview surface abandoned!");
+            mBufferSource = NULL;
+        }
+        // Preserve the original contract of returning the negated error code.
+        ret = -ret;
+        goto end;
+    }
+
+ end:
+    // BUGFIX: the exit trace used to sit after the return statement and was
+    // therefore never executed.
+    LOG_FUNCTION_NAME_EXIT;
+    return ret;
+}
+
+/**
+   @brief Compute how many buffers can be queued to the surface.
+
+   queueable = total buffer count minus the surface's minimum undequeued count.
+
+   @param[out] queueable receives the count on success
+   @return NO_ERROR on success, INVALID_OPERATION when no buffers are
+           allocated, or the error from minUndequeueableBuffers()
+ */
+int BufferSourceAdapter::maxQueueableBuffers(unsigned int& queueable)
+{
+    LOG_FUNCTION_NAME;
+    int ret = NO_ERROR;
+    int undequeued = 0;
+
+    if(mBufferCount == 0) {
+        ret = INVALID_OPERATION;
+        goto end;
+    }
+
+    ret = minUndequeueableBuffers(undequeued);
+    if (ret != NO_ERROR) {
+        goto end;
+    }
+
+    queueable = mBufferCount - undequeued;
+
+ end:
+    // BUGFIX: the exit trace used to sit after the return statement and was
+    // therefore never executed.
+    LOG_FUNCTION_NAME_EXIT;
+    return ret;
+}
+
+// No file descriptor is associated with ANativeWindow-backed buffers;
+// always report -1.
+int BufferSourceAdapter::getFd()
+{
+    LOG_FUNCTION_NAME;
+
+    const int fd = -1;
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return fd;
+}
+
+/**
+   @brief Hand every buffer the camera adapter still holds back to the surface.
+
+   Each tracked buffer is unlocked and cancelled; the tracking map is cleared
+   afterwards regardless of errors.
+
+   @return NO_ERROR on success, negated surface error otherwise
+ */
+status_t BufferSourceAdapter::returnBuffersToWindow()
+{
+    status_t ret = NO_ERROR;
+    android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
+
+    //Give the buffers back to display here - sort of free it
+    if (mBufferSource) {
+        for(unsigned int i = 0; i < mFramesWithCameraAdapterMap.size(); i++) {
+            int value = mFramesWithCameraAdapterMap.valueAt(i);
+
+            // BUGFIX: validate the index *before* using it. The original
+            // computed mBuffers[value].opaque first, an out-of-bounds read
+            // whenever value was invalid.
+            if ((value < 0) || (value >= mBufferCount)) {
+                CAMHAL_LOGEA("Potential out bounds access to handle...skipping");
+                continue;
+            }
+
+            buffer_handle_t *handle = (buffer_handle_t *) mBuffers[value].opaque;
+
+            // unlock buffer before giving it up
+            mapper.unlock(*handle);
+
+            ret = mBufferSource->cancel_buffer(mBufferSource, handle);
+            if ( ENODEV == ret ) {
+                CAMHAL_LOGEA("Preview surface abandoned!");
+                mBufferSource = NULL;
+                return -ret;
+            } else if ( NO_ERROR != ret ) {
+                CAMHAL_LOGEB("cancel_buffer() failed: %s (%d)",
+                              strerror(-ret),
+                              -ret);
+                return -ret;
+            }
+        }
+    } else {
+        CAMHAL_LOGE("mBufferSource is NULL");
+    }
+
+    ///Clear the frames with camera adapter map
+    mFramesWithCameraAdapterMap.clear();
+
+    return ret;
+
+}
+
+/**
+   @brief Release the adapter's buffer list.
+
+   Tap-out buffers are first returned to the surface; then the CameraBuffer
+   array itself is freed.
+
+   @param buflist must be the adapter's own list (mBuffers)
+   @return NO_ERROR on success, BAD_VALUE if buflist is foreign
+ */
+int BufferSourceAdapter::freeBufferList(CameraBuffer * buflist)
+{
+    LOG_FUNCTION_NAME;
+
+    // BUGFIX: take the adapter lock before touching mBuffers; the original
+    // compared buflist against mBuffers prior to acquiring mLock, racing
+    // with concurrent mutators of the buffer list.
+    android::AutoMutex lock(mLock);
+
+    if ( mBuffers != buflist ) {
+        return BAD_VALUE;
+    }
+
+    if (mBufferSourceDirection == BUFFER_SOURCE_TAP_OUT) returnBuffersToWindow();
+
+    if( mBuffers != NULL)
+    {
+        delete [] mBuffers;
+        mBuffers = NULL;
+    }
+
+    return NO_ERROR;
+}
+
+
+// This adapter never accepts externally supplied (app-allocated) buffers.
+bool BufferSourceAdapter::supportsExternalBuffering()
+{
+    return false;
+}
+
+// Hand a frame to the queue-frame worker thread for asynchronous handling;
+// silently dropped if the worker was never created.
+void BufferSourceAdapter::addFrame(CameraFrame* frame)
+{
+    if (mQueueFrame.get()) {
+        mQueueFrame->addFrame(frame);
+    }
+}
+
+// Push one frame out to the consumer surface: locate its backing buffer,
+// publish crop/metadata, unlock the gralloc mapping, and enqueue the handle.
+// On any surface error the stream is torn down (see 'fail' label).
+void BufferSourceAdapter::handleFrameCallback(CameraFrame* frame)
+{
+    status_t ret = NO_ERROR;
+    buffer_handle_t *handle = NULL;
+    int i;
+    uint32_t x, y;
+    android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
+
+    android::AutoMutex lock(mLock);
+
+    if (!mBuffers || !frame->mBuffer) {
+        CAMHAL_LOGEA("Adapter sent BufferSourceAdapter a NULL frame?");
+        return;
+    }
+
+    // Locate the slot in mBuffers that backs this frame.
+    for ( i = 0; i < mBufferCount; i++ ) {
+        if (frame->mBuffer == &mBuffers[i]) {
+            break;
+        }
+    }
+
+    if (i >= mBufferCount) {
+        CAMHAL_LOGD("Can't find frame in buffer list");
+        // Unknown buffer: return the frame to the provider (reprocess input
+        // frames are not returned this way).
+        if (frame->mFrameType != CameraFrame::REPROCESS_INPUT_FRAME) {
+            mFrameProvider->returnFrame(frame->mBuffer,
+                                        static_cast<CameraFrame::FrameType>(frame->mFrameType));
+        }
+        return;
+    }
+
+    handle = (buffer_handle_t *) mBuffers[i].opaque;
+
+    // Handle input buffers
+    // TODO(XXX): Move handling of input buffers out of here if
+    // it becomes more complex
+    if (frame->mFrameType == CameraFrame::REPROCESS_INPUT_FRAME) {
+        CAMHAL_LOGD("Unlock %p (buffer #%d)", handle, i);
+        mapper.unlock(*handle);
+        extendedOps()->release_buffer(mBufferSource, mBuffers[i].privateData);
+        return;
+    }
+
+    // Translate the frame's byte offset into an (x, y) origin and publish
+    // the crop rectangle to the consumer surface.
+    CameraHal::getXYFromOffset(&x, &y, frame->mOffset, frame->mAlignment, mPixelFormat);
+    CAMHAL_LOGVB("offset = %u left = %d top = %d right = %d bottom = %d",
+                  frame->mOffset, x, y, x + frame->mWidth, y + frame->mHeight);
+    ret = mBufferSource->set_crop(mBufferSource, x, y, x + frame->mWidth, y + frame->mHeight);
+    if (NO_ERROR != ret) {
+        CAMHAL_LOGE("mBufferSource->set_crop returned error %d", ret);
+        goto fail;
+    }
+
+    // Forward extended metadata (timestamp attached here) when present.
+    if ( NULL != frame->mMetaData.get() ) {
+        camera_memory_t *extMeta = frame->mMetaData->getExtendedMetadata();
+        if ( NULL != extMeta ) {
+            camera_metadata_t *metaData = static_cast<camera_metadata_t *> (extMeta->data);
+            metaData->timestamp = frame->mTimestamp;
+            ret = extendedOps()->set_metadata(mBufferSource, extMeta);
+            if (ret != 0) {
+                CAMHAL_LOGE("Surface::set_metadata returned error %d", ret);
+                goto fail;
+            }
+        }
+    }
+
+    // unlock buffer before enqueueing
+    mapper.unlock(*handle);
+
+    ret = mBufferSource->enqueue_buffer(mBufferSource, handle);
+    if (ret != 0) {
+        CAMHAL_LOGE("Surface::queueBuffer returned error %d", ret);
+        goto fail;
+    }
+
+    // The consumer now owns the buffer; drop it from the adapter's map.
+    mFramesWithCameraAdapterMap.removeItem((buffer_handle_t *) frame->mBuffer->opaque);
+
+    return;
+
+fail:
+    // Surface failure is treated as fatal for this stream: forget all
+    // outstanding buffers and stop both worker threads.
+    mFramesWithCameraAdapterMap.clear();
+    mBufferSource = NULL;
+    mReturnFrame->requestExit();
+    mQueueFrame->requestExit();
+}
+
+
+/**
+   @brief Dequeue one buffer back from the consumer surface.
+
+   Dequeues and locks a buffer, matches it to the adapter's buffer table,
+   re-registers it in the frames-with-adapter map and returns the frame to
+   the provider.
+
+   @return true if a buffer was successfully reclaimed, false otherwise
+ */
+bool BufferSourceAdapter::handleFrameReturn()
+{
+    status_t err;
+    buffer_handle_t *buf;
+    int i = 0;
+    int stride; // dummy variable to get stride
+    android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
+    void *y_uv[2];
+    android::Rect bounds(mFrameWidth, mFrameHeight);
+
+    android::AutoMutex lock(mLock);
+
+    if ( (NULL == mBufferSource) || (NULL == mBuffers) ) {
+        return false;
+    }
+
+    err = mBufferSource->dequeue_buffer(mBufferSource, &buf, &stride);
+    if (err != 0) {
+        CAMHAL_LOGEB("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
+
+        if ( ENODEV == err ) {
+            CAMHAL_LOGEA("Preview surface abandoned!");
+            mBufferSource = NULL;
+        }
+
+        return false;
+    }
+
+    err = mBufferSource->lock_buffer(mBufferSource, buf);
+    if (err != 0) {
+        CAMHAL_LOGEB("lockbuffer failed: %s (%d)", strerror(-err), -err);
+
+        if ( ENODEV == err ) {
+            CAMHAL_LOGEA("Preview surface abandoned!");
+            mBufferSource = NULL;
+        }
+
+        return false;
+    }
+
+    mapper.lock(*buf, CAMHAL_GRALLOC_USAGE, bounds, y_uv);
+
+    // Match the dequeued handle back to our buffer table.
+    for(i = 0; i < mBufferCount; i++) {
+        if (mBuffers[i].opaque == buf)
+            break;
+    }
+
+    if (i >= mBufferCount) {
+        CAMHAL_LOGEB("Failed to find handle %p", buf);
+        // BUGFIX: the original only logged and then indexed mBuffers[i] out
+        // of bounds. Undo the lock/dequeue and report failure instead.
+        mapper.unlock(*buf);
+        mBufferSource->cancel_buffer(mBufferSource, buf);
+        return false;
+    }
+
+    mFramesWithCameraAdapterMap.add((buffer_handle_t *) mBuffers[i].opaque, i);
+
+    CAMHAL_LOGVB("handleFrameReturn: found graphic buffer %d of %d", i, mBufferCount - 1);
+
+    mFrameProvider->returnFrame(&mBuffers[i], formatToOutputFrameType(mPixelFormat));
+    return true;
+}
+
+// Static trampoline: the frame's cookie points back at the owning adapter,
+// which queues the frame for asynchronous handling.
+void BufferSourceAdapter::frameCallback(CameraFrame* caFrame)
+{
+    if ((NULL == caFrame) || (NULL == caFrame->mCookie)) {
+        CAMHAL_LOGEB("Invalid Cookie in Camera Frame = %p, Cookie = %p",
+                      caFrame, caFrame ? caFrame->mCookie : NULL);
+        return;
+    }
+
+    BufferSourceAdapter *adapter = static_cast<BufferSourceAdapter*>(caFrame->mCookie);
+    adapter->addFrame(caFrame);
+}
+
+/*--------------------BufferSourceAdapter Class ENDS here-----------------------------*/
+
+} // namespace Camera
+} // namespace Ti
+
+#endif
diff --git a/camera/CameraHal.cpp b/camera/CameraHal.cpp
new file mode 100644
index 0000000..51437b9
--- /dev/null
+++ b/camera/CameraHal.cpp
@@ -0,0 +1,4749 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file CameraHal.cpp
+*
+* This file maps the Camera Hardware Interface to V4L2.
+*
+*/
+
+#include "CameraHal.h"
+#include "ANativeWindowDisplayAdapter.h"
+#include "BufferSourceAdapter.h"
+#include "TICameraParameters.h"
+#include "CameraProperties.h"
+#include <cutils/properties.h>
+
+#include <poll.h>
+#include <math.h>
+
+namespace Ti {
+namespace Camera {
+
+extern "C" CameraAdapter* OMXCameraAdapter_Factory(size_t);
+extern "C" CameraAdapter* V4LCameraAdapter_Factory(size_t, CameraHal*);
+
+/*****************************************************************************/
+
+////Constant definitions and declarations
+////@todo Have a CameraProperties class to store these parameters as constants for every camera
+//// Currently, they are hard-coded
+
+// Default buffer pool sizes and the FPS ceiling applied when software
+// scaling is in use.
+const int CameraHal::NO_BUFFERS_PREVIEW = MAX_CAMERA_BUFFERS;
+const int CameraHal::NO_BUFFERS_IMAGE_CAPTURE = 5;
+const int CameraHal::SW_SCALING_FPS_LIMIT = 15;
+
+// Bit positions used to pack event and frame identifiers into a single
+// 32-bit message word (events in the upper half, frames in the lower).
+const uint32_t MessageNotifier::EVENT_BIT_FIELD_POSITION = 16;
+
+const uint32_t MessageNotifier::FRAME_BIT_FIELD_POSITION = 0;
+
+// TODO(XXX): Temporarily increase number of buffers we can allocate from ANW
+// until faux-NPA mode is implemented
+const int CameraHal::NO_BUFFERS_IMAGE_CAPTURE_SYSTEM_HEAP = 15;
+
+#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
+// HACK: Default path to directory where RAW images coming from video port will be saved to.
+// If directory not exists the saving is skipped and video port frame is ignored.
+// The directory name is choosed in so weird way to enable RAW images saving only when
+// directory has been created explicitly by user.
+extern const char * const kRawImagesOutputDirPath = "/data/misc/camera/RaW_PiCtUrEs";
+extern const char * const kYuvImagesOutputDirPath = "/data/misc/camera/YuV_PiCtUrEs";
+#endif
+
+/******************************************************************************/
+
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
+// Stub implementations of the extended preview-stream operations. They are
+// installed as safe defaults in dummyPreviewStreamExtendedOps so that calls
+// made before real ops are plugged in fail gracefully with INVALID_OPERATION
+// instead of dereferencing a NULL function pointer.
+static int dummy_update_and_get_buffer(preview_stream_ops_t*, buffer_handle_t**, int*,int*) {
+    return INVALID_OPERATION;
+}
+
+static int dummy_release_buffer(preview_stream_ops_t*, int slot) {
+    return INVALID_OPERATION;
+}
+
+static int dummy_get_buffer_dimension(preview_stream_ops_t*, int*, int*) {
+    return INVALID_OPERATION;
+}
+
+static int dummy_get_buffer_format(preview_stream_ops_t*, int*) {
+    return INVALID_OPERATION;
+}
+
+static int dummy_set_metadata(preview_stream_ops_t*, const camera_memory_t*) {
+    return INVALID_OPERATION;
+}
+
+static int dummy_get_id(preview_stream_ops_t*, char *data, unsigned int dataSize) {
+    return INVALID_OPERATION;
+}
+
+static int dummy_get_buffer_count(preview_stream_ops_t*, int *count) {
+    return INVALID_OPERATION;
+}
+
+static int dummy_get_crop(preview_stream_ops_t*,
+                           int *, int *, int *, int *) {
+    return INVALID_OPERATION;
+}
+
+static int dummy_get_current_size(preview_stream_ops_t*,
+                                   int *, int *) {
+    return INVALID_OPERATION;
+}
+#endif
+
+
+/**
+   @brief Detect which OMAP SoC family we are running on.
+
+   Reads the family string from sysfs and maps it to the SocFamily enum.
+
+   @return the matching SocFamily value, or SocFamily_Undefined when the
+           sysfs node is missing, unreadable, or names an unknown SoC
+ */
+CameraHal::SocFamily CameraHal::getSocFamily() {
+    static const struct {
+        const char *name;
+        const CameraHal::SocFamily value;
+    } socFamilyArray[] = {
+        {"OMAP4430", SocFamily_Omap4430},
+        {"OMAP4460", SocFamily_Omap4460},
+        {"OMAP4470", SocFamily_Omap4470}
+    };
+    // BUGFIX: iterate over the actual table size. The original looped up to
+    // SocFamily_ElementCount, which reads past the end of this 3-entry table
+    // if the enum ever gains a member without a matching row here.
+    static const int socFamilyArraySize =
+            sizeof(socFamilyArray) / sizeof(socFamilyArray[0]);
+    // lets get the soc family string from sysfs
+    static const char *sysfsNode = "/sys/board_properties/soc/family";
+    FILE *sysfsFd = fopen(sysfsNode, "r");
+    static const int bufSize = 128;
+    char buf[bufSize];
+    if (sysfsFd == NULL) {
+        CAMHAL_LOGEA("'%s' Not Available", sysfsNode);
+        return SocFamily_Undefined;
+    }
+    const char *res = fgets(buf, bufSize, sysfsFd);
+    fclose(sysfsFd);
+    if (res == NULL) {
+        CAMHAL_LOGEA("Error reading '%s'", sysfsNode);
+        return SocFamily_Undefined;
+    }
+    // translate it to CameraHal::SocFamily enum
+    for (int i = 0; i < socFamilyArraySize; ++i) {
+        if (strncmp(socFamilyArray[i].name, buf, strlen(socFamilyArray[i].name)) == 0) {
+            return socFamilyArray[i].value;
+        }
+    }
+    return SocFamily_Undefined;
+}
+
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
+// Default extended-ops table: every entry points at a dummy_* stub that
+// returns INVALID_OPERATION, so the table is always safe to call through.
+static preview_stream_extended_ops_t dummyPreviewStreamExtendedOps = {
+#ifdef OMAP_ENHANCEMENT_CPCAM
+    dummy_update_and_get_buffer,
+    dummy_release_buffer,
+    dummy_get_buffer_dimension,
+    dummy_get_buffer_format,
+    dummy_set_metadata,
+    dummy_get_id,
+    dummy_get_buffer_count,
+    dummy_get_crop,
+    dummy_get_current_size,
+#endif
+};
+#endif
+
+
+// Construct with the dummy extended-ops table installed so mExtendedOps is
+// never NULL, even before setExtendedOps() is called.
+DisplayAdapter::DisplayAdapter()
+{
+#ifdef OMAP_ENHANCEMENT_CPCAM
+    mExtendedOps = &dummyPreviewStreamExtendedOps;
+#endif
+}
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
+// Install caller-provided extended ops; passing NULL restores the dummy
+// table instead of leaving a NULL pointer behind.
+void DisplayAdapter::setExtendedOps(preview_stream_extended_ops_t * extendedOps) {
+    mExtendedOps = extendedOps ? extendedOps : &dummyPreviewStreamExtendedOps;
+}
+#endif
+
+
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+struct timeval CameraHal::mStartPreview;
+struct timeval CameraHal::mStartFocus;
+struct timeval CameraHal::mStartCapture;
+
+#endif
+
+// Static trampoline for SensorListener orientation events: the cookie is
+// the CameraHal instance that registered the callback.
+static void orientation_cb(uint32_t orientation, uint32_t tilt, void* cookie) {
+    if (NULL != cookie) {
+        static_cast<CameraHal*>(cookie)->onOrientationEvent(orientation, tilt);
+    }
+}
+
+/*-------------Camera Hal Interface Method definitions STARTS here--------------------*/
+
+/**
+ Callback function to receive orientation events from SensorListener
+ */
+void CameraHal::onOrientationEvent(uint32_t orientation, uint32_t tilt) {
+    LOG_FUNCTION_NAME;
+
+    // Simply relay the event to the camera adapter, when one is attached.
+    if ( mCameraAdapter != NULL ) {
+        mCameraAdapter->onOrientationEvent(orientation, tilt);
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ @brief Set the notification and data callbacks
+
+ @param[in] notify_cb Notify callback for notifying the app about events and errors
+ @param[in] data_cb Buffer callback for sending the preview/raw frames to the app
+ @param[in] data_cb_timestamp Buffer callback for sending the video frames w/ timestamp
+ @param[in] user Callback cookie
+ @return none
+
+ */
+void CameraHal::setCallbacks(camera_notify_callback notify_cb,
+                             camera_data_callback data_cb,
+                             camera_data_timestamp_callback data_cb_timestamp,
+                             camera_request_memory get_memory,
+                             void *user)
+{
+    LOG_FUNCTION_NAME;
+
+    // The app callback notifier owns dispatching events and frames back to
+    // the application, so it receives all of the callbacks.
+    if ( mAppCallbackNotifier.get() != NULL ) {
+        mAppCallbackNotifier->setCallbacks(this,
+                                           notify_cb,
+                                           data_cb,
+                                           data_cb_timestamp,
+                                           get_memory,
+                                           user);
+    }
+
+    // The adapter keeps the allocator for shared buffer requests.
+    if ( mCameraAdapter != NULL ) {
+        mCameraAdapter->setSharedAllocator(get_memory);
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ @brief Enable a message, or set of messages.
+
+ @param[in] msgtype Bitmask of the messages to enable (defined in include/ui/Camera.h)
+ @return none
+
+ */
+// Enable one or more callback message types, filtering out messages that are
+// handled internally (focus) or currently unavailable (shutter, paused preview).
+void CameraHal::enableMsgType(int32_t msgType)
+{
+    LOG_FUNCTION_NAME;
+
+    // Shutter notifications are stripped when the shutter sound is disabled.
+    if ( ( msgType & CAMERA_MSG_SHUTTER ) && ( !mShutterEnabled ) )
+        {
+        msgType &= ~CAMERA_MSG_SHUTTER;
+        }
+
+    // ignoring enable focus message from camera service
+    // we will enable internally in autoFocus call
+    msgType &= ~CAMERA_MSG_FOCUS;
+#ifdef ANDROID_API_JB_OR_LATER
+    msgType &= ~CAMERA_MSG_FOCUS_MOVE;
+#endif
+
+    {
+        android::AutoMutex lock(mLock);
+        mMsgEnabled |= msgType;
+    }
+
+    // NOTE(review): mMsgEnabled is read below without holding mLock — confirm
+    // a concurrent disableMsgType() cannot race with this check.
+    if(mMsgEnabled & CAMERA_MSG_PREVIEW_FRAME)
+    {
+        if(mDisplayPaused)
+        {
+            CAMHAL_LOGDA("Preview currently paused...will enable preview callback when restarted");
+            msgType &= ~CAMERA_MSG_PREVIEW_FRAME;
+        }else
+        {
+            CAMHAL_LOGDA("Enabling Preview Callback");
+        }
+    }
+    else
+    {
+        CAMHAL_LOGDB("Preview callback not enabled %x", msgType);
+    }
+
+
+    ///Configure app callback notifier with the message callback required
+    mAppCallbackNotifier->enableMsgType (msgType);
+
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ @brief Disable a message, or set of messages.
+
+ @param[in] msgtype Bitmask of the messages to disable (defined in include/ui/Camera.h)
+ @return none
+
+ */
+void CameraHal::disableMsgType(int32_t msgType)
+{
+    LOG_FUNCTION_NAME;
+
+    {
+        android::AutoMutex lock(mLock);
+        mMsgEnabled &= ~msgType;
+    }
+
+    if ( msgType & CAMERA_MSG_PREVIEW_FRAME )
+    {
+        CAMHAL_LOGDA("Disabling Preview Callback");
+    }
+
+    // Propagate the disabled message set to the app callback notifier.
+    mAppCallbackNotifier->disableMsgType (msgType);
+
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ @brief Query whether a message, or a set of messages, is enabled.
+
+ Note that this is operates as an AND, if any of the messages queried are off, this will
+ return false.
+
+ @param[in] msgtype Bitmask of the messages to query (defined in include/ui/Camera.h)
+ @return true If all message types are enabled
+ false If any message type
+
+ */
+int CameraHal::msgTypeEnabled(int32_t msgType)
+{
+    LOG_FUNCTION_NAME;
+
+    android::AutoMutex lock(mLock);
+
+    // Preview-frame messages are reported as disabled until the preview
+    // pipeline is actually up.
+    int32_t effectiveMask = mMsgEnabled;
+    if ( !previewEnabled() && !mPreviewInitializationDone ) {
+        effectiveMask &= ~(CAMERA_MSG_PREVIEW_FRAME | CAMERA_MSG_PREVIEW_METADATA);
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+    return (effectiveMask & msgType);
+}
+
+/**
+ @brief Set the camera parameters.
+
+ @param[in] params Camera parameters to configure the camera
+ @return NO_ERROR
+ @todo Define error codes
+
+ */
+int CameraHal::setParameters(const char* parameters)
+{
+    LOG_FUNCTION_NAME;
+
+    // Unflatten the serialized parameter string and delegate to the
+    // typed overload.
+    android::String8 flattened(parameters);
+    android::CameraParameters params;
+    params.unflatten(flattened);
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return setParameters(params);
+}
+
+/**
+ @brief Set the camera parameters.
+
+ @param[in] params Camera parameters to configure the camera
+ @return NO_ERROR
+ @todo Define error codes
+
+ */
+int CameraHal::setParameters(const android::CameraParameters& params)
+{
+
+ LOG_FUNCTION_NAME;
+
+ int w, h;
+ int framerate;
+ const char *valstr = NULL;
+ int varint = 0;
+ status_t ret = NO_ERROR;
+ // Needed for KEY_RECORDING_HINT
+ bool restartPreviewRequired = false;
+ bool updateRequired = false;
+ android::CameraParameters oldParams = mParameters;
+
+#ifdef V4L_CAMERA_ADAPTER
+ if (strcmp (V4L_CAMERA_NAME_USB, mCameraProperties->get(CameraProperties::CAMERA_NAME)) == 0 ) {
+ updateRequired = true;
+ }
+#endif
+
+ {
+ android::AutoMutex lock(mLock);
+
+ ///Ensure that preview is not enabled when the below parameters are changed.
+ if(!previewEnabled())
+ {
+ if ((valstr = params.getPreviewFormat()) != NULL) {
+ if ( isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FORMATS))) {
+ mParameters.setPreviewFormat(valstr);
+ CAMHAL_LOGDB("PreviewFormat set %s", valstr);
+ } else {
+ CAMHAL_LOGEB("Invalid preview format: %s. Supported: %s", valstr,
+ mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FORMATS));
+ return BAD_VALUE;
+ }
+ }
+
+ if ((valstr = params.get(TICameraParameters::KEY_VNF)) != NULL) {
+ if (strcmp(mCameraProperties->get(CameraProperties::VNF_SUPPORTED),
+ android::CameraParameters::TRUE) == 0) {
+ CAMHAL_LOGDB("VNF %s", valstr);
+ mParameters.set(TICameraParameters::KEY_VNF, valstr);
+ } else if (strcmp(valstr, android::CameraParameters::TRUE) == 0) {
+ CAMHAL_LOGEB("ERROR: Invalid VNF: %s", valstr);
+ return BAD_VALUE;
+ } else {
+ mParameters.set(TICameraParameters::KEY_VNF,
+ android::CameraParameters::FALSE);
+ }
+ }
+
+ if ((valstr = params.get(android::CameraParameters::KEY_VIDEO_STABILIZATION)) != NULL) {
+ // make sure we support vstab...if we don't and application is trying to set
+ // vstab then return an error
+ if (strcmp(mCameraProperties->get(CameraProperties::VSTAB_SUPPORTED),
+ android::CameraParameters::TRUE) != 0 &&
+ strcmp(valstr, android::CameraParameters::TRUE) == 0) {
+ CAMHAL_LOGEB("ERROR: Invalid VSTAB: %s", valstr);
+ return BAD_VALUE;
+ }
+ }
+
+ if( (valstr = params.get(TICameraParameters::KEY_CAP_MODE)) != NULL) {
+
+ if (strcmp(TICameraParameters::VIDEO_MODE, valstr)) {
+ mCapModeBackup = valstr;
+ }
+
+ CAMHAL_LOGDB("Capture mode set %s", valstr);
+
+ const char *currentMode = mParameters.get(TICameraParameters::KEY_CAP_MODE);
+ if ( NULL != currentMode ) {
+ if ( strcmp(currentMode, valstr) != 0 ) {
+ updateRequired = true;
+ }
+ } else {
+ updateRequired = true;
+ }
+
+ mParameters.set(TICameraParameters::KEY_CAP_MODE, valstr);
+ } else if (!mCapModeBackup.isEmpty()) {
+ // Restore previous capture mode after stopPreview()
+ mParameters.set(TICameraParameters::KEY_CAP_MODE,
+ mCapModeBackup.string());
+ updateRequired = true;
+ }
+
+#ifdef OMAP_ENHANCEMENT_VTC
+ if ((valstr = params.get(TICameraParameters::KEY_VTC_HINT)) != NULL ) {
+ mParameters.set(TICameraParameters::KEY_VTC_HINT, valstr);
+ if (strcmp(valstr, android::CameraParameters::TRUE) == 0) {
+ mVTCUseCase = true;
+ } else {
+ mVTCUseCase = false;
+ }
+ CAMHAL_LOGDB("VTC Hint = %d", mVTCUseCase);
+ }
+
+ if (mVTCUseCase) {
+ if ((valstr = params.get(TICameraParameters::KEY_VIDEO_ENCODER_HANDLE)) != NULL ) {
+ mParameters.set(TICameraParameters::KEY_VIDEO_ENCODER_HANDLE, valstr);
+ }
+
+ if ((valstr = params.get(TICameraParameters::KEY_VIDEO_ENCODER_SLICE_HEIGHT)) != NULL ) {
+ mParameters.set(TICameraParameters::KEY_VIDEO_ENCODER_SLICE_HEIGHT, valstr);
+ }
+ }
+#endif
+ }
+
+ if ((valstr = params.get(TICameraParameters::KEY_IPP)) != NULL) {
+ if (isParameterValid(valstr,mCameraProperties->get(CameraProperties::SUPPORTED_IPP_MODES))) {
+ if ((mParameters.get(TICameraParameters::KEY_IPP) == NULL) ||
+ (strcmp(valstr, mParameters.get(TICameraParameters::KEY_IPP)))) {
+ CAMHAL_LOGDB("IPP mode set %s", params.get(TICameraParameters::KEY_IPP));
+ mParameters.set(TICameraParameters::KEY_IPP, valstr);
+ restartPreviewRequired = true;
+ }
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid IPP mode: %s", valstr);
+ return BAD_VALUE;
+ }
+ }
+
+ if ( (valstr = params.get(TICameraParameters::KEY_S3D_PRV_FRAME_LAYOUT)) != NULL )
+ {
+ if (strcmp(valstr, mParameters.get(TICameraParameters::KEY_S3D_PRV_FRAME_LAYOUT)))
+ {
+ CAMHAL_LOGDB("Stereo 3D preview image layout is %s", valstr);
+ mParameters.set(TICameraParameters::KEY_S3D_PRV_FRAME_LAYOUT, valstr);
+ restartPreviewRequired = true;
+ }
+ }
+
+#ifdef OMAP_ENHANCEMENT
+ int orientation =0;
+ if((valstr = params.get(TICameraParameters::KEY_SENSOR_ORIENTATION)) != NULL)
+ {
+ doesSetParameterNeedUpdate(valstr,
+ mParameters.get(TICameraParameters::KEY_SENSOR_ORIENTATION),
+ updateRequired);
+
+ orientation = params.getInt(TICameraParameters::KEY_SENSOR_ORIENTATION);
+ if ( orientation < 0 || orientation >= 360 || (orientation%90) != 0 ) {
+ CAMHAL_LOGE("Invalid sensor orientation: %s. Value must be one of: [0, 90, 180, 270]", valstr);
+ return BAD_VALUE;
+ }
+
+ CAMHAL_LOGD("Sensor Orientation is set to %d", orientation);
+ mParameters.set(TICameraParameters::KEY_SENSOR_ORIENTATION, valstr);
+ }
+#endif
+
+ params.getPreviewSize(&w, &h);
+ if (w == -1 && h == -1) {
+ CAMHAL_LOGEA("Unable to get preview size");
+ return BAD_VALUE;
+ }
+
+ mVideoWidth = w;
+ mVideoHeight = h;
+
+ // Handle RECORDING_HINT to Set/Reset Video Mode Parameters
+ valstr = params.get(android::CameraParameters::KEY_RECORDING_HINT);
+ if(valstr != NULL)
+ {
+ CAMHAL_LOGDB("Recording Hint is set to %s", valstr);
+ if(strcmp(valstr, android::CameraParameters::TRUE) == 0)
+ {
+ CAMHAL_LOGVB("Video Resolution: %d x %d", mVideoWidth, mVideoHeight);
+ mParameters.set(android::CameraParameters::KEY_RECORDING_HINT, valstr);
+ restartPreviewRequired |= setVideoModeParameters(params);
+ }
+ else if(strcmp(valstr, android::CameraParameters::FALSE) == 0)
+ {
+ mParameters.set(android::CameraParameters::KEY_RECORDING_HINT, valstr);
+ restartPreviewRequired |= resetVideoModeParameters();
+ }
+ else
+ {
+ CAMHAL_LOGEA("Invalid RECORDING_HINT");
+ return BAD_VALUE;
+ }
+ }
+ else
+ {
+ // This check is required in following case.
+ // If VideoRecording activity sets KEY_RECORDING_HINT to TRUE and
+ // ImageCapture activity doesnot set KEY_RECORDING_HINT to FALSE (i.e. simply NULL),
+ // then Video Mode parameters may remain present in ImageCapture activity as well.
+ CAMHAL_LOGDA("Recording Hint is set to NULL");
+ mParameters.set(android::CameraParameters::KEY_RECORDING_HINT, "");
+ restartPreviewRequired |= resetVideoModeParameters();
+ }
+
+ if ( (!isResolutionValid(w, h, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_SIZES)))
+ && (!isResolutionValid(w, h, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_SUBSAMPLED_SIZES)))
+ && (!isResolutionValid(w, h, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_SIDEBYSIDE_SIZES)))
+ && (!isResolutionValid(w, h, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_TOPBOTTOM_SIZES))) ) {
+ CAMHAL_LOGEB("Invalid preview resolution %d x %d", w, h);
+ return BAD_VALUE;
+ }
+
+ int oldWidth, oldHeight;
+ mParameters.getPreviewSize(&oldWidth, &oldHeight);
+ if ( ( oldWidth != w ) || ( oldHeight != h ) )
+ {
+ mParameters.setPreviewSize(w, h);
+ restartPreviewRequired = true;
+ }
+
+ CAMHAL_LOGDB("Preview Resolution: %d x %d", w, h);
+
+ if ((valstr = params.get(android::CameraParameters::KEY_FOCUS_MODE)) != NULL) {
+ if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_FOCUS_MODES))) {
+ CAMHAL_LOGDB("Focus mode set %s", valstr);
+
+ // we need to take a decision on the capture mode based on whether CAF picture or
+ // video is chosen so the behavior of each is consistent to the application
+ if(strcmp(valstr, android::CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE) == 0){
+ restartPreviewRequired |= resetVideoModeParameters();
+ } else if (strcmp(valstr, android::CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO) == 0){
+ restartPreviewRequired |= setVideoModeParameters(params);
+ }
+
+ mParameters.set(android::CameraParameters::KEY_FOCUS_MODE, valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid FOCUS mode = %s", valstr);
+ return BAD_VALUE;
+ }
+ }
+
+ mRawCapture = false;
+
+#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
+ valstr = params.get(TICameraParameters::KEY_CAP_MODE);
+ if ( (!valstr || strcmp(valstr, TICameraParameters::HIGH_QUALITY_MODE) == 0) &&
+ access(kRawImagesOutputDirPath, F_OK) != -1 ) {
+ mRawCapture = true;
+ }
+#endif
+
+ if ( (valstr = params.get(TICameraParameters::KEY_S3D_CAP_FRAME_LAYOUT)) != NULL )
+ {
+ CAMHAL_LOGDB("Stereo 3D capture image layout is %s", valstr);
+ mParameters.set(TICameraParameters::KEY_S3D_CAP_FRAME_LAYOUT, valstr);
+ }
+
+ params.getPictureSize(&w, &h);
+ if ( (isResolutionValid(w, h, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_SIZES)))
+ || (isResolutionValid(w, h, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_SUBSAMPLED_SIZES)))
+ || (isResolutionValid(w, h, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_TOPBOTTOM_SIZES)))
+ || (isResolutionValid(w, h, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_SIDEBYSIDE_SIZES))) ) {
+ mParameters.setPictureSize(w, h);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid picture resolution %d x %d", w, h);
+ return BAD_VALUE;
+ }
+
+ CAMHAL_LOGDB("Picture Size by App %d x %d", w, h);
+
+ if ( (valstr = params.getPictureFormat()) != NULL ) {
+ if (isParameterValid(valstr,mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_FORMATS))) {
+ if ((strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB) == 0) &&
+ mCameraProperties->get(CameraProperties::MAX_PICTURE_WIDTH) &&
+ mCameraProperties->get(CameraProperties::MAX_PICTURE_HEIGHT)) {
+ unsigned int width = 0, height = 0;
+ // Set picture size to full frame for raw bayer capture
+ width = atoi(mCameraProperties->get(CameraProperties::MAX_PICTURE_WIDTH));
+ height = atoi(mCameraProperties->get(CameraProperties::MAX_PICTURE_HEIGHT));
+ mParameters.setPictureSize(width,height);
+ }
+ mParameters.setPictureFormat(valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid picture format: %s",valstr);
+ ret = BAD_VALUE;
+ }
+ }
+
+#ifdef OMAP_ENHANCEMENT_BURST_CAPTURE
+ if ((valstr = params.get(TICameraParameters::KEY_BURST)) != NULL) {
+ if (params.getInt(TICameraParameters::KEY_BURST) >=0) {
+ CAMHAL_LOGDB("Burst set %s", valstr);
+ mParameters.set(TICameraParameters::KEY_BURST, valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid Burst value: %s",valstr);
+ return BAD_VALUE;
+ }
+ }
+#endif
+
+ // Variable framerate ranges have higher priority over
+ // deprecated constant FPS.
+ // There is possible 3 situations :
+ // 1) User change FPS range and HAL use it for fps port value (don't care about
+ // changed or not const FPS).
+ // 2) User changes single FPS but not the FPS range - the single FPS value will
+ // be applied to the port.
+ // 3) Both FPS range and const FPS are unchanged - FPS range will be applied to port.
+
+ int curFramerate = 0;
+ bool frameRangeUpdated = false, fpsUpdated = false;
+ int curMaxFPS = 0, curMinFPS = 0, maxFPS = 0, minFPS = 0;
+
+ mParameters.getPreviewFpsRange(&curMinFPS, &curMaxFPS);
+ params.getPreviewFpsRange(&minFPS, &maxFPS);
+
+ curFramerate = mParameters.getPreviewFrameRate();
+ framerate = params.getPreviewFrameRate();
+
+ valstr = params.get(android::CameraParameters::KEY_PREVIEW_FPS_RANGE);
+ if (valstr != NULL && strlen(valstr) &&
+ ((curMaxFPS != maxFPS) || (curMinFPS != minFPS))) {
+ CAMHAL_LOGDB("## current minFPS = %d; maxFPS=%d", curMinFPS, curMaxFPS);
+ CAMHAL_LOGDB("## requested minFPS = %d; maxFPS=%d", minFPS, maxFPS);
+ if (!isFpsRangeValid(minFPS, maxFPS, params.get(android::CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE)) &&
+ !isFpsRangeValid(minFPS, maxFPS, params.get(TICameraParameters::KEY_FRAMERATE_RANGES_EXT_SUPPORTED))) {
+ CAMHAL_LOGEA("Trying to set invalid FPS Range (%d,%d)", minFPS, maxFPS);
+ return BAD_VALUE;
+ }
+ mParameters.set(android::CameraParameters::KEY_PREVIEW_FPS_RANGE, valstr);
+ CAMHAL_LOGDB("FPS Range = %s", valstr);
+ if ( curMaxFPS == (FRAME_RATE_HIGH_HD * CameraHal::VFR_SCALE) &&
+ maxFPS < (FRAME_RATE_HIGH_HD * CameraHal::VFR_SCALE) ) {
+ restartPreviewRequired = true;
+ }
+ frameRangeUpdated = true;
+ }
+
+ valstr = params.get(android::CameraParameters::KEY_PREVIEW_FRAME_RATE);
+ if (valstr != NULL && strlen(valstr) && (framerate != curFramerate)) {
+ CAMHAL_LOGD("current framerate = %d reqested framerate = %d", curFramerate, framerate);
+ if (!isParameterValid(framerate, params.get(android::CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES)) &&
+ !isParameterValid(framerate, params.get(TICameraParameters::KEY_FRAMERATES_EXT_SUPPORTED))) {
+ CAMHAL_LOGEA("Trying to set invalid frame rate %d", framerate);
+ return BAD_VALUE;
+ }
+ mParameters.setPreviewFrameRate(framerate);
+ CAMHAL_LOGDB("Set frame rate %d", framerate);
+ fpsUpdated = true;
+ }
+
+ if (frameRangeUpdated) {
+ mParameters.set(TICameraParameters::KEY_PREVIEW_FRAME_RATE_RANGE,
+ mParameters.get(android::CameraParameters::KEY_PREVIEW_FPS_RANGE));
+ } else if (fpsUpdated) {
+ char tmpBuffer[MAX_PROP_VALUE_LENGTH];
+ sprintf(tmpBuffer, "%d,%d", framerate * CameraHal::VFR_SCALE, framerate * CameraHal::VFR_SCALE);
+ mParameters.set(TICameraParameters::KEY_PREVIEW_FRAME_RATE_RANGE, tmpBuffer);
+ }
+
+ if ((valstr = params.get(TICameraParameters::KEY_GBCE)) != NULL) {
+ if (strcmp(mCameraProperties->get(CameraProperties::SUPPORTED_GBCE),
+ android::CameraParameters::TRUE) == 0) {
+ CAMHAL_LOGDB("GBCE %s", valstr);
+ mParameters.set(TICameraParameters::KEY_GBCE, valstr);
+ } else if (strcmp(valstr, android::CameraParameters::TRUE) == 0) {
+ CAMHAL_LOGEB("ERROR: Invalid GBCE: %s", valstr);
+ return BAD_VALUE;
+ } else {
+ mParameters.set(TICameraParameters::KEY_GBCE, android::CameraParameters::FALSE);
+ }
+ } else {
+ mParameters.set(TICameraParameters::KEY_GBCE, android::CameraParameters::FALSE);
+ }
+
+ if ((valstr = params.get(TICameraParameters::KEY_GLBCE)) != NULL) {
+ if (strcmp(mCameraProperties->get(CameraProperties::SUPPORTED_GLBCE),
+ android::CameraParameters::TRUE) == 0) {
+ CAMHAL_LOGDB("GLBCE %s", valstr);
+ mParameters.set(TICameraParameters::KEY_GLBCE, valstr);
+ } else if (strcmp(valstr, android::CameraParameters::TRUE) == 0) {
+ CAMHAL_LOGEB("ERROR: Invalid GLBCE: %s", valstr);
+ return BAD_VALUE;
+ } else {
+ mParameters.set(TICameraParameters::KEY_GLBCE, android::CameraParameters::FALSE);
+ }
+ } else {
+ mParameters.set(TICameraParameters::KEY_GLBCE, android::CameraParameters::FALSE);
+ }
+
+#ifdef OMAP_ENHANCEMENT_S3D
+ ///Update the current parameter set
+ if ( (valstr = params.get(TICameraParameters::KEY_AUTOCONVERGENCE_MODE)) != NULL ) {
+ CAMHAL_LOGDB("AutoConvergence mode set = %s", valstr);
+ mParameters.set(TICameraParameters::KEY_AUTOCONVERGENCE_MODE, valstr);
+ }
+
+ if ( (valstr = params.get(TICameraParameters::KEY_MANUAL_CONVERGENCE)) != NULL ) {
+ int manualConvergence = (int)strtol(valstr, 0, 0);
+
+ if ( ( manualConvergence < strtol(mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_CONVERGENCE_MIN), 0, 0) ) ||
+ ( manualConvergence > strtol(mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_CONVERGENCE_MAX), 0, 0) ) ) {
+ CAMHAL_LOGEB("ERROR: Invalid Manual Convergence = %d", manualConvergence);
+ return BAD_VALUE;
+ } else {
+ CAMHAL_LOGDB("ManualConvergence Value = %d", manualConvergence);
+ mParameters.set(TICameraParameters::KEY_MANUAL_CONVERGENCE, valstr);
+ }
+ }
+
+ if((valstr = params.get(TICameraParameters::KEY_MECHANICAL_MISALIGNMENT_CORRECTION)) != NULL) {
+ if ( strcmp(mCameraProperties->get(CameraProperties::MECHANICAL_MISALIGNMENT_CORRECTION_SUPPORTED),
+ android::CameraParameters::TRUE) == 0 ) {
+ CAMHAL_LOGDB("Mechanical Mialignment Correction is %s", valstr);
+ mParameters.set(TICameraParameters::KEY_MECHANICAL_MISALIGNMENT_CORRECTION, valstr);
+ } else {
+ mParameters.remove(TICameraParameters::KEY_MECHANICAL_MISALIGNMENT_CORRECTION);
+ }
+ }
+
+ if ((valstr = params.get(TICameraParameters::KEY_EXPOSURE_MODE)) != NULL) {
+ if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_EXPOSURE_MODES))) {
+ CAMHAL_LOGDB("Exposure mode set = %s", valstr);
+ mParameters.set(TICameraParameters::KEY_EXPOSURE_MODE, valstr);
+ if (!strcmp(valstr, TICameraParameters::EXPOSURE_MODE_MANUAL)) {
+ int manualVal;
+ if ((valstr = params.get(TICameraParameters::KEY_MANUAL_EXPOSURE)) != NULL) {
+ manualVal = params.getInt(TICameraParameters::KEY_MANUAL_EXPOSURE);
+ if (manualVal < mParameters.getInt(TICameraParameters::KEY_SUPPORTED_MANUAL_EXPOSURE_MIN) ||
+ manualVal > mParameters.getInt(TICameraParameters::KEY_SUPPORTED_MANUAL_EXPOSURE_MAX)) {
+ CAMHAL_LOGEB("ERROR: Manual Exposure = %s is out of range - "
+ "setting minimum supported value", valstr);
+ valstr = mParameters.get(TICameraParameters::KEY_SUPPORTED_MANUAL_EXPOSURE_MIN);
+ }
+ CAMHAL_LOGDB("Manual Exposure = %s", valstr);
+ mParameters.set(TICameraParameters::KEY_MANUAL_EXPOSURE, valstr);
+ }
+ if ((valstr = params.get(TICameraParameters::KEY_MANUAL_EXPOSURE_RIGHT)) != NULL) {
+ manualVal = params.getInt(TICameraParameters::KEY_MANUAL_EXPOSURE_RIGHT);
+ if (manualVal < mParameters.getInt(TICameraParameters::KEY_SUPPORTED_MANUAL_EXPOSURE_MIN) ||
+ manualVal > mParameters.getInt(TICameraParameters::KEY_SUPPORTED_MANUAL_EXPOSURE_MAX)) {
+ CAMHAL_LOGEB("ERROR: Manual Exposure right = %s is out of range - "
+ "setting minimum supported value", valstr);
+ valstr = mParameters.get(TICameraParameters::KEY_SUPPORTED_MANUAL_EXPOSURE_MIN);
+ }
+ CAMHAL_LOGDB("Manual Exposure right = %s", valstr);
+ mParameters.set(TICameraParameters::KEY_MANUAL_EXPOSURE_RIGHT, valstr);
+ }
+ if ((valstr = params.get(TICameraParameters::KEY_MANUAL_GAIN_ISO)) != NULL) {
+ manualVal = params.getInt(TICameraParameters::KEY_MANUAL_GAIN_ISO);
+ if (manualVal < mParameters.getInt(TICameraParameters::KEY_SUPPORTED_MANUAL_GAIN_ISO_MIN) ||
+ manualVal > mParameters.getInt(TICameraParameters::KEY_SUPPORTED_MANUAL_GAIN_ISO_MAX)) {
+ CAMHAL_LOGEB("ERROR: Manual Gain = %s is out of range - "
+ "setting minimum supported value", valstr);
+ valstr = mParameters.get(TICameraParameters::KEY_SUPPORTED_MANUAL_GAIN_ISO_MIN);
+ }
+ CAMHAL_LOGDB("Manual Gain = %s", valstr);
+ mParameters.set(TICameraParameters::KEY_MANUAL_GAIN_ISO, valstr);
+ }
+ if ((valstr = params.get(TICameraParameters::KEY_MANUAL_GAIN_ISO_RIGHT)) != NULL) {
+ manualVal = params.getInt(TICameraParameters::KEY_MANUAL_GAIN_ISO_RIGHT);
+ if (manualVal < mParameters.getInt(TICameraParameters::KEY_SUPPORTED_MANUAL_GAIN_ISO_MIN) ||
+ manualVal > mParameters.getInt(TICameraParameters::KEY_SUPPORTED_MANUAL_GAIN_ISO_MAX)) {
+ CAMHAL_LOGEB("ERROR: Manual Gain right = %s is out of range - "
+ "setting minimum supported value", valstr);
+ valstr = mParameters.get(TICameraParameters::KEY_SUPPORTED_MANUAL_GAIN_ISO_MIN);
+ }
+ CAMHAL_LOGDB("Manual Gain right = %s", valstr);
+ mParameters.set(TICameraParameters::KEY_MANUAL_GAIN_ISO_RIGHT, valstr);
+ }
+ }
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid Exposure mode = %s", valstr);
+ return BAD_VALUE;
+ }
+ }
+#endif
+
+ if ((valstr = params.get(android::CameraParameters::KEY_WHITE_BALANCE)) != NULL) {
+ if ( isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_WHITE_BALANCE))) {
+ CAMHAL_LOGDB("White balance set %s", valstr);
+ mParameters.set(android::CameraParameters::KEY_WHITE_BALANCE, valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid white balance = %s", valstr);
+ return BAD_VALUE;
+ }
+ }
+
+#ifdef OMAP_ENHANCEMENT
+ if ((valstr = params.get(TICameraParameters::KEY_CONTRAST)) != NULL) {
+ if (params.getInt(TICameraParameters::KEY_CONTRAST) >= 0 ) {
+ CAMHAL_LOGDB("Contrast set %s", valstr);
+ mParameters.set(TICameraParameters::KEY_CONTRAST, valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid Contrast = %s", valstr);
+ return BAD_VALUE;
+ }
+ }
+
+ if ((valstr =params.get(TICameraParameters::KEY_SHARPNESS)) != NULL) {
+ if (params.getInt(TICameraParameters::KEY_SHARPNESS) >= 0 ) {
+ CAMHAL_LOGDB("Sharpness set %s", valstr);
+ mParameters.set(TICameraParameters::KEY_SHARPNESS, valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid Sharpness = %s", valstr);
+ return BAD_VALUE;
+ }
+ }
+
+ if ((valstr = params.get(TICameraParameters::KEY_SATURATION)) != NULL) {
+ if (params.getInt(TICameraParameters::KEY_SATURATION) >= 0 ) {
+ CAMHAL_LOGDB("Saturation set %s", valstr);
+ mParameters.set(TICameraParameters::KEY_SATURATION, valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid Saturation = %s", valstr);
+ return BAD_VALUE;
+ }
+ }
+
+ if ((valstr = params.get(TICameraParameters::KEY_BRIGHTNESS)) != NULL) {
+ if (params.getInt(TICameraParameters::KEY_BRIGHTNESS) >= 0 ) {
+ CAMHAL_LOGDB("Brightness set %s", valstr);
+ mParameters.set(TICameraParameters::KEY_BRIGHTNESS, valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid Brightness = %s", valstr);
+ return BAD_VALUE;
+ }
+ }
+#endif
+
+ if ((valstr = params.get(android::CameraParameters::KEY_ANTIBANDING)) != NULL) {
+ if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_ANTIBANDING))) {
+ CAMHAL_LOGDB("Antibanding set %s", valstr);
+ mParameters.set(android::CameraParameters::KEY_ANTIBANDING, valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid Antibanding = %s", valstr);
+ return BAD_VALUE;
+ }
+ }
+
+#ifdef OMAP_ENHANCEMENT
+ if ((valstr = params.get(TICameraParameters::KEY_ISO)) != NULL) {
+ if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_ISO_VALUES))) {
+ CAMHAL_LOGDB("ISO set %s", valstr);
+ mParameters.set(TICameraParameters::KEY_ISO, valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid ISO = %s", valstr);
+ return BAD_VALUE;
+ }
+ }
+#endif
+
+ if( (valstr = params.get(android::CameraParameters::KEY_FOCUS_AREAS)) != NULL )
+ {
+ CAMHAL_LOGDB("Focus areas position set %s", params.get(android::CameraParameters::KEY_FOCUS_AREAS));
+ mParameters.set(android::CameraParameters::KEY_FOCUS_AREAS, valstr);
+ }
+
+#ifdef OMAP_ENHANCEMENT
+ if( (valstr = params.get(TICameraParameters::KEY_MEASUREMENT_ENABLE)) != NULL )
+ {
+ CAMHAL_LOGDB("Measurements set to %s", valstr);
+ mParameters.set(TICameraParameters::KEY_MEASUREMENT_ENABLE, valstr);
+
+ if (strcmp(valstr, android::CameraParameters::TRUE) == 0)
+ {
+ mMeasurementEnabled = true;
+ }
+ else if (strcmp(valstr, android::CameraParameters::FALSE) == 0)
+ {
+ mMeasurementEnabled = false;
+ }
+ else
+ {
+ mMeasurementEnabled = false;
+ }
+
+ }
+#endif
+
+ if( (valstr = params.get(android::CameraParameters::KEY_EXPOSURE_COMPENSATION)) != NULL)
+ {
+ CAMHAL_LOGDB("Exposure compensation set %s", params.get(android::CameraParameters::KEY_EXPOSURE_COMPENSATION));
+ mParameters.set(android::CameraParameters::KEY_EXPOSURE_COMPENSATION, valstr);
+ }
+
+ if ((valstr = params.get(android::CameraParameters::KEY_SCENE_MODE)) != NULL) {
+ if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_SCENE_MODES))) {
+ CAMHAL_LOGDB("Scene mode set %s", valstr);
+ doesSetParameterNeedUpdate(valstr,
+ mParameters.get(android::CameraParameters::KEY_SCENE_MODE),
+ updateRequired);
+ mParameters.set(android::CameraParameters::KEY_SCENE_MODE, valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid Scene mode = %s", valstr);
+ return BAD_VALUE;
+ }
+ }
+
+ if ((valstr = params.get(android::CameraParameters::KEY_FLASH_MODE)) != NULL) {
+ if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_FLASH_MODES))) {
+ CAMHAL_LOGDB("Flash mode set %s", valstr);
+ mParameters.set(android::CameraParameters::KEY_FLASH_MODE, valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid Flash mode = %s", valstr);
+ return BAD_VALUE;
+ }
+ }
+
+ if ((valstr = params.get(android::CameraParameters::KEY_EFFECT)) != NULL) {
+ if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_EFFECTS))) {
+ CAMHAL_LOGDB("Effect set %s", valstr);
+ mParameters.set(android::CameraParameters::KEY_EFFECT, valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid Effect = %s", valstr);
+ return BAD_VALUE;
+ }
+ }
+
+ varint = params.getInt(android::CameraParameters::KEY_ROTATION);
+ if ( varint >= 0 ) {
+ CAMHAL_LOGDB("Rotation set %d", varint);
+ mParameters.set(android::CameraParameters::KEY_ROTATION, varint);
+ }
+
+ varint = params.getInt(android::CameraParameters::KEY_JPEG_QUALITY);
+ if ( varint >= 0 ) {
+ CAMHAL_LOGDB("Jpeg quality set %d", varint);
+ mParameters.set(android::CameraParameters::KEY_JPEG_QUALITY, varint);
+ }
+
+ varint = params.getInt(android::CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH);
+ if ( varint >= 0 ) {
+ CAMHAL_LOGDB("Thumbnail width set %d", varint);
+ mParameters.set(android::CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, varint);
+ }
+
+ varint = params.getInt(android::CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT);
+ if ( varint >= 0 ) {
+ CAMHAL_LOGDB("Thumbnail width set %d", varint);
+ mParameters.set(android::CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, varint);
+ }
+
+ varint = params.getInt(android::CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY);
+ if ( varint >= 0 ) {
+ CAMHAL_LOGDB("Thumbnail quality set %d", varint);
+ mParameters.set(android::CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY, varint);
+ }
+
+ if( (valstr = params.get(android::CameraParameters::KEY_GPS_LATITUDE)) != NULL )
+ {
+ CAMHAL_LOGDB("GPS latitude set %s", params.get(android::CameraParameters::KEY_GPS_LATITUDE));
+ mParameters.set(android::CameraParameters::KEY_GPS_LATITUDE, valstr);
+ }else{
+ mParameters.remove(android::CameraParameters::KEY_GPS_LATITUDE);
+ }
+
+ if( (valstr = params.get(android::CameraParameters::KEY_GPS_LONGITUDE)) != NULL )
+ {
+ CAMHAL_LOGDB("GPS longitude set %s", params.get(android::CameraParameters::KEY_GPS_LONGITUDE));
+ mParameters.set(android::CameraParameters::KEY_GPS_LONGITUDE, valstr);
+ }else{
+ mParameters.remove(android::CameraParameters::KEY_GPS_LONGITUDE);
+ }
+
+ if( (valstr = params.get(android::CameraParameters::KEY_GPS_ALTITUDE)) != NULL )
+ {
+ CAMHAL_LOGDB("GPS altitude set %s", params.get(android::CameraParameters::KEY_GPS_ALTITUDE));
+ mParameters.set(android::CameraParameters::KEY_GPS_ALTITUDE, valstr);
+ }else{
+ mParameters.remove(android::CameraParameters::KEY_GPS_ALTITUDE);
+ }
+
+ if( (valstr = params.get(android::CameraParameters::KEY_GPS_TIMESTAMP)) != NULL )
+ {
+ CAMHAL_LOGDB("GPS timestamp set %s", params.get(android::CameraParameters::KEY_GPS_TIMESTAMP));
+ mParameters.set(android::CameraParameters::KEY_GPS_TIMESTAMP, valstr);
+ }else{
+ mParameters.remove(android::CameraParameters::KEY_GPS_TIMESTAMP);
+ }
+
+ if( (valstr = params.get(TICameraParameters::KEY_GPS_DATESTAMP)) != NULL )
+ {
+ CAMHAL_LOGDB("GPS datestamp set %s", valstr);
+ mParameters.set(TICameraParameters::KEY_GPS_DATESTAMP, valstr);
+ }else{
+ mParameters.remove(TICameraParameters::KEY_GPS_DATESTAMP);
+ }
+
+ if( (valstr = params.get(android::CameraParameters::KEY_GPS_PROCESSING_METHOD)) != NULL )
+ {
+ CAMHAL_LOGDB("GPS processing method set %s", params.get(android::CameraParameters::KEY_GPS_PROCESSING_METHOD));
+ mParameters.set(android::CameraParameters::KEY_GPS_PROCESSING_METHOD, valstr);
+ }else{
+ mParameters.remove(android::CameraParameters::KEY_GPS_PROCESSING_METHOD);
+ }
+
+ if( (valstr = params.get(TICameraParameters::KEY_GPS_MAPDATUM )) != NULL )
+ {
+ CAMHAL_LOGDB("GPS MAPDATUM set %s", valstr);
+ mParameters.set(TICameraParameters::KEY_GPS_MAPDATUM, valstr);
+ }else{
+ mParameters.remove(TICameraParameters::KEY_GPS_MAPDATUM);
+ }
+
+ if( (valstr = params.get(TICameraParameters::KEY_GPS_VERSION)) != NULL )
+ {
+ CAMHAL_LOGDB("GPS MAPDATUM set %s", valstr);
+ mParameters.set(TICameraParameters::KEY_GPS_VERSION, valstr);
+ }else{
+ mParameters.remove(TICameraParameters::KEY_GPS_VERSION);
+ }
+
+ if( (valstr = params.get(TICameraParameters::KEY_EXIF_MODEL)) != NULL )
+ {
+ CAMHAL_LOGDB("EXIF Model set %s", valstr);
+ mParameters.set(TICameraParameters::KEY_EXIF_MODEL, valstr);
+ }
+
+ if( (valstr = params.get(TICameraParameters::KEY_EXIF_MAKE)) != NULL )
+ {
+ CAMHAL_LOGDB("EXIF Make set %s", valstr);
+ mParameters.set(TICameraParameters::KEY_EXIF_MAKE, valstr);
+ }
+
+#ifdef OMAP_ENHANCEMENT
+ if( (valstr = params.get(TICameraParameters::KEY_EXP_BRACKETING_RANGE)) != NULL )
+ {
+ CAMHAL_LOGDB("Exposure Bracketing set %s", params.get(TICameraParameters::KEY_EXP_BRACKETING_RANGE));
+ mParameters.set(TICameraParameters::KEY_EXP_BRACKETING_RANGE, valstr);
+ mParameters.remove(TICameraParameters::KEY_EXP_GAIN_BRACKETING_RANGE);
+ }
+ else if ((valstr = params.get(TICameraParameters::KEY_EXP_GAIN_BRACKETING_RANGE)) != NULL) {
+ CAMHAL_LOGDB("ABS Exposure+Gain Bracketing set %s", params.get(TICameraParameters::KEY_EXP_GAIN_BRACKETING_RANGE));
+ mParameters.set(TICameraParameters::KEY_EXP_GAIN_BRACKETING_RANGE, valstr);
+ mParameters.remove(TICameraParameters::KEY_EXP_BRACKETING_RANGE);
+ } else
+ {
+ mParameters.remove(TICameraParameters::KEY_EXP_BRACKETING_RANGE);
+ }
+
+ if( (valstr = params.get(TICameraParameters::KEY_ZOOM_BRACKETING_RANGE)) != NULL ) {
+ CAMHAL_LOGDB("Zoom Bracketing range %s", valstr);
+ mParameters.set(TICameraParameters::KEY_ZOOM_BRACKETING_RANGE, valstr);
+ } else {
+ mParameters.remove(TICameraParameters::KEY_ZOOM_BRACKETING_RANGE);
+ }
+#endif
+
+ if ((valstr = params.get(android::CameraParameters::KEY_ZOOM)) != NULL ) {
+ varint = atoi(valstr);
+ if ( varint >= 0 && varint <= mMaxZoomSupported ) {
+ CAMHAL_LOGDB("Zoom set %d", varint);
+ doesSetParameterNeedUpdate(valstr,
+ mParameters.get(android::CameraParameters::KEY_ZOOM),
+ updateRequired);
+ mParameters.set(android::CameraParameters::KEY_ZOOM, valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid Zoom: %s", valstr);
+ return BAD_VALUE;
+ }
+ }
+
+ if( (valstr = params.get(android::CameraParameters::KEY_AUTO_EXPOSURE_LOCK)) != NULL )
+ {
+ CAMHAL_LOGDB("Auto Exposure Lock set %s", valstr);
+ doesSetParameterNeedUpdate(valstr,
+ mParameters.get(android::CameraParameters::KEY_AUTO_EXPOSURE_LOCK),
+ updateRequired);
+ mParameters.set(android::CameraParameters::KEY_AUTO_EXPOSURE_LOCK, valstr);
+ }
+
+ if( (valstr = params.get(android::CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK)) != NULL )
+ {
+ CAMHAL_LOGDB("Auto WhiteBalance Lock set %s", valstr);
+ doesSetParameterNeedUpdate(valstr,
+ mParameters.get(android::CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK),
+ updateRequired);
+ mParameters.set(android::CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK, valstr);
+ }
+ if( (valstr = params.get(android::CameraParameters::KEY_METERING_AREAS)) != NULL )
+ {
+ CAMHAL_LOGDB("Metering areas position set %s", params.get(android::CameraParameters::KEY_METERING_AREAS));
+ mParameters.set(android::CameraParameters::KEY_METERING_AREAS, valstr);
+ }
+
+ if( (valstr = params.get(TICameraParameters::RAW_WIDTH)) != NULL ) {
+ CAMHAL_LOGDB("Raw image width set %s", params.get(TICameraParameters::RAW_WIDTH));
+ mParameters.set(TICameraParameters::RAW_WIDTH, valstr);
+ }
+
+ if( (valstr = params.get(TICameraParameters::RAW_HEIGHT)) != NULL ) {
+ CAMHAL_LOGDB("Raw image height set %s", params.get(TICameraParameters::RAW_HEIGHT));
+ mParameters.set(TICameraParameters::RAW_HEIGHT, valstr);
+ }
+
+ //TI extensions for enable/disable algos
+ if( (valstr = params.get(TICameraParameters::KEY_ALGO_EXTERNAL_GAMMA)) != NULL )
+ {
+ CAMHAL_LOGDB("External Gamma set %s", valstr);
+ mParameters.set(TICameraParameters::KEY_ALGO_EXTERNAL_GAMMA, valstr);
+ }
+
+ if( (valstr = params.get(TICameraParameters::KEY_ALGO_NSF1)) != NULL )
+ {
+ CAMHAL_LOGDB("NSF1 set %s", valstr);
+ mParameters.set(TICameraParameters::KEY_ALGO_NSF1, valstr);
+ }
+
+ if( (valstr = params.get(TICameraParameters::KEY_ALGO_NSF2)) != NULL )
+ {
+ CAMHAL_LOGDB("NSF2 set %s", valstr);
+ mParameters.set(TICameraParameters::KEY_ALGO_NSF2, valstr);
+ }
+
+ if( (valstr = params.get(TICameraParameters::KEY_ALGO_SHARPENING)) != NULL )
+ {
+ CAMHAL_LOGDB("Sharpening set %s", valstr);
+ mParameters.set(TICameraParameters::KEY_ALGO_SHARPENING, valstr);
+ }
+
+ if( (valstr = params.get(TICameraParameters::KEY_ALGO_THREELINCOLORMAP)) != NULL )
+ {
+ CAMHAL_LOGDB("Color Conversion set %s", valstr);
+ mParameters.set(TICameraParameters::KEY_ALGO_THREELINCOLORMAP, valstr);
+ }
+
+ if( (valstr = params.get(TICameraParameters::KEY_ALGO_GIC)) != NULL )
+ {
+ CAMHAL_LOGDB("Green Inballance Correction set %s", valstr);
+ mParameters.set(TICameraParameters::KEY_ALGO_GIC, valstr);
+ }
+
+ if( (valstr = params.get(TICameraParameters::KEY_GAMMA_TABLE)) != NULL )
+ {
+ CAMHAL_LOGDB("Manual gamma table set %s", valstr);
+ mParameters.set(TICameraParameters::KEY_GAMMA_TABLE, valstr);
+ }
+
+ android::CameraParameters adapterParams = mParameters;
+
+#ifdef OMAP_ENHANCEMENT
+ if( NULL != params.get(TICameraParameters::KEY_TEMP_BRACKETING_RANGE_POS) )
+ {
+ int posBracketRange = params.getInt(TICameraParameters::KEY_TEMP_BRACKETING_RANGE_POS);
+ if ( 0 < posBracketRange )
+ {
+ mBracketRangePositive = posBracketRange;
+ }
+ }
+ CAMHAL_LOGDB("Positive bracketing range %d", mBracketRangePositive);
+
+
+ if( NULL != params.get(TICameraParameters::KEY_TEMP_BRACKETING_RANGE_NEG) )
+ {
+ int negBracketRange = params.getInt(TICameraParameters::KEY_TEMP_BRACKETING_RANGE_NEG);
+ if ( 0 < negBracketRange )
+ {
+ mBracketRangeNegative = negBracketRange;
+ }
+ }
+ CAMHAL_LOGDB("Negative bracketing range %d", mBracketRangeNegative);
+
+ if( ( (valstr = params.get(TICameraParameters::KEY_TEMP_BRACKETING)) != NULL) &&
+ ( strcmp(valstr, android::CameraParameters::TRUE) == 0 )) {
+ if ( !mBracketingEnabled ) {
+ CAMHAL_LOGDA("Enabling bracketing");
+ mBracketingEnabled = true;
+ } else {
+ CAMHAL_LOGDA("Bracketing already enabled");
+ }
+ adapterParams.set(TICameraParameters::KEY_TEMP_BRACKETING, valstr);
+ mParameters.set(TICameraParameters::KEY_TEMP_BRACKETING, valstr);
+ } else if ( ( (valstr = params.get(TICameraParameters::KEY_TEMP_BRACKETING)) != NULL ) &&
+ ( strcmp(valstr, android::CameraParameters::FALSE) == 0 )) {
+ CAMHAL_LOGDA("Disabling bracketing");
+
+ adapterParams.set(TICameraParameters::KEY_TEMP_BRACKETING, valstr);
+ mParameters.set(TICameraParameters::KEY_TEMP_BRACKETING, valstr);
+ mBracketingEnabled = false;
+ if ( mBracketingRunning ) {
+ stopImageBracketing();
+ }
+
+ } else {
+ adapterParams.remove(TICameraParameters::KEY_TEMP_BRACKETING);
+ mParameters.remove(TICameraParameters::KEY_TEMP_BRACKETING);
+ }
+#endif
+
+#ifdef OMAP_ENHANCEMENT_VTC
+ if (mVTCUseCase && !mTunnelSetup && (mCameraAdapter != NULL) &&
+ ((mParameters.get(TICameraParameters::KEY_VIDEO_ENCODER_HANDLE)) != NULL )&&
+ ((mParameters.get(TICameraParameters::KEY_VIDEO_ENCODER_SLICE_HEIGHT)) != NULL )) {
+
+ uint32_t sliceHeight = mParameters.getInt(TICameraParameters::KEY_VIDEO_ENCODER_SLICE_HEIGHT);
+ uint32_t encoderHandle = mParameters.getInt(TICameraParameters::KEY_VIDEO_ENCODER_HANDLE);
+ int w, h;
+ mParameters.getPreviewSize(&w, &h);
+ status_t done = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_SETUP_TUNNEL, sliceHeight, encoderHandle, w, h);
+ if (done == NO_ERROR) mTunnelSetup = true;
+ ret |= done;
+ }
+#endif
+
+ // Only send parameters to adapter if preview is already
+ // enabled or doesSetParameterNeedUpdate says so. Initial setParameters to camera adapter,
+ // will be called in startPreview()
+ // TODO(XXX): Need to identify other parameters that need update from camera adapter
+ if ( (NULL != mCameraAdapter) &&
+ (mPreviewEnabled || updateRequired) &&
+ (!(mPreviewEnabled && restartPreviewRequired)) ) {
+ ret |= mCameraAdapter->setParameters(adapterParams);
+ }
+
+#ifdef OMAP_ENHANCEMENT
+ if( ( (valstr = params.get(TICameraParameters::KEY_SHUTTER_ENABLE)) != NULL ) &&
+ ( strcmp(valstr, android::CameraParameters::TRUE) == 0 ))
+ {
+ CAMHAL_LOGDA("Enabling shutter sound");
+
+ mShutterEnabled = true;
+ mMsgEnabled |= CAMERA_MSG_SHUTTER;
+ mParameters.set(TICameraParameters::KEY_SHUTTER_ENABLE, valstr);
+ }
+ else if ( ( (valstr = params.get(TICameraParameters::KEY_SHUTTER_ENABLE)) != NULL ) &&
+ ( strcmp(valstr, android::CameraParameters::FALSE) == 0 ))
+ {
+ CAMHAL_LOGDA("Disabling shutter sound");
+
+ mShutterEnabled = false;
+ mMsgEnabled &= ~CAMERA_MSG_SHUTTER;
+ mParameters.set(TICameraParameters::KEY_SHUTTER_ENABLE, valstr);
+ }
+#endif
+ }
+
+ //On fail restore old parameters
+ if ( NO_ERROR != ret ) {
+ mParameters = oldParams;
+ }
+
+ // Restart preview if required by KEY_RECORDING_HINT, but only if preview is already running.
+ // If preview is not started yet, Video Mode parameters will take effect on next startPreview()
+ if (restartPreviewRequired && previewEnabled() && !mRecordingEnabled) {
+ CAMHAL_LOGDA("Restarting Preview");
+ ret = restartPreview();
+ } else if (restartPreviewRequired && !previewEnabled() &&
+ mDisplayPaused && !mRecordingEnabled) {
+ CAMHAL_LOGDA("Restarting preview in paused mode");
+ ret = restartPreview();
+
+ // TODO(XXX): If there is some delay between the restartPreview call and the code
+ // below, then the user could see some preview frames and callbacks. Let's find
+ // a better place to put this later...
+ if (ret == NO_ERROR) {
+ mDisplayPaused = true;
+ mPreviewEnabled = false;
+ ret = mDisplayAdapter->pauseDisplay(mDisplayPaused);
+ }
+ }
+
+ if ( !mBracketingRunning && mBracketingEnabled ) {
+ startImageBracketing();
+ }
+
+ if (ret != NO_ERROR)
+ {
+ CAMHAL_LOGEA("Failed to restart Preview");
+ return ret;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t CameraHal::allocPreviewBufs(int width, int height, const char* previewFormat,
+ unsigned int buffercount, unsigned int &max_queueable)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if(mDisplayAdapter.get() == NULL)
+ {
+ // Memory allocation of preview buffers is now placed in gralloc
+ // CameraHal should not allocate preview buffers without DisplayAdapter
+ return NO_MEMORY;
+ }
+
+ if(!mPreviewBuffers)
+ {
+ mPreviewLength = 0;
+ mPreviewBuffers = mDisplayAdapter->allocateBufferList(width, height,
+ previewFormat,
+ mPreviewLength,
+ buffercount);
+ if (NULL == mPreviewBuffers ) {
+ CAMHAL_LOGEA("Couldn't allocate preview buffers");
+ return NO_MEMORY;
+ }
+
+ mPreviewOffsets = (uint32_t *) mDisplayAdapter->getOffsets();
+ if ( NULL == mPreviewOffsets ) {
+ CAMHAL_LOGEA("Buffer mapping failed");
+ return BAD_VALUE;
+ }
+
+ mBufProvider = (BufferProvider*) mDisplayAdapter.get();
+
+ ret = mDisplayAdapter->maxQueueableBuffers(max_queueable);
+ if (ret != NO_ERROR) {
+ return ret;
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t CameraHal::freePreviewBufs()
+{
+ status_t ret = NO_ERROR;
+ LOG_FUNCTION_NAME;
+
+ CAMHAL_LOGDB("mPreviewBuffers = %p", mPreviewBuffers);
+ if(mPreviewBuffers)
+ {
+ ret = mBufProvider->freeBufferList(mPreviewBuffers);
+ mPreviewBuffers = NULL;
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+ }
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+
+status_t CameraHal::allocPreviewDataBufs(size_t size, size_t bufferCount)
+{
+ status_t ret = NO_ERROR;
+ int bytes;
+
+ LOG_FUNCTION_NAME;
+
+ bytes = size;
+
+ if ( NO_ERROR == ret )
+ {
+ if( NULL != mPreviewDataBuffers )
+ {
+ ret = freePreviewDataBufs();
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ bytes = ((bytes+4095)/4096)*4096;
+ mPreviewDataBuffers = mMemoryManager->allocateBufferList(0, 0, NULL, bytes, bufferCount);
+
+ CAMHAL_LOGDB("Size of Preview data buffer = %d", bytes);
+ if( NULL == mPreviewDataBuffers )
+ {
+ CAMHAL_LOGEA("Couldn't allocate image buffers using memory manager");
+ ret = -NO_MEMORY;
+ }
+ else
+ {
+ bytes = size;
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ mPreviewDataFd = mMemoryManager->getFd();
+ mPreviewDataLength = bytes;
+ mPreviewDataOffsets = mMemoryManager->getOffsets();
+ }
+ else
+ {
+ mPreviewDataFd = -1;
+ mPreviewDataLength = 0;
+ mPreviewDataOffsets = NULL;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t CameraHal::freePreviewDataBufs()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NO_ERROR == ret )
+ {
+
+ if( NULL != mPreviewDataBuffers )
+ {
+
+ ret = mMemoryManager->freeBufferList(mPreviewDataBuffers);
+ mPreviewDataBuffers = NULL;
+
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t CameraHal::allocImageBufs(unsigned int width, unsigned int height, size_t size,
+ const char* previewFormat, unsigned int bufferCount)
+{
+ status_t ret = NO_ERROR;
+ int bytes = size;
+
+ LOG_FUNCTION_NAME;
+
+ // allocate image buffers only if not already allocated
+ if(NULL != mImageBuffers) {
+ return NO_ERROR;
+ }
+
+ if ( NO_ERROR == ret ) {
+ bytes = ((bytes+4095)/4096)*4096;
+ mImageBuffers = mMemoryManager->allocateBufferList(0, 0, previewFormat, bytes, bufferCount);
+ CAMHAL_LOGDB("Size of Image cap buffer = %d", bytes);
+ if( NULL == mImageBuffers ) {
+ CAMHAL_LOGEA("Couldn't allocate image buffers using memory manager");
+ ret = -NO_MEMORY;
+ } else {
+ bytes = size;
+ }
+ }
+
+ if ( NO_ERROR == ret ) {
+ mImageFd = mMemoryManager->getFd();
+ mImageLength = bytes;
+ mImageOffsets = mMemoryManager->getOffsets();
+ mImageCount = bufferCount;
+ } else {
+ mImageFd = -1;
+ mImageLength = 0;
+ mImageOffsets = NULL;
+ mImageCount = 0;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t CameraHal::allocVideoBufs(uint32_t width, uint32_t height, uint32_t bufferCount)
+{
+ status_t ret = NO_ERROR;
+ LOG_FUNCTION_NAME;
+
+ if( NULL != mVideoBuffers ){
+ ret = freeVideoBufs(mVideoBuffers);
+ mVideoBuffers = NULL;
+ }
+
+ if ( NO_ERROR == ret ){
+ int32_t stride;
+ CameraBuffer *buffers = new CameraBuffer [bufferCount];
+
+ memset (buffers, 0, sizeof(CameraBuffer) * bufferCount);
+
+ if (buffers != NULL){
+ for (unsigned int i = 0; i< bufferCount; i++){
+ android::GraphicBufferAllocator &GrallocAlloc = android::GraphicBufferAllocator::get();
+ buffer_handle_t handle;
+ ret = GrallocAlloc.alloc(width, height, HAL_PIXEL_FORMAT_NV12, CAMHAL_GRALLOC_USAGE, &handle, &stride);
+ if (ret != NO_ERROR){
+ CAMHAL_LOGEA("Couldn't allocate video buffers using Gralloc");
+ ret = -NO_MEMORY;
+ for (unsigned int j=0; j< i; j++){
+ CAMHAL_LOGEB("Freeing Gralloc Buffer %p", buffers[i].opaque);
+ GrallocAlloc.free((buffer_handle_t)buffers[i].opaque);
+ }
+ delete [] buffers;
+ goto exit;
+ }
+ buffers[i].type = CAMERA_BUFFER_GRALLOC;
+ buffers[i].opaque = (void *)handle;
+ CAMHAL_LOGVB("*** Gralloc Handle =0x%x ***", handle);
+ }
+
+ mVideoBuffers = buffers;
+ }
+ else{
+ CAMHAL_LOGEA("Couldn't allocate video buffers ");
+ ret = -NO_MEMORY;
+ }
+ }
+
+ exit:
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t CameraHal::allocRawBufs(int width, int height, const char* previewFormat, int bufferCount)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME
+
+
+ ///@todo Enhance this method allocImageBufs() to take in a flag for burst capture
+ ///Always allocate the buffers for image capture using MemoryManager
+ if (NO_ERROR == ret) {
+ if(( NULL != mVideoBuffers )) {
+ // Re-use the buffer for raw capture.
+ return ret;
+ }
+ }
+
+ if ( NO_ERROR == ret ) {
+ mVideoLength = 0;
+ mVideoLength = (((width * height * 2) + 4095)/4096)*4096;
+ mVideoBuffers = mMemoryManager->allocateBufferList(width, height, previewFormat,
+ mVideoLength, bufferCount);
+
+ CAMHAL_LOGDB("Size of Video cap buffer (used for RAW capture) %d", mVideoLength);
+ if( NULL == mVideoBuffers ) {
+ CAMHAL_LOGEA("Couldn't allocate Video buffers using memory manager");
+ ret = -NO_MEMORY;
+ }
+ }
+
+ if ( NO_ERROR == ret ) {
+ mVideoFd = mMemoryManager->getFd();
+ mVideoOffsets = mMemoryManager->getOffsets();
+ } else {
+ mVideoFd = -1;
+ mVideoOffsets = NULL;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+void endImageCapture( void *userData)
+{
+ LOG_FUNCTION_NAME;
+
+ if ( NULL != userData )
+ {
+ CameraHal *c = reinterpret_cast<CameraHal *>(userData);
+ c->signalEndImageCapture();
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void releaseImageBuffers(void *userData)
+{
+ LOG_FUNCTION_NAME;
+
+ if (NULL != userData) {
+ CameraHal *c = reinterpret_cast<CameraHal *>(userData);
+ c->freeImageBufs();
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+status_t CameraHal::signalEndImageCapture()
+{
+ status_t ret = NO_ERROR;
+ int w,h;
+ android::AutoMutex lock(mLock);
+
+ LOG_FUNCTION_NAME;
+
+ if (mBufferSourceAdapter_Out.get()) {
+ mBufferSourceAdapter_Out->disableDisplay();
+ }
+
+ if (mBufferSourceAdapter_In.get()) {
+ mBufferSourceAdapter_In->disableDisplay();
+ }
+
+ if ( mBracketingRunning ) {
+ stopImageBracketing();
+ } else {
+ mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_IMAGE_CAPTURE);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t CameraHal::freeImageBufs()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if (NULL == mImageBuffers) {
+ return -EINVAL;
+ }
+
+ if (mBufferSourceAdapter_Out.get()) {
+ mBufferSourceAdapter_Out = 0;
+ } else {
+ ret = mMemoryManager->freeBufferList(mImageBuffers);
+ }
+
+ mImageBuffers = NULL;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t CameraHal::freeVideoBufs(CameraBuffer *bufs)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ int count = atoi(mCameraProperties->get(CameraProperties::REQUIRED_PREVIEW_BUFS));
+ if(bufs == NULL)
+ {
+ CAMHAL_LOGEA("NULL pointer passed to freeVideoBuffer");
+ LOG_FUNCTION_NAME_EXIT;
+ return BAD_VALUE;
+ }
+
+ android::GraphicBufferAllocator &GrallocAlloc = android::GraphicBufferAllocator::get();
+
+ for(int i = 0; i < count; i++){
+ CAMHAL_LOGVB("Free Video Gralloc Handle 0x%x", bufs[i].opaque);
+ GrallocAlloc.free((buffer_handle_t)bufs[i].opaque);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t CameraHal::freeRawBufs()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME
+
+ if ( NO_ERROR == ret ) {
+ if( NULL != mVideoBuffers ) {
+ ///@todo Pluralise the name of this method to freeBuffers
+ ret = mMemoryManager->freeBufferList(mVideoBuffers);
+ mVideoBuffers = NULL;
+ } else {
+ ret = -EINVAL;
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT
+
+ return ret;
+}
+
+/**
+ @brief Start preview mode.
+
+ @param none
+ @return NO_ERROR Camera switched to VF mode
+ @todo Update function header with the different errors that are possible
+
+ */
status_t CameraHal::startPreview() {
    LOG_FUNCTION_NAME;

    // When tunneling is enabled during VTC, startPreview happens in 2 steps:
    // When the application sends the command CAMERA_CMD_PREVIEW_INITIALIZATION,
    // cameraPreviewInitialization() is called, which in turn causes the CameraAdapter
    // to move from loaded to idle state. And when the application calls startPreview,
    // the CameraAdapter moves from idle to executing state.
    //
    // If the application calls startPreview() without sending the command
    // CAMERA_CMD_PREVIEW_INITIALIZATION, then the function cameraPreviewInitialization()
    // AND startPreview() are executed. In other words, if the application calls
    // startPreview() without sending the command CAMERA_CMD_PREVIEW_INITIALIZATION,
    // then the CameraAdapter moves from loaded to idle to executing state in one shot.
    status_t ret = cameraPreviewInitialization();

    // The flag mPreviewInitializationDone is set to true at the end of the function
    // cameraPreviewInitialization(). Therefore, if everything goes alright, then the
    // flag will be set. Sometimes, the function cameraPreviewInitialization() may
    // return prematurely if all the resources are not available for starting preview.
    // For example, if the preview window is not set, then it would return NO_ERROR.
    // Under such circumstances, one should return from startPreview as well and should
    // not continue execution. That is why, we check the flag and not the return value.
    if (!mPreviewInitializationDone) return ret;

    // Once startPreview is called, there is no need to continue to remember whether
    // the function cameraPreviewInitialization() was called earlier or not. And so
    // the flag mPreviewInitializationDone is reset here. Plus, this preserves the
    // current behavior of startPreview under the circumstances where the application
    // calls startPreview twice or more.
    mPreviewInitializationDone = false;

    ///Enable the display adapter if present, actual overlay enable happens when we post the buffer
    if(mDisplayAdapter.get() != NULL) {
        CAMHAL_LOGDA("Enabling display");
        int width, height;
        mParameters.getPreviewSize(&width, &height);

#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
        // Instrumented builds pass the preview-start timestamp for latency measurement.
        ret = mDisplayAdapter->enableDisplay(width, height, &mStartPreview);
#else
        ret = mDisplayAdapter->enableDisplay(width, height, NULL);
#endif

        if ( ret != NO_ERROR ) {
            CAMHAL_LOGEA("Couldn't enable display");

            // FIXME: At this stage mStateSwitchLock is locked and unlock is supposed to be called
            //        only from mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_PREVIEW)
            //        below. But this will never happen because of goto error. Thus at next
            //        startPreview() call CameraHAL will be deadlocked.
            //        Need to revisit mStateSwitch lock, for now just abort the process.
            CAMHAL_ASSERT_X(false,
                            "At this stage mCameraAdapter->mStateSwitchLock is still locked, "
                            "deadlock is guaranteed");

            goto error;
        }

    }

    ///Send START_PREVIEW command to adapter
    CAMHAL_LOGDA("Starting CameraAdapter preview mode");

    ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_PREVIEW);

    if(ret!=NO_ERROR) {
        CAMHAL_LOGEA("Couldn't start preview w/ CameraAdapter");
        goto error;
    }
    CAMHAL_LOGDA("Started preview");

    // Success: preview is now running and no longer "in progress".
    mPreviewEnabled = true;
    mPreviewStartInProgress = false;
    return ret;

    error:

        CAMHAL_LOGEA("Performing cleanup after error");

        // Undo everything done so far: buffers, adapter preview, display, callbacks.
        //Do all the cleanup
        freePreviewBufs();
        mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_PREVIEW);
        if(mDisplayAdapter.get() != NULL) {
            mDisplayAdapter->disableDisplay(false);
        }
        mAppCallbackNotifier->stop();
        mPreviewStartInProgress = false;
        mPreviewEnabled = false;
        LOG_FUNCTION_NAME_EXIT;

        return ret;
}
+
+////////////
+/**
+ @brief Set preview mode related initialization
+ -> Camera Adapter set params
+ -> Allocate buffers
+ -> Set use buffers for preview
+ @param none
+ @return NO_ERROR
+ @todo Update function header with the different errors that are possible
+
+ */
status_t CameraHal::cameraPreviewInitialization()
{

    status_t ret = NO_ERROR;
    CameraAdapter::BuffersDescriptor desc;
    CameraFrame frame;
    unsigned int required_buffer_count;
    unsigned int max_queueble_buffers;

#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
    // Record the preview-start time for latency instrumentation.
    gettimeofday(&mStartPreview, NULL);
#endif

    LOG_FUNCTION_NAME;

    // Idempotent: a second initialization request is a no-op.
    if (mPreviewInitializationDone) {
        return NO_ERROR;
    }

    if ( mPreviewEnabled ){
        CAMHAL_LOGDA("Preview already running");
        LOG_FUNCTION_NAME_EXIT;
        return ALREADY_EXISTS;
    }

    if ( NULL != mCameraAdapter ) {
        ret = mCameraAdapter->setParameters(mParameters);
    }

    // Query the adapter for the actual preview resolution unless a start is
    // already pending or the display is merely paused.
    if ((mPreviewStartInProgress == false) && (mDisplayPaused == false)){
        ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_QUERY_RESOLUTION_PREVIEW,( int ) &frame);
        if ( NO_ERROR != ret ){
            CAMHAL_LOGEB("Error: CAMERA_QUERY_RESOLUTION_PREVIEW %d", ret);
            return ret;
        }

        ///Update the current preview width and height
        mPreviewWidth = frame.mWidth;
        mPreviewHeight = frame.mHeight;
    }

    ///If we don't have the preview callback enabled and display adapter,
    // defer the actual start: mark it in-progress and only switch the adapter
    // to executing. startPreview() will be re-entered once a window is set.
    if(!mSetPreviewWindowCalled || (mDisplayAdapter.get() == NULL)){
        CAMHAL_LOGD("Preview not started. Preview in progress flag set");
        mPreviewStartInProgress = true;
        ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_SWITCH_TO_EXECUTING);
        if ( NO_ERROR != ret ){
            CAMHAL_LOGEB("Error: CAMERA_SWITCH_TO_EXECUTING %d", ret);
            return ret;
        }
        return NO_ERROR;
    }

    // Resuming from a paused display (e.g. after an image capture): no buffer
    // re-allocation needed, just un-pause and re-enable callbacks.
    if( (mDisplayAdapter.get() != NULL) && ( !mPreviewEnabled ) && ( mDisplayPaused ) )
        {
        CAMHAL_LOGDA("Preview is in paused state");

        mDisplayPaused = false;
        mPreviewEnabled = true;
        if ( NO_ERROR == ret )
            {
            ret = mDisplayAdapter->pauseDisplay(mDisplayPaused);

            if ( NO_ERROR != ret )
                {
                CAMHAL_LOGEB("Display adapter resume failed %x", ret);
                }
            }
        //restart preview callbacks
        if(mMsgEnabled & CAMERA_MSG_PREVIEW_FRAME)
            {
            mAppCallbackNotifier->enableMsgType (CAMERA_MSG_PREVIEW_FRAME);
            }

        signalEndImageCapture();
        return ret;
        }

    required_buffer_count = atoi(mCameraProperties->get(CameraProperties::REQUIRED_PREVIEW_BUFS));

    ///Allocate the preview buffers
    ret = allocPreviewBufs(mPreviewWidth, mPreviewHeight, mParameters.getPreviewFormat(), required_buffer_count, max_queueble_buffers);

    if ( NO_ERROR != ret )
        {
        CAMHAL_LOGEA("Couldn't allocate buffers for Preview");
        goto error;
        }

    // Measurement mode additionally needs per-frame data buffers.
    if ( mMeasurementEnabled )
        {

        ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_QUERY_BUFFER_SIZE_PREVIEW_DATA,
                                          ( int ) &frame,
                                          required_buffer_count);
        if ( NO_ERROR != ret )
            {
            return ret;
            }

        ///Allocate the preview data buffers
        ret = allocPreviewDataBufs(frame.mLength, required_buffer_count);
        if ( NO_ERROR != ret ) {
            CAMHAL_LOGEA("Couldn't allocate preview data buffers");
            goto error;
        }

        if ( NO_ERROR == ret )
            {
            desc.mBuffers = mPreviewDataBuffers;
            desc.mOffsets = mPreviewDataOffsets;
            desc.mFd = mPreviewDataFd;
            desc.mLength = mPreviewDataLength;
            desc.mCount = ( size_t ) required_buffer_count;
            desc.mMaxQueueable = (size_t) required_buffer_count;

            mCameraAdapter->sendCommand(CameraAdapter::CAMERA_USE_BUFFERS_PREVIEW_DATA,
                                        ( int ) &desc);
            }

        }

    ///Pass the buffers to Camera Adapter
    desc.mBuffers = mPreviewBuffers;
    desc.mOffsets = mPreviewOffsets;
    desc.mFd = mPreviewFd;
    desc.mLength = mPreviewLength;
    desc.mCount = ( size_t ) required_buffer_count;
    desc.mMaxQueueable = (size_t) max_queueble_buffers;

    ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_USE_BUFFERS_PREVIEW,
                                      ( int ) &desc);

    if ( NO_ERROR != ret )
        {
        CAMHAL_LOGEB("Failed to register preview buffers: 0x%x", ret);
        freePreviewBufs();
        return ret;
        }

    ///Start the callback notifier
    ret = mAppCallbackNotifier->start();

    if( ALREADY_EXISTS == ret )
        {
        //Already running, do nothing
        CAMHAL_LOGDA("AppCallbackNotifier already running");
        ret = NO_ERROR;
        }
    else if ( NO_ERROR == ret ) {
        CAMHAL_LOGDA("Started AppCallbackNotifier..");
        mAppCallbackNotifier->setMeasurements(mMeasurementEnabled);
        }
    else
        {
        CAMHAL_LOGDA("Couldn't start AppCallbackNotifier");
        goto error;
        }

    // Only now is initialization complete; startPreview() checks this flag.
    if (ret == NO_ERROR) mPreviewInitializationDone = true;

    mAppCallbackNotifier->startPreviewCallbacks(mParameters, mPreviewBuffers, mPreviewOffsets, mPreviewFd, mPreviewLength, required_buffer_count);

    return ret;

    error:

        CAMHAL_LOGEA("Performing cleanup after error");

        //Do all the cleanup
        freePreviewBufs();
        mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_PREVIEW);
        if(mDisplayAdapter.get() != NULL)
            {
            mDisplayAdapter->disableDisplay(false);
            }
        mAppCallbackNotifier->stop();
        mPreviewStartInProgress = false;
        mPreviewEnabled = false;
        LOG_FUNCTION_NAME_EXIT;

        return ret;
}
+
+/**
+ @brief Sets ANativeWindow object.
+
+ Preview buffers provided to CameraHal via this object. DisplayAdapter will be interfacing with it
+ to render buffers to display.
+
+ @param[in] window The ANativeWindow object created by Surface flinger
+ @return NO_ERROR If the ANativeWindow object passes validation criteria
+ @todo Define validation criteria for ANativeWindow object. Define error codes for scenarios
+
+ */
status_t CameraHal::setPreviewWindow(struct preview_stream_ops *window)
{
    status_t ret = NO_ERROR;
    CameraAdapter::BuffersDescriptor desc;

    LOG_FUNCTION_NAME;
    mSetPreviewWindowCalled = true;

    ///If the Camera service passes a null window, we destroy existing window and free the DisplayAdapter
    if(!window)
    {
        if(mDisplayAdapter.get() != NULL)
        {
            ///NULL window passed, destroy the display adapter if present
            CAMHAL_LOGD("NULL window passed, destroying display adapter");
            mDisplayAdapter.clear();
            ///@remarks If there was a window previously existing, we usually expect another valid window to be passed by the client
            ///@remarks so, we will wait until it passes a valid window to begin the preview again
            mSetPreviewWindowCalled = false;
        }
        CAMHAL_LOGD("NULL ANativeWindow passed to setPreviewWindow");
        return NO_ERROR;
    }else if(mDisplayAdapter.get() == NULL)
    {
        // Need to create the display adapter since it has not been created
        // Create display adapter
        ANativeWindowDisplayAdapter* displayAdapter = new ANativeWindowDisplayAdapter();
        // Propagate the buffer-locking policy to the adapter and the notifier.
        displayAdapter->setExternalLocking(mExternalLocking);
        if (NULL != mAppCallbackNotifier.get()) {
            mAppCallbackNotifier->setExternalLocking(mExternalLocking);
        } else {
            CAMHAL_LOGE("Can't apply locking policy on AppCallbackNotifier");
            CAMHAL_ASSERT(0);
        }
        mDisplayAdapter = displayAdapter;
#ifdef OMAP_ENHANCEMENT_CPCAM
        mDisplayAdapter->setExtendedOps(mExtendedPreviewStreamOps);
#endif
        ret = NO_ERROR;
        if(!mDisplayAdapter.get() || ((ret=mDisplayAdapter->initialize())!=NO_ERROR))
        {
            if(ret!=NO_ERROR)
            {
                mDisplayAdapter.clear();
                CAMHAL_LOGEA("DisplayAdapter initialize failed");
                LOG_FUNCTION_NAME_EXIT;
                return ret;
            }
            else
            {
                CAMHAL_LOGEA("Couldn't create DisplayAdapter");
                LOG_FUNCTION_NAME_EXIT;
                return NO_MEMORY;
            }
        }

        // DisplayAdapter needs to know where to get the CameraFrames from inorder to display
        // Since CameraAdapter is the one that provides the frames, set it as the frame provider for DisplayAdapter
        mDisplayAdapter->setFrameProvider(mCameraAdapter);

        // Any dynamic errors that happen during the camera use case has to be propagated back to the application
        // via CAMERA_MSG_ERROR. AppCallbackNotifier is the class that notifies such errors to the application
        // Set it as the error handler for the DisplayAdapter
        mDisplayAdapter->setErrorHandler(mAppCallbackNotifier.get());

        // Update the display adapter with the new window that is passed from CameraService
        ret  = mDisplayAdapter->setPreviewWindow(window);
        if(ret!=NO_ERROR)
        {
            CAMHAL_LOGEB("DisplayAdapter setPreviewWindow returned error %d", ret);
        }

        if(mPreviewStartInProgress)
        {
            CAMHAL_LOGDA("setPreviewWindow called when preview running");
            // Start the preview since the window is now available
            ret = startPreview();
        }
    } else {
        // Update the display adapter with the new window that is passed from CameraService
        ret = mDisplayAdapter->setPreviewWindow(window);
        if ( (NO_ERROR == ret) && previewEnabled() ) {
            // A new window while preview runs requires a full preview restart.
            restartPreview();
        } else if (ret == ALREADY_EXISTS) {
            // ALREADY_EXISTS should be treated as a noop in this case
            ret = NO_ERROR;
        }
    }
    LOG_FUNCTION_NAME_EXIT;

    return ret;

}
+
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
// Stores the extended preview-stream OPS table (CPCam tap-in/tap-out
// extensions); it is later handed to display/buffer-source adapters.
void CameraHal::setExtendedPreviewStreamOps(preview_stream_extended_ops_t *ops)
{
    mExtendedPreviewStreamOps = ops;
}
+
+/**
+ @brief Sets Tapout Surfaces.
+
+ Buffers provided to CameraHal via this object for tap-out
+ functionality.
+
+ @param[in] window The ANativeWindow object created by Surface flinger
+ @return NO_ERROR If the ANativeWindow object passes validation criteria
+ @todo Define validation criteria for ANativeWindow object. Define error codes for scenarios
+
+ */
status_t CameraHal::setTapoutLocked(struct preview_stream_ops *tapout)
{
    status_t ret = NO_ERROR;
    int index = -1;

    LOG_FUNCTION_NAME;

    if (!tapout) {
        CAMHAL_LOGD("Missing argument");
        LOG_FUNCTION_NAME_EXIT;
        return NO_ERROR;
    }

    // Set tapout point
    // 1. Check name of tap-out
    // 2. If not already set, then create a new one
    // 3. Allocate buffers. If user is re-setting the surface, free buffers first and re-allocate
    //    in case dimensions have changed

    // Probe the existing adapters; ALREADY_EXISTS means this surface is
    // already registered at index i.
    for (unsigned int i = 0; i < mOutAdapters.size(); i++) {
        android::sp<DisplayAdapter> out;
        out = mOutAdapters.itemAt(i);
        ret = out->setPreviewWindow(tapout);
        if (ret == ALREADY_EXISTS) {
            CAMHAL_LOGD("Tap Out already set at index = %d", i);
            index = i;
            ret = NO_ERROR;
        }
    }

    if (index < 0) {
        android::sp<DisplayAdapter> out = new BufferSourceAdapter();

        ret = out->initialize();
        if (ret != NO_ERROR) {
            out.clear();
            CAMHAL_LOGEA("DisplayAdapter initialize failed");
            goto exit;
        }

        // BufferSourceAdapter will be handler of the extended OPS
        out->setExtendedOps(mExtendedPreviewStreamOps);

        // CameraAdapter will be the frame provider for BufferSourceAdapter
        out->setFrameProvider(mCameraAdapter);

        // BufferSourceAdapter will use ErrorHandler to send errors back to
        // the application
        out->setErrorHandler(mAppCallbackNotifier.get());

        // Update the display adapter with the new window that is passed from CameraService
        ret = out->setPreviewWindow(tapout);
        if(ret != NO_ERROR) {
            CAMHAL_LOGEB("DisplayAdapter setPreviewWindow returned error %d", ret);
            goto exit;
        }

        if (NULL != mCameraAdapter) {
            unsigned int bufferCount, max_queueable;
            CameraFrame frame;

            bufferCount = out->getBufferCount();
            // NOTE(review): bufferCount is unsigned; if getBufferCount() can
            // return a negative value it would wrap here and bypass this
            // fallback — confirm the adapter's contract.
            if (bufferCount < 1) bufferCount = NO_BUFFERS_IMAGE_CAPTURE_SYSTEM_HEAP;

            ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE,
                                              ( int ) &frame,
                                              bufferCount);
            if (NO_ERROR != ret) {
                CAMHAL_LOGEB("CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE returned error 0x%x", ret);
            }
            if (NO_ERROR == ret) {
                CameraBuffer *bufs = NULL;
                unsigned int stride;
                unsigned int height = frame.mHeight;
                int size = frame.mLength;

                // Derive the row stride in pixels from the byte alignment.
                stride = frame.mAlignment / getBPP(mParameters.getPictureFormat());
                bufs = out->allocateBufferList(stride,
                                               height,
                                               mParameters.getPictureFormat(),
                                               size,
                                               bufferCount);
                if (bufs == NULL){
                    CAMHAL_LOGEB("error allocating buffer list");
                    goto exit;
                }
            }
        }
        mOutAdapters.add(out);
    }

exit:

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
+
+/**
+ @brief Releases Tapout Surfaces.
+
+ @param[in] window The ANativeWindow object created by Surface flinger
+ @return NO_ERROR If the ANativeWindow object passes validation criteria
+ @todo Define validation criteria for ANativeWindow object. Define error codes for scenarios
+
+ */
+status_t CameraHal::releaseTapoutLocked(struct preview_stream_ops *tapout)
+{
+ status_t ret = NO_ERROR;
+ char id[OP_STR_SIZE];
+
+ LOG_FUNCTION_NAME;
+
+ if (!tapout) {
+ CAMHAL_LOGD("Missing argument");
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_ERROR;
+ }
+
+ // Get the name of tapout
+ ret = mExtendedPreviewStreamOps->get_id(tapout, id, sizeof(id));
+ if (NO_ERROR != ret) {
+ CAMHAL_LOGEB("get_id OPS returned error %d", ret);
+ return ret;
+ }
+
+ // 1. Check name of tap-out
+ // 2. If exist, then free buffers and then remove it
+ if (mBufferSourceAdapter_Out.get() && mBufferSourceAdapter_Out->match(id)) {
+ CAMHAL_LOGD("REMOVE tap out %p previously set as current", tapout);
+ mBufferSourceAdapter_Out.clear();
+ }
+ for (unsigned int i = 0; i < mOutAdapters.size(); i++) {
+ android::sp<DisplayAdapter> out;
+ out = mOutAdapters.itemAt(i);
+ if (out->match(id)) {
+ CAMHAL_LOGD("REMOVE tap out %p \"%s\" at position %d", tapout, id, i);
+ mOutAdapters.removeAt(i);
+ break;
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+/**
+ @brief Sets Tapin Surfaces.
+
+ Buffers provided to CameraHal via this object for tap-in
+ functionality.
+
+ @param[in] window The ANativeWindow object created by Surface flinger
+ @return NO_ERROR If the ANativeWindow object passes validation criteria
+ @todo Define validation criteria for ANativeWindow object. Define error codes for scenarios
+
+ */
status_t CameraHal::setTapinLocked(struct preview_stream_ops *tapin)
{
    status_t ret = NO_ERROR;
    int index = -1;

    LOG_FUNCTION_NAME;

    if (!tapin) {
        CAMHAL_LOGD("Missing argument");
        LOG_FUNCTION_NAME_EXIT;
        return NO_ERROR;
    }

    // 1. Set tapin point
    //    1. Check name of tap-in
    //    2. If not already set, then create a new one
    //    3. Allocate buffers. If user is re-setting the surface, free buffers first and re-allocate
    //       in case dimensions have changed

    // Probe the existing adapters; ALREADY_EXISTS means this surface is
    // already registered at index i.
    for (unsigned int i = 0; i < mInAdapters.size(); i++) {
        android::sp<DisplayAdapter> in;
        in = mInAdapters.itemAt(i);
        ret = in->setPreviewWindow(tapin);
        if (ret == ALREADY_EXISTS) {
            CAMHAL_LOGD("Tap In already set at index = %d", i);
            index = i;
            ret = NO_ERROR;
        }
    }

    // Not registered yet: create, wire up and register a new adapter.
    // Note: unlike setTapoutLocked(), no buffers are allocated here — tap-in
    // buffers are supplied by the application.
    if (index < 0) {
        android::sp<DisplayAdapter> in = new BufferSourceAdapter();

        ret = in->initialize();
        if (ret != NO_ERROR) {
            in.clear();
            CAMHAL_LOGEA("DisplayAdapter initialize failed");
            goto exit;
        }

        // BufferSourceAdapter will be handler of the extended OPS
        in->setExtendedOps(mExtendedPreviewStreamOps);

        // CameraAdapter will be the frame provider for BufferSourceAdapter
        in->setFrameProvider(mCameraAdapter);

        // BufferSourceAdapter will use ErrorHandler to send errors back to
        // the application
        in->setErrorHandler(mAppCallbackNotifier.get());

        // Update the display adapter with the new window that is passed from CameraService
        ret = in->setPreviewWindow(tapin);
        if(ret != NO_ERROR) {
            CAMHAL_LOGEB("DisplayAdapter setPreviewWindow returned error %d", ret);
            goto exit;
        }

        mInAdapters.add(in);
    }

exit:

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
+
+
+/**
+ @brief Releases Tapin Surfaces.
+
+ @param[in] window The ANativeWindow object created by Surface flinger
+ @return NO_ERROR If the ANativeWindow object passes validation criteria
+ @todo Define validation criteria for ANativeWindow object. Define error codes for scenarios
+
+ */
+status_t CameraHal::releaseTapinLocked(struct preview_stream_ops *tapin)
+{
+ status_t ret = NO_ERROR;
+ char id[OP_STR_SIZE];
+
+ LOG_FUNCTION_NAME;
+
+ if (!tapin) {
+ CAMHAL_LOGD("Missing argument");
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_ERROR;
+ }
+
+ // Get the name of tapin
+ ret = mExtendedPreviewStreamOps->get_id(tapin, id, sizeof(id));
+ if (NO_ERROR != ret) {
+ CAMHAL_LOGEB("get_id OPS returned error %d", ret);
+ return ret;
+ }
+
+ // 1. Check name of tap-in
+ // 2. If exist, then free buffers and then remove it
+ if (mBufferSourceAdapter_In.get() && mBufferSourceAdapter_In->match(id)) {
+ CAMHAL_LOGD("REMOVE tap in %p previously set as current", tapin);
+ mBufferSourceAdapter_In.clear();
+ }
+ for (unsigned int i = 0; i < mInAdapters.size(); i++) {
+ android::sp<DisplayAdapter> in;
+ in = mInAdapters.itemAt(i);
+ if (in->match(id)) {
+ CAMHAL_LOGD("REMOVE tap in %p \"%s\" at position %d", tapin, id, i);
+ mInAdapters.removeAt(i);
+ break;
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+
+/**
+ @brief Sets ANativeWindow object.
+
+ Buffers provided to CameraHal via this object for tap-in/tap-out
+ functionality.
+
+ TODO(XXX): this is just going to use preview_stream_ops for now, but we
+ most likely need to extend it when we want more functionality
+
+ @param[in] window The ANativeWindow object created by Surface flinger
+ @return NO_ERROR If the ANativeWindow object passes validation criteria
+ @todo Define validation criteria for ANativeWindow object. Define error codes for scenarios
+
+ */
+status_t CameraHal::setBufferSource(struct preview_stream_ops *tapin, struct preview_stream_ops *tapout)
+{
+ status_t ret = NO_ERROR;
+ int index = -1;
+
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mLock);
+
+ CAMHAL_LOGD ("setBufferSource(%p, %p)", tapin, tapout);
+
+ ret = setTapoutLocked(tapout);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("setTapoutLocked returned error 0x%x", ret);
+ goto exit;
+ }
+
+ ret = setTapinLocked(tapin);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("setTapinLocked returned error 0x%x", ret);
+ goto exit;
+ }
+
+exit:
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+
+/**
+ @brief Releases ANativeWindow object.
+
+ Release Buffers previously released with setBufferSource()
+
+ TODO(XXX): this is just going to use preview_stream_ops for now, but we
+ most likely need to extend it when we want more functionality
+
+ @param[in] window The ANativeWindow object created by Surface flinger
+ @return NO_ERROR If the ANativeWindow object passes validation criteria
+ @todo Define validation criteria for ANativeWindow object. Define error codes for scenarios
+
+ */
+status_t CameraHal::releaseBufferSource(struct preview_stream_ops *tapin, struct preview_stream_ops *tapout)
+{
+ status_t ret = NO_ERROR;
+ int index = -1;
+
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mLock);
+ CAMHAL_LOGD ("releaseBufferSource(%p, %p)", tapin, tapout);
+ if (tapout) {
+ ret |= releaseTapoutLocked(tapout);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("Error %d to release tap out", ret);
+ }
+ }
+
+ if (tapin) {
+ ret |= releaseTapinLocked(tapin);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("Error %d to release tap in", ret);
+ }
+ }
+
+exit:
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+#endif
+
+
+/**
+ @brief Stop a previously started preview.
+
+ @param none
+ @return none
+
+ */
+void CameraHal::stopPreview()
+{
+ LOG_FUNCTION_NAME;
+
+ if( (!previewEnabled() && !mDisplayPaused) || mRecordingEnabled)
+ {
+ LOG_FUNCTION_NAME_EXIT;
+ return;
+ }
+
+ bool imageCaptureRunning = (mCameraAdapter->getState() & CameraAdapter::CAPTURE_STATE) &&
+ (mCameraAdapter->getNextState() != CameraAdapter::PREVIEW_STATE);
+ if(mDisplayPaused && !imageCaptureRunning)
+ {
+ // Display is paused, which essentially means there is no preview active.
+ // Note: this is done so that when stopPreview is called by client after
+ // an image capture, we do not de-initialize the camera adapter and
+ // restart over again.
+
+ return;
+ }
+
+ forceStopPreview();
+
+ // Reset Capture-Mode to default, so that when we switch from VideoRecording
+ // to ImageCapture, CAPTURE_MODE is not left to VIDEO_MODE.
+ CAMHAL_LOGDA("Resetting Capture-Mode to default");
+ mParameters.set(TICameraParameters::KEY_CAP_MODE, "");
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ @brief Returns true if preview is enabled
+
+ @param none
+ @return true If preview is running currently
+ false If preview has been stopped
+
+ */
+bool CameraHal::previewEnabled()
+{
+ LOG_FUNCTION_NAME;
+
+ return (mPreviewEnabled || mPreviewStartInProgress);
+}
+
+/**
+ @brief Start record mode.
+
+ When a record image is available a CAMERA_MSG_VIDEO_FRAME message is sent with
+ the corresponding frame. Every record frame must be released by calling
+ releaseRecordingFrame().
+
+ @param none
+ @return NO_ERROR If recording could be started without any issues
+ @todo Update the header with possible error values in failure scenarios
+
+ */
+status_t CameraHal::startRecording( )
+{
+ int w, h;
+ const char *valstr = NULL;
+ bool restartPreviewRequired = false;
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ gettimeofday(&mStartPreview, NULL);
+
+#endif
+
+ if(!previewEnabled())
+ {
+ return NO_INIT;
+ }
+
+ // set internal recording hint in case camera adapter needs to make some
+ // decisions....(will only be sent to camera adapter if camera restart is required)
+ mParameters.set(TICameraParameters::KEY_RECORDING_HINT, android::CameraParameters::TRUE);
+
+ // if application starts recording in continuous focus picture mode...
+ // then we need to force default capture mode (as opposed to video mode)
+ if ( ((valstr = mParameters.get(android::CameraParameters::KEY_FOCUS_MODE)) != NULL) &&
+ (strcmp(valstr, android::CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE) == 0) ){
+ restartPreviewRequired = resetVideoModeParameters();
+ }
+
+ // only need to check recording hint if preview restart is not already needed
+ valstr = mParameters.get(android::CameraParameters::KEY_RECORDING_HINT);
+ if ( !restartPreviewRequired &&
+ (!valstr || (valstr && (strcmp(valstr, android::CameraParameters::TRUE) != 0))) ) {
+ restartPreviewRequired = setVideoModeParameters(mParameters);
+ }
+
+ if (restartPreviewRequired) {
+ {
+ android::AutoMutex lock(mLock);
+ mCapModeBackup = mParameters.get(TICameraParameters::KEY_CAP_MODE);
+ }
+ ret = restartPreview();
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ int count = atoi(mCameraProperties->get(CameraProperties::REQUIRED_PREVIEW_BUFS));
+ mParameters.getPreviewSize(&w, &h);
+ CAMHAL_LOGDB("%s Video Width=%d Height=%d", __FUNCTION__, mVideoWidth, mVideoHeight);
+
+ if ((w != mVideoWidth) && (h != mVideoHeight))
+ {
+ ret = allocVideoBufs(mVideoWidth, mVideoHeight, count);
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEB("allocImageBufs returned error 0x%x", ret);
+ mParameters.remove(TICameraParameters::KEY_RECORDING_HINT);
+ return ret;
+ }
+
+ mAppCallbackNotifier->useVideoBuffers(true);
+ mAppCallbackNotifier->setVideoRes(mVideoWidth, mVideoHeight);
+ ret = mAppCallbackNotifier->initSharedVideoBuffers(mPreviewBuffers, mPreviewOffsets, mPreviewFd, mPreviewLength, count, mVideoBuffers);
+ }
+ else
+ {
+ mAppCallbackNotifier->useVideoBuffers(false);
+ mAppCallbackNotifier->setVideoRes(mPreviewWidth, mPreviewHeight);
+ ret = mAppCallbackNotifier->initSharedVideoBuffers(mPreviewBuffers, mPreviewOffsets, mPreviewFd, mPreviewLength, count, NULL);
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ ret = mAppCallbackNotifier->startRecording();
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ ///Buffers for video capture (if different from preview) are expected to be allocated within CameraAdapter
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_VIDEO);
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ mRecordingEnabled = true;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+/**
+ @brief Set the camera parameters specific to Video Recording.
+
+ This function checks for the camera parameters which have to be set for recording.
+ Video Recording needs CAPTURE_MODE to be VIDEO_MODE. This function sets it.
+ This function also enables Video Recording specific functions like VSTAB & VNF.
+
+ @param none
+ @return true if preview needs to be restarted for VIDEO_MODE parameters to take effect.
+ @todo Modify the policies for enabling VSTAB & VNF usecase based later.
+
+ */
+bool CameraHal::setVideoModeParameters(const android::CameraParameters& params)
+{
+ const char *valstr = NULL;
+ const char *valstrRemote = NULL;
+ bool restartPreviewRequired = false;
+
+ LOG_FUNCTION_NAME;
+
+ // Set CAPTURE_MODE to VIDEO_MODE, if not set already and Restart Preview
+ valstr = mParameters.get(TICameraParameters::KEY_CAP_MODE);
+ if ( (valstr == NULL) ||
+ ( (valstr != NULL) && ( (strcmp(valstr, (const char *) TICameraParameters::VIDEO_MODE) != 0) &&
+ (strcmp(valstr, (const char *) TICameraParameters::VIDEO_MODE_HQ ) != 0) ) ) ) {
+ CAMHAL_LOGDA("Set CAPTURE_MODE to VIDEO_MODE");
+ mParameters.set(TICameraParameters::KEY_CAP_MODE, (const char *) TICameraParameters::VIDEO_MODE);
+ restartPreviewRequired = true;
+ }
+
+ // set VSTAB, restart is required if VSTAB value has changed
+ int prevWidth, prevHeight;
+ params.getPreviewSize(&prevWidth, &prevHeight);
+ valstr = mParameters.get(android::CameraParameters::KEY_VIDEO_STABILIZATION);
+ if (prevWidth == 1920 &&
+ (mSocFamily == SocFamily_Omap4430 || mSocFamily == SocFamily_Omap4460)) {
+ // forcibly set to false because of not enough memory for needed buffer size in this case
+ CAMHAL_LOGDA("Forcing VSTAB off");
+ if (strcmp(valstr, android::CameraParameters::FALSE) != 0) {
+ restartPreviewRequired = true;
+ }
+ mParameters.set(android::CameraParameters::KEY_VIDEO_STABILIZATION,
+ android::CameraParameters::FALSE);
+ } else {
+ if ((valstrRemote = params.get(android::CameraParameters::KEY_VIDEO_STABILIZATION)) != NULL) {
+ // make sure we support vstab
+ if (strcmp(mCameraProperties->get(CameraProperties::VSTAB_SUPPORTED),
+ android::CameraParameters::TRUE) == 0) {
+ if (strcmp(valstr, valstrRemote) != 0) {
+ restartPreviewRequired = true;
+ }
+ mParameters.set(android::CameraParameters::KEY_VIDEO_STABILIZATION,
+ valstrRemote);
+ }
+ } else if (mParameters.get(android::CameraParameters::KEY_VIDEO_STABILIZATION)) {
+ // vstab was configured but now unset
+ restartPreviewRequired = true;
+ mParameters.remove(android::CameraParameters::KEY_VIDEO_STABILIZATION);
+ }
+ }
+
+ // Set VNF
+ if ((valstrRemote = params.get(TICameraParameters::KEY_VNF)) == NULL) {
+ CAMHAL_LOGDA("Enable VNF");
+ mParameters.set(TICameraParameters::KEY_VNF, android::CameraParameters::TRUE);
+ restartPreviewRequired = true;
+ } else {
+ valstr = mParameters.get(TICameraParameters::KEY_VNF);
+ if (valstr && strcmp(valstr, valstrRemote) != 0) {
+ restartPreviewRequired = true;
+ }
+ mParameters.set(TICameraParameters::KEY_VNF, valstrRemote);
+ }
+
+#if !defined(OMAP_ENHANCEMENT) && !defined(ENHANCED_DOMX)
+ // For VSTAB alone for 1080p resolution, padded width goes > 2048, which cannot be rendered by GPU.
+ // In such case, there is support in Ducati for combination of VSTAB & VNF requiring padded width < 2048.
+ // So we are forcefully enabling VNF, if VSTAB is enabled for 1080p resolution.
+ int w, h;
+ params.getPreviewSize(&w, &h);
+ valstr = mParameters.get(android::CameraParameters::KEY_VIDEO_STABILIZATION);
+ if (valstr && (strcmp(valstr, android::CameraParameters::TRUE) == 0) && (w == 1920)) {
+ CAMHAL_LOGDA("Force Enable VNF for 1080p");
+ const char *valKeyVnf = mParameters.get(TICameraParameters::KEY_VNF);
+ if(!valKeyVnf || (strcmp(valKeyVnf, android::CameraParameters::TRUE) != 0)) {
+ mParameters.set(TICameraParameters::KEY_VNF, android::CameraParameters::TRUE);
+ restartPreviewRequired = true;
+ }
+ }
+#endif
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return restartPreviewRequired;
+}
+
+/**
+ @brief Reset the camera parameters specific to Video Recording.
+
+ This function resets CAPTURE_MODE and disables Recording specific functions like VSTAB & VNF.
+
+ @param none
+ @return true if preview needs to be restarted for VIDEO_MODE parameters to take effect.
+
+ */
+bool CameraHal::resetVideoModeParameters()
+{
+ const char *valstr = NULL;
+ bool restartPreviewRequired = false;
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ // ignore this if we are already recording
+ if (mRecordingEnabled) {
+ return false;
+ }
+
+ // Set CAPTURE_MODE to VIDEO_MODE, if not set already and Restart Preview
+ valstr = mParameters.get(TICameraParameters::KEY_CAP_MODE);
+ if ((valstr != NULL) && (strcmp(valstr, TICameraParameters::VIDEO_MODE) == 0)) {
+ CAMHAL_LOGDA("Reset Capture-Mode to default");
+ mParameters.set(TICameraParameters::KEY_CAP_MODE, "");
+ restartPreviewRequired = true;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return restartPreviewRequired;
+}
+
/**
   @brief Restart the preview with setParameter.

   This function restarts preview, for some VIDEO_MODE parameters to take effect.
   It force-stops the current preview, re-applies the backed-up capture mode and
   the current parameter set to the camera adapter, and starts preview again.

   @param none
   @return NO_ERROR If preview could be restarted without any issues

 */
status_t CameraHal::restartPreview()
{
    status_t ret = NO_ERROR;

    LOG_FUNCTION_NAME;

    // Retain CAPTURE_MODE before calling stopPreview(), since it is reset in stopPreview().

    forceStopPreview();

    {
        // Re-apply parameters under the lock before bringing preview back up.
        android::AutoMutex lock(mLock);
        if (!mCapModeBackup.isEmpty()) {
            // Restore the capture mode saved before the preview was stopped
            // (see the caller that populates mCapModeBackup).
            mParameters.set(TICameraParameters::KEY_CAP_MODE, mCapModeBackup.string());
        } else {
            mParameters.set(TICameraParameters::KEY_CAP_MODE, "");
        }
        mCameraAdapter->setParameters(mParameters);
    }

    ret = startPreview();

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
+
/**
   @brief Stop a previously started recording.

   Stops any in-flight video snapshot, stops the callback notifier and the
   camera adapter video stream, frees the dedicated video buffers when they
   were in use, and clears the internal recording hint. No-op when recording
   is not currently enabled.

   @param none
   @return none

 */
void CameraHal::stopRecording()
{
    CameraAdapter::AdapterState currentState;

    LOG_FUNCTION_NAME;

    android::AutoMutex lock(mLock);

    if (!mRecordingEnabled )
    {
        return;
    }

    // A video snapshot may still be in flight; stop the image capture first.
    currentState = mCameraAdapter->getState();
    if (currentState == CameraAdapter::VIDEO_CAPTURE_STATE) {
        mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_IMAGE_CAPTURE);
    }

    // Stop delivering frames to the application before stopping the source.
    mAppCallbackNotifier->stopRecording();

    mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_VIDEO);

    mRecordingEnabled = false;

    // Release the dedicated video buffers if recording used separate buffers
    // (set up via useVideoBuffers(true) at start of recording).
    if ( mAppCallbackNotifier->getUesVideoBuffers() ){
        freeVideoBufs(mVideoBuffers);
        if (mVideoBuffers){
            CAMHAL_LOGVB(" FREEING mVideoBuffers %p", mVideoBuffers);
            delete [] mVideoBuffers;
        }
        mVideoBuffers = NULL;
    }

    // reset internal recording hint in case camera adapter needs to make some
    // decisions....(will only be sent to camera adapter if camera restart is required)
    mParameters.remove(TICameraParameters::KEY_RECORDING_HINT);

    LOG_FUNCTION_NAME_EXIT;
}
+
+/**
+ @brief Returns true if recording is enabled.
+
+ @param none
+ @return true If recording is currently running
+ false If recording has been stopped
+
+ */
+int CameraHal::recordingEnabled()
+{
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return mRecordingEnabled;
+}
+
+/**
+ @brief Release a record frame previously returned by CAMERA_MSG_VIDEO_FRAME.
+
+ @param[in] mem MemoryBase pointer to the frame being released. Must be one of the buffers
+ previously given by CameraHal
+ @return none
+
+ */
+void CameraHal::releaseRecordingFrame(const void* mem)
+{
+ LOG_FUNCTION_NAME;
+
+ //CAMHAL_LOGDB(" 0x%x", mem->pointer());
+
+ if ( ( mRecordingEnabled ) && mem != NULL)
+ {
+ mAppCallbackNotifier->releaseRecordingFrame(mem);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return;
+}
+
/**
   @brief Start auto focus

   This call asynchronous.
   The notification callback routine is called with CAMERA_MSG_FOCUS once when
   focusing is complete. autoFocus() will be called again if another auto focus is
   needed.

   @param none
   @return NO_ERROR
   @todo Define the error codes if the focus is not locked

 */
status_t CameraHal::autoFocus()
{
    status_t ret = NO_ERROR;

#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS

    // Timestamp the focus request for performance instrumentation.
    gettimeofday(&mStartFocus, NULL);

#endif

    LOG_FUNCTION_NAME;

    android::AutoMutex lock(mLock);

    // Make sure the focus-complete notification reaches the application.
    mMsgEnabled |= CAMERA_MSG_FOCUS;

    if ( NULL == mCameraAdapter )
    {
        ret = -1;
        goto EXIT;
    }

    CameraAdapter::AdapterState state;
    ret = mCameraAdapter->getState(state);
    if (ret != NO_ERROR)
    {
        goto EXIT;
    }

    // A second start-AF request while one is in progress is silently ignored.
    if (state == CameraAdapter::AF_STATE)
    {
        CAMHAL_LOGI("Ignoring start-AF (already in progress)");
        goto EXIT;
    }

#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS

    //pass the autoFocus timestamp along with the command to camera adapter
    ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_PERFORM_AUTOFOCUS, ( int ) &mStartFocus);

#else

    ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_PERFORM_AUTOFOCUS);

#endif

EXIT:
    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
+
+/**
+ @brief Cancels auto-focus function.
+
+ If the auto-focus is still in progress, this function will cancel it.
+ Whether the auto-focus is in progress or not, this function will return the
+ focus position to the default. If the camera does not support auto-focus, this is a no-op.
+
+
+ @param none
+ @return NO_ERROR If the cancel succeeded
+ @todo Define error codes if cancel didnt succeed
+
+ */
+status_t CameraHal::cancelAutoFocus()
+{
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mLock);
+ android::CameraParameters adapterParams = mParameters;
+ mMsgEnabled &= ~CAMERA_MSG_FOCUS;
+
+ if( NULL != mCameraAdapter )
+ {
+ adapterParams.set(TICameraParameters::KEY_AUTO_FOCUS_LOCK, android::CameraParameters::FALSE);
+ mCameraAdapter->setParameters(adapterParams);
+ mCameraAdapter->sendCommand(CameraAdapter::CAMERA_CANCEL_AUTOFOCUS);
+ mAppCallbackNotifier->flushEventQueue();
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_ERROR;
+}
+
+void CameraHal::setEventProvider(int32_t eventMask, MessageNotifier * eventNotifier)
+{
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL != mEventProvider )
+ {
+ mEventProvider->disableEventNotification(CameraHalEvent::ALL_EVENTS);
+ delete mEventProvider;
+ mEventProvider = NULL;
+ }
+
+ mEventProvider = new EventProvider(eventNotifier, this, eventCallbackRelay);
+ if ( NULL == mEventProvider )
+ {
+ CAMHAL_LOGEA("Error in creating EventProvider");
+ }
+ else
+ {
+ mEventProvider->enableEventNotification(eventMask);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void CameraHal::eventCallbackRelay(CameraHalEvent* event)
+{
+ LOG_FUNCTION_NAME;
+
+ CameraHal *appcbn = ( CameraHal * ) (event->mCookie);
+ appcbn->eventCallback(event );
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
/**
   @brief Instance-side event handler, reached via eventCallbackRelay().

   Currently a placeholder: the event is not acted upon here, only the
   function entry/exit is traced.

   @param[in] event Event delivered by the event provider (unused).
   @return none

 */
void CameraHal::eventCallback(CameraHalEvent* event)
{
    LOG_FUNCTION_NAME;

    LOG_FUNCTION_NAME_EXIT;
}
+
+status_t CameraHal::startImageBracketing()
+{
+ status_t ret = NO_ERROR;
+ CameraFrame frame;
+ CameraAdapter::BuffersDescriptor desc;
+ unsigned int max_queueable = 0;
+
+
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ gettimeofday(&mStartCapture, NULL);
+
+#endif
+
+ LOG_FUNCTION_NAME;
+
+ if(!previewEnabled() && !mDisplayPaused)
+ {
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_INIT;
+ }
+
+ if ( !mBracketingEnabled )
+ {
+ return ret;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ mBracketingRunning = true;
+ }
+
+ if ( (NO_ERROR == ret) && ( NULL != mCameraAdapter ) )
+ {
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE,
+ ( int ) &frame,
+ ( mBracketRangeNegative + 1 ));
+
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEB("CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE returned error 0x%x", ret);
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ if ( NULL != mAppCallbackNotifier.get() )
+ {
+ mAppCallbackNotifier->setBurst(true);
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ mParameters.getPictureSize(( int * ) &frame.mWidth,
+ ( int * ) &frame.mHeight);
+
+ ret = allocImageBufs(frame.mWidth,
+ frame.mHeight,
+ frame.mLength,
+ mParameters.getPictureFormat(),
+ ( mBracketRangeNegative + 1 ));
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEB("allocImageBufs returned error 0x%x", ret);
+ }
+ }
+
+ if ( (NO_ERROR == ret) && ( NULL != mCameraAdapter ) )
+ {
+
+ desc.mBuffers = mImageBuffers;
+ desc.mOffsets = mImageOffsets;
+ desc.mFd = mImageFd;
+ desc.mLength = mImageLength;
+ desc.mCount = ( size_t ) ( mBracketRangeNegative + 1 );
+ desc.mMaxQueueable = ( size_t ) ( mBracketRangeNegative + 1 );
+
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_USE_BUFFERS_IMAGE_CAPTURE,
+ ( int ) &desc);
+
+ if ( NO_ERROR == ret )
+ {
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ //pass capture timestamp along with the camera adapter command
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_BRACKET_CAPTURE, ( mBracketRangePositive + 1 ), (int) &mStartCapture);
+
+#else
+
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_BRACKET_CAPTURE, ( mBracketRangePositive + 1 ));
+
+#endif
+
+ }
+ }
+
+ return ret;
+}
+
+status_t CameraHal::stopImageBracketing()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if( !previewEnabled() )
+ {
+ return NO_INIT;
+ }
+
+ mBracketingRunning = false;
+
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_BRACKET_CAPTURE);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
/**
   @brief Take a picture.

   Thin public wrapper: acquires the CameraHal lock and delegates to
   __takePicture(), which implements the actual capture sequence.

   @param[in] params Flattened android::ShotParameters string from the application.
   @return NO_ERROR If able to switch to image capture
   @todo Define error codes if unable to switch to image capture

 */
status_t CameraHal::takePicture(const char *params)
{
    android::AutoMutex lock(mLock);
    return __takePicture(params);
}
+
/**
   @brief Internal function for getting a captured image.
   shared by takePicture and reprocess.

   Sequence: cancel any in-flight AF, validate state, apply shot parameters
   (CPCam builds), size and register capture buffers with the adapter, pause
   preview where appropriate, then issue CAMERA_START_IMAGE_CAPTURE.
   Caller must hold mLock.

   @param[in] params Flattened android::ShotParameters string; may be empty.
   @param[in] captureStart Optional externally measured capture start time
              (passed by reprocess() so PPM timing spans the whole operation);
              NULL means "timestamp now".
   @return NO_ERROR If able to switch to image capture
   @todo Define error codes if unable to switch to image capture

 */
status_t CameraHal::__takePicture(const char *params, struct timeval *captureStart)
{
    // cancel AF state if needed (before any operation and mutex lock)
    if (mCameraAdapter->getState() == CameraAdapter::AF_STATE) {
        cancelAutoFocus();
    }

    status_t ret = NO_ERROR;
    CameraFrame frame;
    CameraAdapter::BuffersDescriptor desc;
    int burst = -1;
    const char *valstr = NULL;
    /* Increasing the count from 1 to 3 because the V4LAdapter seems to require at least 3 buffers
     * for starting streaming
     */
    unsigned int bufferCount = 3;
    unsigned int max_queueable = 0;
    unsigned int rawBufferCount = 1;
    bool isCPCamMode = false;
    android::sp<DisplayAdapter> outAdapter = 0;
    bool reuseTapout = false;

#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS

    // Use the caller-supplied start time when present so PPM measurements
    // include work done before this function was entered.
    if ( NULL == captureStart ) {
        gettimeofday(&mStartCapture, NULL);
    } else {
        memcpy(&mStartCapture, captureStart, sizeof(struct timeval));
    }

#endif

    LOG_FUNCTION_NAME;

    if(!previewEnabled() && !mDisplayPaused)
    {
        LOG_FUNCTION_NAME_EXIT;
        CAMHAL_LOGEA("Preview not started...");
        return NO_INIT;
    }

    valstr = mParameters.get(TICameraParameters::KEY_CAP_MODE);

    isCPCamMode = valstr && !strcmp(valstr, TICameraParameters::CP_CAM_MODE);

    // return error if we are already capturing
    // however, we can queue a capture when in cpcam mode
    if ( ((mCameraAdapter->getState() == CameraAdapter::CAPTURE_STATE &&
           mCameraAdapter->getNextState() != CameraAdapter::PREVIEW_STATE) ||
          (mCameraAdapter->getState() == CameraAdapter::VIDEO_CAPTURE_STATE &&
           mCameraAdapter->getNextState() != CameraAdapter::VIDEO_STATE)) &&
         !isCPCamMode) {
        CAMHAL_LOGEA("Already capturing an image...");
        return NO_INIT;
    }

    // we only support video snapshot if we are in video mode (recording hint is set)
    if ( (mCameraAdapter->getState() == CameraAdapter::VIDEO_STATE) &&
         (valstr && ( strcmp(valstr, TICameraParameters::VIDEO_MODE) &&
                      strcmp(valstr, TICameraParameters::VIDEO_MODE_HQ ) ) ) ) {
        CAMHAL_LOGEA("Trying to capture while recording without recording hint set...");
        return INVALID_OPERATION;
    }

#ifdef OMAP_ENHANCEMENT_CPCAM
    // check if camera application is using shots parameters
    // api. parameters set here override anything set using setParameters
    // TODO(XXX): Just going to use legacy TI parameters for now. Need
    // add new APIs in CameraHal to utilize android::ShotParameters later, so
    // we don't have to parse through the whole set of parameters
    // in camera adapter
    if (strlen(params) > 0) {
        android::ShotParameters shotParams;
        const char *valStr;
        const char *valExpComp, *valExpGain;
        int valNum;

        android::String8 shotParams8(params);

        shotParams.unflatten(shotParams8);
        mParameters.remove(TICameraParameters::KEY_EXP_GAIN_BRACKETING_RANGE);
        mParameters.remove(TICameraParameters::KEY_EXP_BRACKETING_RANGE);

        // Exposure compensation takes precedence over exposure/gain pairs.
        valExpGain = shotParams.get(android::ShotParameters::KEY_EXP_GAIN_PAIRS);
        valExpComp = shotParams.get(android::ShotParameters::KEY_EXP_COMPENSATION);
        if (NULL != valExpComp) {
            mParameters.set(TICameraParameters::KEY_EXP_BRACKETING_RANGE, valExpComp);
        } else if (NULL != valExpGain) {
            mParameters.set(TICameraParameters::KEY_EXP_GAIN_BRACKETING_RANGE, valExpGain);
        }

        valNum = shotParams.getInt(android::ShotParameters::KEY_BURST);
        if (valNum >= 0) {
            mParameters.set(TICameraParameters::KEY_BURST, valNum);
            burst = valNum;
        }

        valStr = shotParams.get(android::ShotParameters::KEY_FLUSH_CONFIG);
        if (valStr!= NULL) {
            if ( 0 == strcmp(valStr, android::ShotParameters::TRUE) ) {
                mParameters.set(TICameraParameters::KEY_FLUSH_SHOT_CONFIG_QUEUE,
                                android::CameraParameters::TRUE);
            } else if ( 0 == strcmp(valStr, android::ShotParameters::FALSE) ) {
                mParameters.set(TICameraParameters::KEY_FLUSH_SHOT_CONFIG_QUEUE,
                                android::CameraParameters::FALSE);
            }
        }

        // Resolve the requested tap-out surface against the registered adapters.
        valStr = shotParams.get(android::ShotParameters::KEY_CURRENT_TAP_OUT);
        if (valStr != NULL) {
            int index = -1;
            for (unsigned int i = 0; i < mOutAdapters.size(); i++) {
                if(mOutAdapters.itemAt(i)->match(valStr)) {
                    index = i;
                    break;
                }
            }
            if (index < 0) {
                CAMHAL_LOGE("Invalid tap out surface passed to camerahal");
                return BAD_VALUE;
            }
            CAMHAL_LOGD("Found matching out adapter at %d", index);
            outAdapter = mOutAdapters.itemAt(index);
            if ( outAdapter == mBufferSourceAdapter_Out ) {
                reuseTapout = true;
            }
        }

        mCameraAdapter->setParameters(mParameters);
    } else
#endif
    {
        // TODO(XXX): Should probably reset burst and bracketing params
        // when we remove legacy TI parameters implementation
    }

#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS

    CameraHal::PPM("Takepicture parameters set: ", &mStartCapture);

#endif

    // if we are already in the middle of a capture and using the same
    // tapout ST...then we just need setParameters and start image
    // capture to queue more shots
    if (((mCameraAdapter->getState() & CameraAdapter::CAPTURE_STATE) ==
             CameraAdapter::CAPTURE_STATE) &&
        (mCameraAdapter->getNextState() != CameraAdapter::PREVIEW_STATE) &&
        (reuseTapout) ) {
#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
        //pass capture timestamp along with the camera adapter command
        ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_IMAGE_CAPTURE,
                                          (int) &mStartCapture);
#else
        ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_IMAGE_CAPTURE);
#endif
        return ret;
    }

    if ( !mBracketingRunning )
    {
        // if application didn't set burst through android::ShotParameters
        // then query from TICameraParameters
        if ((burst == -1) && (NO_ERROR == ret)) {
            burst = mParameters.getInt(TICameraParameters::KEY_BURST);
        }

        //Allocate all buffers only in burst capture case
        if ( burst > 0 ) {
            // For CPCam mode...allocate for worst case burst
            bufferCount = isCPCamMode || (burst > CameraHal::NO_BUFFERS_IMAGE_CAPTURE) ?
                              CameraHal::NO_BUFFERS_IMAGE_CAPTURE : burst;

            if (outAdapter.get()) {
                if ( reuseTapout ) {
                    bufferCount = mImageCount;
                } else {
                    bufferCount = outAdapter->getBufferCount();
                    if (bufferCount < 1) {
                        bufferCount = NO_BUFFERS_IMAGE_CAPTURE_SYSTEM_HEAP;
                    }
                }
            }

            if ( NULL != mAppCallbackNotifier.get() ) {
                mAppCallbackNotifier->setBurst(true);
            }
        } else if ( mBracketingEnabled ) {
            bufferCount = mBracketRangeNegative + 1;
            if ( NULL != mAppCallbackNotifier.get() ) {
                mAppCallbackNotifier->setBurst(false);
            }
        } else {
            if ( NULL != mAppCallbackNotifier.get() ) {
                mAppCallbackNotifier->setBurst(false);
            }
        }

        // pause preview during normal image capture
        // do not pause preview if recording (video state)
        if ( (NO_ERROR == ret) && (NULL != mDisplayAdapter.get()) ) {
            if (mCameraAdapter->getState() != CameraAdapter::VIDEO_STATE) {
                mDisplayPaused = true;
                mPreviewEnabled = false;
                ret = mDisplayAdapter->pauseDisplay(mDisplayPaused);
                // since preview is paused we should stop sending preview frames too
                if(mMsgEnabled & CAMERA_MSG_PREVIEW_FRAME) {
                    mAppCallbackNotifier->disableMsgType (CAMERA_MSG_PREVIEW_FRAME);
                }
            }

#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
            mDisplayAdapter->setSnapshotTimeRef(&mStartCapture);
#endif
        }

        // if we taking video snapshot...
        if ((NO_ERROR == ret) && (mCameraAdapter->getState() == CameraAdapter::VIDEO_STATE)) {
            // enable post view frames if not already enabled so we can internally
            // save snapshot frames for generating thumbnail
            if((mMsgEnabled & CAMERA_MSG_POSTVIEW_FRAME) == 0) {
                mAppCallbackNotifier->enableMsgType(CAMERA_MSG_POSTVIEW_FRAME);
            }
        }

        if ( (NO_ERROR == ret) && (NULL != mCameraAdapter) )
        {
            if ( NO_ERROR == ret )
                ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE,
                                                  ( int ) &frame,
                                                  bufferCount);

            if ( NO_ERROR != ret )
            {
                CAMHAL_LOGEB("CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE returned error 0x%x", ret);
            }
        }

#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS

        CameraHal::PPM("Takepicture buffer size queried: ", &mStartCapture);

#endif

        if (outAdapter.get()) {
            // Avoid locking the tapout again when reusing it
            if (!reuseTapout) {
                // Need to reset buffers if we are switching adapters since we don't know
                // the state of the new buffer list
                ret = outAdapter->maxQueueableBuffers(max_queueable);
                if (NO_ERROR != ret) {
                    CAMHAL_LOGE("Couldn't get max queuable");
                    return ret;
                }
                mImageBuffers = outAdapter->getBuffers(true);
                mImageOffsets = outAdapter->getOffsets();
                mImageFd = outAdapter->getFd();
                mImageLength = outAdapter->getSize();
                mImageCount = bufferCount;
                mBufferSourceAdapter_Out = outAdapter;
            }
        } else {
            mBufferSourceAdapter_Out.clear();
            // allocImageBufs will only allocate new buffers if mImageBuffers is NULL
            if ( NO_ERROR == ret ) {
                max_queueable = bufferCount;
                ret = allocImageBufs(frame.mAlignment / getBPP(mParameters.getPictureFormat()),
                                     frame.mHeight,
                                     frame.mLength,
                                     mParameters.getPictureFormat(),
                                     bufferCount);
                if ( NO_ERROR != ret ) {
                    CAMHAL_LOGEB("allocImageBufs returned error 0x%x", ret);
                }
            }
        }

#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS

        CameraHal::PPM("Takepicture buffers allocated: ", &mStartCapture);
        memcpy(&mImageBuffers->ppmStamp, &mStartCapture, sizeof(struct timeval));

#endif

        if ( (NO_ERROR == ret) && ( NULL != mCameraAdapter ) )
        {
            // Register the image buffers with the adapter.
            desc.mBuffers = mImageBuffers;
            desc.mOffsets = mImageOffsets;
            desc.mFd = mImageFd;
            desc.mLength = mImageLength;
            desc.mCount = ( size_t ) bufferCount;
            desc.mMaxQueueable = ( size_t ) max_queueable;

            ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_USE_BUFFERS_IMAGE_CAPTURE,
                                              ( int ) &desc);
        }
        if (mRawCapture) {
            // NOTE(review): RAW capture reuses the video-buffer bookkeeping
            // (mVideoBuffers/mVideoOffsets/...) — presumably filled by
            // allocRawBufs; confirm against allocRawBufs' implementation.
            if ( NO_ERROR == ret ) {
                CAMHAL_LOGDB("Raw capture buffers setup - %s", mParameters.getPictureFormat());
                ret = allocRawBufs(mParameters.getInt(TICameraParameters::RAW_WIDTH),
                                   mParameters.getInt(TICameraParameters::RAW_HEIGHT),
                                   android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB,
                                   rawBufferCount);

                if ( NO_ERROR != ret ) {
                    CAMHAL_LOGEB("allocRawBufs (for RAW capture) returned error 0x%x", ret);
                }
            }

            if ((NO_ERROR == ret) && ( NULL != mCameraAdapter )) {
                desc.mBuffers = mVideoBuffers;
                desc.mOffsets = mVideoOffsets;
                desc.mFd = mVideoFd;
                desc.mLength = mVideoLength;
                desc.mCount = ( size_t ) rawBufferCount;
                desc.mMaxQueueable = ( size_t ) rawBufferCount;

                ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_USE_BUFFERS_VIDEO_CAPTURE,
                                                  ( int ) &desc);
            }
        }
    }

#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS

    CameraHal::PPM("Takepicture buffers registered: ", &mStartCapture);

#endif

    if ((ret == NO_ERROR) && mBufferSourceAdapter_Out.get()) {
        mBufferSourceAdapter_Out->enableDisplay(0, 0, NULL);
    }

    if ((NO_ERROR == ret) && (NULL != mCameraAdapter)) {

#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS

        //pass capture timestamp along with the camera adapter command
        ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_IMAGE_CAPTURE, (int) &mStartCapture);

        CameraHal::PPM("Takepicture capture started: ", &mStartCapture);

#else

        ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_IMAGE_CAPTURE);

#endif

    }

    return ret;
}
+
+/**
+ @brief Cancel a picture that was started with takePicture.
+
+ Calling this method when no picture is being taken is a no-op.
+
+ @param none
+ @return NO_ERROR If cancel succeeded. Cancel can succeed if image callback is not sent
+ @todo Define error codes
+
+ */
+status_t CameraHal::cancelPicture( )
+{
+ LOG_FUNCTION_NAME;
+ status_t ret = NO_ERROR;
+
+ ret = signalEndImageCapture();
+ return NO_ERROR;
+}
+
+/**
+ @brief Return the camera parameters.
+
+ @param none
+ @return Currently configured camera parameters
+
+ */
+char* CameraHal::getParameters()
+{
+ android::String8 params_str8;
+ char* params_string;
+ const char * valstr = NULL;
+
+ LOG_FUNCTION_NAME;
+
+ if( NULL != mCameraAdapter )
+ {
+ mCameraAdapter->getParameters(mParameters);
+ }
+
+ if ( (valstr = mParameters.get(TICameraParameters::KEY_S3D_CAP_FRAME_LAYOUT)) != NULL ) {
+ if (!strcmp(TICameraParameters::S3D_TB_FULL, valstr)) {
+ mParameters.set(android::CameraParameters::KEY_SUPPORTED_PICTURE_SIZES, mParameters.get(TICameraParameters::KEY_SUPPORTED_PICTURE_TOPBOTTOM_SIZES));
+ } else if (!strcmp(TICameraParameters::S3D_SS_FULL, valstr)) {
+ mParameters.set(android::CameraParameters::KEY_SUPPORTED_PICTURE_SIZES, mParameters.get(TICameraParameters::KEY_SUPPORTED_PICTURE_SIDEBYSIDE_SIZES));
+ } else if ((!strcmp(TICameraParameters::S3D_TB_SUBSAMPLED, valstr))
+ || (!strcmp(TICameraParameters::S3D_SS_SUBSAMPLED, valstr))) {
+ mParameters.set(android::CameraParameters::KEY_SUPPORTED_PICTURE_SIZES, mParameters.get(TICameraParameters::KEY_SUPPORTED_PICTURE_SUBSAMPLED_SIZES));
+ }
+ }
+
+ if ( (valstr = mParameters.get(TICameraParameters::KEY_S3D_PRV_FRAME_LAYOUT)) != NULL ) {
+ if (!strcmp(TICameraParameters::S3D_TB_FULL, valstr)) {
+ mParameters.set(android::CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES, mParameters.get(TICameraParameters::KEY_SUPPORTED_PREVIEW_TOPBOTTOM_SIZES));
+ } else if (!strcmp(TICameraParameters::S3D_SS_FULL, valstr)) {
+ mParameters.set(android::CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES, mParameters.get(TICameraParameters::KEY_SUPPORTED_PREVIEW_SIDEBYSIDE_SIZES));
+ } else if ((!strcmp(TICameraParameters::S3D_TB_SUBSAMPLED, valstr))
+ || (!strcmp(TICameraParameters::S3D_SS_SUBSAMPLED, valstr))) {
+ mParameters.set(android::CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES, mParameters.get(TICameraParameters::KEY_SUPPORTED_PREVIEW_SUBSAMPLED_SIZES));
+ }
+ }
+
+ android::CameraParameters mParams = mParameters;
+
+ // Handle RECORDING_HINT to Set/Reset Video Mode Parameters
+ valstr = mParameters.get(android::CameraParameters::KEY_RECORDING_HINT);
+ if(valstr != NULL)
+ {
+ if(strcmp(valstr, android::CameraParameters::TRUE) == 0)
+ {
+ //HACK FOR MMS MODE
+ resetPreviewRes(&mParams);
+ }
+ }
+
+ // do not send internal parameters to upper layers
+ mParams.remove(TICameraParameters::KEY_RECORDING_HINT);
+ mParams.remove(TICameraParameters::KEY_AUTO_FOCUS_LOCK);
+
+ params_str8 = mParams.flatten();
+
+ // camera service frees this string...
+ params_string = (char*) malloc(sizeof(char) * (params_str8.length()+1));
+ strcpy(params_string, params_str8.string());
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ ///Return the current set of parameters
+
+ return params_string;
+}
+
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
/**
   @brief Starts reprocessing operation.

   Resolves the tap-in surface named in the shot parameters, pulls the input
   buffers from it, registers them with the camera adapter, enables the tap-in
   point and finally kicks off the capture via __takePicture().

   @param[in] params Flattened android::ShotParameters string; must name a
              tap-in surface via KEY_CURRENT_TAP_IN.
   @return NO_ERROR on success, BAD_VALUE for an unknown/missing tap-in
           surface, or the error propagated from the adapter/capture path.
 */
status_t CameraHal::reprocess(const char *params)
{
    status_t ret = NO_ERROR;
    int bufferCount = 0;
    CameraAdapter::BuffersDescriptor desc;
    CameraBuffer *reprocBuffers = NULL;
    android::ShotParameters shotParams;
    const char *valStr = NULL;
    struct timeval startReprocess;

    android::AutoMutex lock(mLock);

    LOG_FUNCTION_NAME;

#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS

    // Timestamp here so the whole reprocess (including __takePicture) is measured.
    gettimeofday(&startReprocess, NULL);

#endif

    // 0. Get tap in surface
    if (strlen(params) > 0) {
        android::String8 shotParams8(params);
        shotParams.unflatten(shotParams8);
    }

    valStr = shotParams.get(android::ShotParameters::KEY_CURRENT_TAP_IN);
    if (valStr != NULL) {
        // Find the registered in-adapter matching the requested surface.
        int index = -1;
        for (unsigned int i = 0; i < mInAdapters.size(); i++) {
            if(mInAdapters.itemAt(i)->match(valStr)) {
                index = i;
                break;
            }
        }
        if (index < 0) {
            CAMHAL_LOGE("Invalid tap in surface passed to camerahal");
            return BAD_VALUE;
        }
        CAMHAL_LOGD("Found matching in adapter at %d", index);
        mBufferSourceAdapter_In = mInAdapters.itemAt(index);
    } else {
        CAMHAL_LOGE("No tap in surface sent with shot config!");
        return BAD_VALUE;
    }

    // 1. Get buffers
    if (mBufferSourceAdapter_In.get()) {
        reprocBuffers = mBufferSourceAdapter_In->getBufferList(&bufferCount);
    }

    if (!reprocBuffers) {
        CAMHAL_LOGE("Error: couldn't get input buffers for reprocess()");
        goto exit;
    }


#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS

    CameraHal::PPM("Got reprocess buffers: ", &startReprocess);

#endif

    // 2. Get buffer information and parse parameters
    {
        // One shot per input buffer.
        shotParams.setBurst(bufferCount);
    }

#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS

    memcpy(&reprocBuffers->ppmStamp, &startReprocess, sizeof(struct timeval));

#endif

    // 3. Give buffer to camera adapter
    desc.mBuffers = reprocBuffers;
    desc.mOffsets = 0;
    desc.mFd = 0;
    desc.mLength = 0;
    desc.mCount = (size_t) bufferCount;
    desc.mMaxQueueable = (size_t) bufferCount;

    ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_USE_BUFFERS_REPROCESS, (int) &desc);
    if (ret != NO_ERROR) {
        CAMHAL_LOGE("Error calling camera use buffers");
        goto exit;
    }

#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS

    CameraHal::PPM("Reprocess buffers registered: ", &startReprocess);

#endif

    // 4. Start reprocessing
    ret = mBufferSourceAdapter_In->enableDisplay(0, 0, NULL);
    if (ret != NO_ERROR) {
        CAMHAL_LOGE("Error enabling tap in point");
        goto exit;
    }

    // 5. Start capturing
    ret = __takePicture(shotParams.flatten().string(), &startReprocess);

exit:
    return ret;
}
+
+/**
+ @brief Cancels current reprocessing operation
+
+ */
+status_t CameraHal::cancel_reprocess( )
+{
+ LOG_FUNCTION_NAME;
+ status_t ret = NO_ERROR;
+
+ ret = signalEndImageCapture();
+ return NO_ERROR;
+}
+#endif
+
+
/**
   @brief Release a parameter string previously returned by getParameters().

   The string was allocated with malloc() in getParameters(), so free() is the
   matching release.

   @param[in] parms Parameter string to release; free(NULL) is a safe no-op.
   @return none

 */
void CameraHal::putParameters(char *parms)
{
    free(parms);
}
+
/**
   @brief Send command to camera driver.

   Dispatches the camera-service command codes (smooth zoom, face detection,
   focus-move messages, VTC preview init/deinit) to the camera adapter.
   Commands that require a running preview are rejected with BAD_VALUE when
   preview is off; unknown commands are silently ignored.

   @param[in] cmd  CAMERA_CMD_* command code.
   @param[in] arg1 First command argument (meaning depends on cmd).
   @param[in] arg2 Second command argument (currently unused here).
   @return NO_ERROR If the command succeeds
   @todo Define the error codes that this function can return

 */
status_t CameraHal::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2)
{
    status_t ret = NO_ERROR;

    LOG_FUNCTION_NAME;

    if ( ( NO_ERROR == ret ) && ( NULL == mCameraAdapter ) )
    {
        CAMHAL_LOGEA("No CameraAdapter instance");
        return -EINVAL;
    }

    ///////////////////////////////////////////////////////
    // Following commands NEED preview to be started
    ///////////////////////////////////////////////////////

    if ((!previewEnabled()) && ((cmd == CAMERA_CMD_START_SMOOTH_ZOOM)
        || (cmd == CAMERA_CMD_STOP_SMOOTH_ZOOM)
        || (cmd == CAMERA_CMD_START_FACE_DETECTION)
#ifdef OMAP_ENHANCEMENT_VTC
        || (cmd == CAMERA_CMD_PREVIEW_DEINITIALIZATION)
#endif
        ))
    {
        CAMHAL_LOGEA("sendCommand with cmd = 0x%x need preview to be started", cmd);
        return BAD_VALUE;
    }

    if ( NO_ERROR == ret )
    {
        switch(cmd)
        {
        case CAMERA_CMD_START_SMOOTH_ZOOM:
            // arg1 carries the target zoom level.
            ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_SMOOTH_ZOOM, arg1);

            break;

        case CAMERA_CMD_STOP_SMOOTH_ZOOM:
            ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_SMOOTH_ZOOM);

            break;

        case CAMERA_CMD_START_FACE_DETECTION:
            ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_FD);

            break;

        case CAMERA_CMD_STOP_FACE_DETECTION:
            // Stop-FD is tolerated with preview off; only forwarded when running.
            if (previewEnabled()) {
                ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_FD);
            }

            break;

#ifdef OMAP_ENHANCEMENT_VTC
        case CAMERA_CMD_PREVIEW_DEINITIALIZATION:
            if(mDisplayAdapter.get() != NULL) {
                ///Stop the buffer display first
                mDisplayAdapter->disableDisplay();
            }

            if(mAppCallbackNotifier.get() != NULL) {
                //Stop the callback sending
                mAppCallbackNotifier->stop();
                mAppCallbackNotifier->flushAndReturnFrames();
                mAppCallbackNotifier->stopPreviewCallbacks();
            }

            ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_DESTROY_TUNNEL);
            mTunnelSetup = false;

            break;

        case CAMERA_CMD_PREVIEW_INITIALIZATION:
            ret = cameraPreviewInitialization();

            break;
#endif

#ifdef ANDROID_API_JB_OR_LATER
        case CAMERA_CMD_ENABLE_FOCUS_MOVE_MSG:
            {
                // Toggle delivery of CAMERA_MSG_FOCUS_MOVE to the application.
                const bool enable = static_cast<bool>(arg1);
                android::AutoMutex lock(mLock);
                if ( enable ) {
                    mMsgEnabled |= CAMERA_MSG_FOCUS_MOVE;
                } else {
                    mMsgEnabled &= ~CAMERA_MSG_FOCUS_MOVE;
                }
                break;
            }
#endif

        default:
            break;
        };
    }

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
+
+/**
+   @brief Release the hardware resources owned by this object.
+
+   Note that this is *not* done in the destructor.
+
+   @param none
+   @return none
+
+ */
+void CameraHal::release()
+{
+    LOG_FUNCTION_NAME;
+    ///@todo Investigate on how release is used by CameraService. Vaguely remember that this is called
+    ///just before CameraHal object destruction
+    // Delegates all teardown to deinitialize(); safe to call more than once.
+    deinitialize();
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+
+/**
+   @brief Dump state of the camera hardware
+
+   Currently a stub: the fd is accepted but nothing is written.
+
+   @param[in] fd File descriptor (unused until h/w dump is supported)
+   @return NO_ERROR Dump succeeded
+   @todo Error codes for dump fail
+
+ */
+status_t CameraHal::dump(int fd) const
+{
+    LOG_FUNCTION_NAME;
+    ///Implement this method when the h/w dump function is supported on Ducati side
+    return NO_ERROR;
+}
+
+/*-------------Camera Hal Interface Method definitions ENDS here--------------------*/
+
+
+
+
+/*-------------Camera Hal Internal Method definitions STARTS here--------------------*/
+
+/**
+  @brief Constructor of CameraHal
+
+  Member variables are initialized here.  No allocations should be done here as we
+  don't use c++ exceptions in the code.
+
+  @param cameraId index of the camera this HAL instance controls
+ */
+CameraHal::CameraHal(int cameraId)
+    : mSocFamily(getSocFamily())
+{
+    LOG_FUNCTION_NAME;
+
+    ///Initialize all the member variables to their defaults
+    mPreviewEnabled = false;
+    mPreviewBuffers = NULL;
+    mImageBuffers = NULL;
+    mBufProvider = NULL;
+    mPreviewStartInProgress = false;
+    mVideoBuffers = NULL;
+    mVideoBufProvider = NULL;
+    mRecordingEnabled = false;
+    mDisplayPaused = false;
+    mSetPreviewWindowCalled = false;
+    mMsgEnabled = 0;
+    mAppCallbackNotifier = NULL;
+    mMemoryManager = NULL;
+    mCameraAdapter = NULL;
+    mBracketingEnabled = false;
+    mBracketingRunning = false;
+    mEventProvider = NULL;
+    mBracketRangePositive = 1;
+    mBracketRangeNegative = 1;
+    mMaxZoomSupported = 0;
+    mShutterEnabled = true;
+    mMeasurementEnabled = false;
+    mPreviewDataBuffers = NULL;
+    mCameraProperties = NULL;
+    mCurrentTime = 0;
+    mFalsePreview = 0;
+    mImageOffsets = NULL;
+    mImageLength = 0;
+    mImageFd = 0;
+    mImageCount = 0;
+    mVideoOffsets = NULL;
+    mVideoFd = 0;
+    mVideoLength = 0;
+    mPreviewDataOffsets = NULL;
+    mPreviewDataFd = 0;
+    mPreviewDataLength = 0;
+    mPreviewFd = 0;
+    mPreviewWidth = 0;
+    mPreviewHeight = 0;
+    mPreviewLength = 0;
+    mPreviewOffsets = NULL;
+    mPreviewRunning = 0;
+    mPreviewStateOld = 0;
+    // Note: mRecordingEnabled is already initialized above; the duplicate
+    // assignment has been removed.
+    mRecordEnabled = 0;
+    mSensorListener = NULL;
+    mVideoWidth = 0;
+    mVideoHeight = 0;
+#ifdef OMAP_ENHANCEMENT_VTC
+    mVTCUseCase = false;
+    mTunnelSetup = false;
+#endif
+    mPreviewInitializationDone = false;
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
+    mExtendedPreviewStreamOps = 0;
+#endif
+
+    //These values depends on the sensor characteristics
+
+    mRawCapture = false;
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+    //Initialize the CameraHAL constructor timestamp, which is used in the
+    // PPM() method as time reference if the user does not supply one.
+    gettimeofday(&ppm_start, NULL);
+
+#endif
+
+    mCameraIndex = cameraId;
+
+    mExternalLocking = false;
+
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+  @brief Destructor of CameraHal
+
+  This function simply calls deinitialize() to free up memory allocated during
+  the construction phase, then releases the remaining helper objects.
+ */
+CameraHal::~CameraHal()
+{
+    LOG_FUNCTION_NAME;
+
+    ///Call de-initialize here once more - it is the last chance for us to relinquish all the h/w and s/w resources
+    deinitialize();
+
+    if ( NULL != mEventProvider )
+        {
+        mEventProvider->disableEventNotification(CameraHalEvent::ALL_EVENTS);
+        delete mEventProvider;
+        mEventProvider = NULL;
+        }
+
+    /// Free the callback notifier
+    mAppCallbackNotifier.clear();
+
+    /// Free the display adapter
+    mDisplayAdapter.clear();
+
+    if ( NULL != mCameraAdapter ) {
+        // Drop the strong reference taken in initialize(); the adapter
+        // destroys itself once the last reference is gone.
+        mCameraAdapter->decStrong(mCameraAdapter);
+
+        mCameraAdapter = NULL;
+    }
+
+    freeImageBufs();
+    freeRawBufs();
+
+    /// Free the memory manager
+    mMemoryManager.clear();
+
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+   @brief Initialize the Camera HAL
+
+   Creates CameraAdapter, AppCallbackNotifier, DisplayAdapter and MemoryManager
+
+   @param properties camera properties for the selected sensor (must be non-NULL)
+   @return NO_ERROR - On success
+           NO_MEMORY - On failure to allocate memory for any of the objects
+   @remarks Camera Hal internal function
+
+ */
+
+status_t CameraHal::initialize(CameraProperties::Properties* properties)
+{
+    LOG_FUNCTION_NAME;
+
+    int sensor_index = 0;
+    const char* sensor_name = NULL;
+
+    ///Initialize the event mask used for registering an event provider for AppCallbackNotifier
+    ///Currently, registering all events as to be coming from CameraAdapter
+    int32_t eventMask = CameraHalEvent::ALL_EVENTS;
+
+    // Get my camera properties
+    mCameraProperties = properties;
+
+    if(!mCameraProperties)
+        {
+        goto fail_loop;
+        }
+
+    // Dump the properties of this Camera
+    // will only print if DEBUG macro is defined
+    mCameraProperties->dump();
+
+    if (strcmp(CameraProperties::DEFAULT_VALUE, mCameraProperties->get(CameraProperties::CAMERA_SENSOR_INDEX)) != 0 )
+        {
+        sensor_index = atoi(mCameraProperties->get(CameraProperties::CAMERA_SENSOR_INDEX));
+        }
+
+    if (strcmp(CameraProperties::DEFAULT_VALUE, mCameraProperties->get(CameraProperties::CAMERA_NAME)) != 0 ) {
+        sensor_name = mCameraProperties->get(CameraProperties::CAMERA_NAME);
+    }
+    CAMHAL_LOGDB("Sensor index= %d; Sensor name= %s", sensor_index, sensor_name);
+
+    // sensor_name stays NULL when the property is unset (DEFAULT_VALUE);
+    // guard the strcmp below to avoid dereferencing NULL.
+    if ((sensor_name != NULL) && (strcmp(sensor_name, V4L_CAMERA_NAME_USB) == 0)) {
+#ifdef V4L_CAMERA_ADAPTER
+        mCameraAdapter = V4LCameraAdapter_Factory(sensor_index, this);
+#endif
+    }
+    else {
+#ifdef OMX_CAMERA_ADAPTER
+        mCameraAdapter = OMXCameraAdapter_Factory(sensor_index);
+#endif
+    }
+
+    if ( ( NULL == mCameraAdapter ) || (mCameraAdapter->initialize(properties)!=NO_ERROR))
+        {
+        CAMHAL_LOGEA("Unable to create or initialize CameraAdapter");
+        mCameraAdapter = NULL;
+        goto fail_loop;
+        }
+
+    // Keep a strong reference for the lifetime of this HAL instance;
+    // released in the destructor via decStrong().
+    mCameraAdapter->incStrong(mCameraAdapter);
+    mCameraAdapter->registerImageReleaseCallback(releaseImageBuffers, (void *) this);
+    mCameraAdapter->registerEndCaptureCallback(endImageCapture, (void *)this);
+
+    if(!mAppCallbackNotifier.get())
+        {
+        /// Create the callback notifier
+        mAppCallbackNotifier = new AppCallbackNotifier();
+        if( ( NULL == mAppCallbackNotifier.get() ) || ( mAppCallbackNotifier->initialize() != NO_ERROR))
+            {
+            CAMHAL_LOGEA("Unable to create or initialize AppCallbackNotifier");
+            goto fail_loop;
+            }
+        }
+
+    if(!mMemoryManager.get())
+        {
+        /// Create Memory Manager
+        mMemoryManager = new MemoryManager();
+        if( ( NULL == mMemoryManager.get() ) || ( mMemoryManager->initialize() != NO_ERROR))
+            {
+            CAMHAL_LOGEA("Unable to create or initialize MemoryManager");
+            goto fail_loop;
+            }
+        }
+
+    ///Setup the class dependencies...
+
+    ///AppCallbackNotifier has to know where to get the Camera frames and the events like auto focus lock etc from.
+    ///CameraAdapter is the one which provides those events
+    ///Set it as the frame and event providers for AppCallbackNotifier
+    ///@remarks setEventProvider API takes in a bit mask of events for registering a provider for the different events
+    ///         That way, if events can come from DisplayAdapter in future, we will be able to add it as provider
+    ///         for any event
+    mAppCallbackNotifier->setEventProvider(eventMask, mCameraAdapter);
+    mAppCallbackNotifier->setFrameProvider(mCameraAdapter);
+
+    ///Any dynamic errors that happen during the camera use case has to be propagated back to the application
+    ///via CAMERA_MSG_ERROR. AppCallbackNotifier is the class that notifies such errors to the application
+    ///Set it as the error handler for CameraAdapter
+    mCameraAdapter->setErrorHandler(mAppCallbackNotifier.get());
+
+    ///Start the callback notifier
+    if(mAppCallbackNotifier->start() != NO_ERROR)
+        {
+        CAMHAL_LOGEA("Couldn't start AppCallbackNotifier");
+        goto fail_loop;
+        }
+
+    CAMHAL_LOGDA("Started AppCallbackNotifier..");
+    mAppCallbackNotifier->setMeasurements(mMeasurementEnabled);
+
+    ///Initialize default parameters
+    initDefaultParameters();
+
+
+    if ( setParameters(mParameters) != NO_ERROR )
+        {
+        CAMHAL_LOGEA("Failed to set default parameters?!");
+        }
+
+    // register for sensor events
+    mSensorListener = new SensorListener();
+    if (mSensorListener.get()) {
+        if (mSensorListener->initialize() == NO_ERROR) {
+            mSensorListener->setCallbacks(orientation_cb, this);
+            mSensorListener->enableSensor(SensorListener::SENSOR_ORIENTATION);
+        } else {
+            CAMHAL_LOGEA("Error initializing SensorListener. not fatal, continuing");
+            mSensorListener.clear();
+            mSensorListener = NULL;
+        }
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return NO_ERROR;
+
+    fail_loop:
+
+        ///Free up the resources because we failed somewhere up
+        deinitialize();
+        LOG_FUNCTION_NAME_EXIT;
+
+        return NO_MEMORY;
+
+}
+
+/**
+   @brief Checks whether width x height appears in the supported-resolution list.
+
+   @param width  candidate width in pixels
+   @param height candidate height in pixels
+   @param supportedResolutions comma-separated "WxH" list (may be NULL)
+   @return true when the resolution is listed, false otherwise
+ */
+bool CameraHal::isResolutionValid(unsigned int width, unsigned int height, const char *supportedResolutions)
+{
+    bool ret = false;
+    status_t status = NO_ERROR;
+    char tmpBuffer[MAX_PROP_VALUE_LENGTH];
+
+    LOG_FUNCTION_NAME;
+
+    if (NULL == supportedResolutions) {
+        CAMHAL_LOGEA("Invalid supported resolutions string");
+        goto exit;
+    }
+
+    // Build a "WxH" token and delegate the membership test.
+    status = snprintf(tmpBuffer, MAX_PROP_VALUE_LENGTH - 1, "%dx%d", width, height);
+    if (0 > status) {
+        CAMHAL_LOGEA("Error encountered while generating validation string");
+        goto exit;
+    }
+
+    ret = isParameterValid(tmpBuffer, supportedResolutions);
+
+exit:
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+/**
+   @brief Checks whether [fpsMin, fpsMax] fits inside one of the supported ranges.
+
+   @param fpsMin lower bound of the requested range (must be > 0)
+   @param fpsMax upper bound of the requested range (must be >= fpsMin)
+   @param supportedFpsRanges string of "(min,max)" pairs (may be NULL)
+   @return true when the range is contained in a supported pair
+ */
+bool CameraHal::isFpsRangeValid(int fpsMin, int fpsMax, const char *supportedFpsRanges)
+{
+    bool ret = false;
+    char supported[MAX_PROP_VALUE_LENGTH];
+    char *pos;
+    int suppFpsRangeArray[2];
+    int i = 0;
+
+    LOG_FUNCTION_NAME;
+
+    if ( NULL == supportedFpsRanges ) {
+        CAMHAL_LOGEA("Invalid supported FPS ranges string");
+        return false;
+    }
+
+    if (fpsMin <= 0 || fpsMax <= 0 || fpsMin > fpsMax) {
+        return false;
+    }
+
+    // strncpy does not NUL-terminate when the source fills the buffer;
+    // terminate explicitly so strtok cannot run past the end.
+    strncpy(supported, supportedFpsRanges, MAX_PROP_VALUE_LENGTH - 1);
+    supported[MAX_PROP_VALUE_LENGTH - 1] = '\0';
+    pos = strtok(supported, " (,)");
+    while (pos != NULL) {
+        suppFpsRangeArray[i] = atoi(pos);
+        if (i++) {
+            // Second value of a pair parsed: check containment, then reset.
+            if (fpsMin >= suppFpsRangeArray[0] && fpsMax <= suppFpsRangeArray[1]) {
+                ret = true;
+                break;
+            }
+            i = 0;
+        }
+        pos = strtok(NULL, " (,)");
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+/**
+   @brief Checks whether a string value appears in a comma-separated list.
+
+   @param param value to look up (may be NULL)
+   @param supportedParams comma-separated list of valid values (may be NULL)
+   @return true when param exactly matches one list entry
+ */
+bool CameraHal::isParameterValid(const char *param, const char *supportedParams)
+{
+    bool ret = false;
+    char *pos;
+    char supported[MAX_PROP_VALUE_LENGTH];
+
+    LOG_FUNCTION_NAME;
+
+    if (NULL == supportedParams) {
+        CAMHAL_LOGEA("Invalid supported parameters string");
+        goto exit;
+    }
+
+    if (NULL == param) {
+        CAMHAL_LOGEA("Invalid parameter string");
+        goto exit;
+    }
+
+    // strncpy leaves the buffer unterminated when the source is longer than
+    // the limit; terminate explicitly before tokenizing.
+    strncpy(supported, supportedParams, MAX_PROP_VALUE_LENGTH - 1);
+    supported[MAX_PROP_VALUE_LENGTH - 1] = '\0';
+
+    pos = strtok(supported, ",");
+    while (pos != NULL) {
+        if (!strcmp(pos, param)) {
+            ret = true;
+            break;
+        }
+        pos = strtok(NULL, ",");
+    }
+
+exit:
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+/**
+   @brief Checks whether an integer value appears in a comma-separated list.
+
+   Formats the value as a decimal string and delegates to the string overload.
+
+   @param param integer value to look up
+   @param supportedParams comma-separated list of valid values (may be NULL)
+   @return true when the value is listed
+ */
+bool CameraHal::isParameterValid(int param, const char *supportedParams)
+{
+    bool valid = false;
+    char valueStr[MAX_PROP_VALUE_LENGTH];
+
+    LOG_FUNCTION_NAME;
+
+    if ( NULL == supportedParams ) {
+        CAMHAL_LOGEA("Invalid supported parameters string");
+    } else if ( 0 > snprintf(valueStr, MAX_PROP_VALUE_LENGTH - 1, "%d", param) ) {
+        CAMHAL_LOGEA("Error encountered while generating validation string");
+    } else {
+        valid = isParameterValid(valueStr, supportedParams);
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return valid;
+}
+
+/**
+   @brief Determines whether a parameter change requires an adapter update.
+
+   @param new_param new value (must be non-NULL)
+   @param old_param previous value (must be non-NULL)
+   @param[out] update set to true on mismatch; left untouched when equal
+   @return NO_ERROR on success, -EINVAL on NULL input
+ */
+status_t CameraHal::doesSetParameterNeedUpdate(const char* new_param, const char* old_param, bool& update) {
+    if ((NULL == new_param) || (NULL == old_param)) {
+        return -EINVAL;
+    }
+
+    // A mismatch means the camera adapter has to be reconfigured.
+    if (strcmp(new_param, old_param) != 0) {
+        update = true;
+    }
+
+    return NO_ERROR;
+}
+
+/**
+   @brief Parses a "WxH" resolution string.
+
+   @param resStr input string, e.g. "640x480" (may be NULL)
+   @param[out] width  parsed width on success
+   @param[out] height parsed height on success
+   @return NO_ERROR on success, -EINVAL on bad input, NO_MEMORY on allocation failure
+ */
+status_t CameraHal::parseResolution(const char *resStr, int &width, int &height)
+{
+    status_t ret = NO_ERROR;
+    char *ctx, *pWidth, *pHeight;
+    const char *sep = "x";
+
+    LOG_FUNCTION_NAME;
+
+    if ( NULL == resStr )
+        {
+        return -EINVAL;
+        }
+
+    // Tokenize a private copy since strtok_r modifies its input.
+    char *resStr_copy = (char *)malloc(strlen(resStr) + 1);
+    if ( NULL == resStr_copy )
+        {
+        // Without the copy we must not continue: calling strtok_r(NULL, ...)
+        // below would operate on stale state from a previous invocation.
+        CAMHAL_LOGEA("No resources for temporary resolution string");
+        return NO_MEMORY;
+        }
+
+    strcpy(resStr_copy, resStr);
+    pWidth = strtok_r(resStr_copy, sep, &ctx);
+
+    if ( NULL != pWidth )
+        {
+        width = atoi(pWidth);
+        }
+    else
+        {
+        CAMHAL_LOGEB("Invalid input resolution %s", resStr);
+        ret = -EINVAL;
+        }
+
+    if ( NO_ERROR == ret )
+        {
+        pHeight = strtok_r(NULL, sep, &ctx);
+
+        if ( NULL != pHeight )
+            {
+            height = atoi(pHeight);
+            }
+        else
+            {
+            CAMHAL_LOGEB("Invalid input resolution %s", resStr);
+            ret = -EINVAL;
+            }
+        }
+
+    free(resStr_copy);
+    resStr_copy = NULL;
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+/**
+   @brief Publishes the sensor's capability lists into mParameters.
+
+   Copies every "supported values" property from mCameraProperties into the
+   CameraParameters object the application queries.  Assumes mCameraProperties
+   has been set by initialize().
+ */
+void CameraHal::insertSupportedParams()
+{
+    LOG_FUNCTION_NAME;
+
+    android::CameraParameters &p = mParameters;
+
+    ///Set the name of the camera
+    p.set(TICameraParameters::KEY_CAMERA_NAME, mCameraProperties->get(CameraProperties::CAMERA_NAME));
+
+    // Cached for later zoom-range validation in setParameters().
+    mMaxZoomSupported = atoi(mCameraProperties->get(CameraProperties::SUPPORTED_ZOOM_STAGES));
+
+    // Picture/preview sizes, formats and frame rates
+    p.set(android::CameraParameters::KEY_SUPPORTED_PICTURE_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_SIZES));
+    p.set(android::CameraParameters::KEY_SUPPORTED_PICTURE_FORMATS, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_FORMATS));
+    p.set(android::CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_SIZES));
+    p.set(android::CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FORMATS));
+    p.set(TICameraParameters::KEY_SUPPORTED_PICTURE_SUBSAMPLED_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_SUBSAMPLED_SIZES));
+    p.set(TICameraParameters::KEY_SUPPORTED_PICTURE_SIDEBYSIDE_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_SIDEBYSIDE_SIZES));
+    p.set(TICameraParameters::KEY_SUPPORTED_PICTURE_TOPBOTTOM_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_TOPBOTTOM_SIZES));
+    p.set(TICameraParameters::KEY_SUPPORTED_PREVIEW_SUBSAMPLED_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_SUBSAMPLED_SIZES));
+    p.set(TICameraParameters::KEY_SUPPORTED_PREVIEW_SIDEBYSIDE_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_SIDEBYSIDE_SIZES));
+    p.set(TICameraParameters::KEY_SUPPORTED_PREVIEW_TOPBOTTOM_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_TOPBOTTOM_SIZES));
+    p.set(android::CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES));
+    p.set(TICameraParameters::KEY_FRAMERATES_EXT_SUPPORTED, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES_EXT));
+    p.set(android::CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE, mCameraProperties->get(CameraProperties::FRAMERATE_RANGE_SUPPORTED));
+    p.set(TICameraParameters::KEY_FRAMERATE_RANGES_EXT_SUPPORTED, mCameraProperties->get(CameraProperties::FRAMERATE_RANGE_EXT_SUPPORTED));
+    p.set(android::CameraParameters::KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_THUMBNAIL_SIZES));
+    // 3A-related capabilities (white balance, scene, flash, focus, exposure)
+    p.set(android::CameraParameters::KEY_SUPPORTED_WHITE_BALANCE, mCameraProperties->get(CameraProperties::SUPPORTED_WHITE_BALANCE));
+    p.set(android::CameraParameters::KEY_SUPPORTED_EFFECTS, mCameraProperties->get(CameraProperties::SUPPORTED_EFFECTS));
+    p.set(android::CameraParameters::KEY_SUPPORTED_SCENE_MODES, mCameraProperties->get(CameraProperties::SUPPORTED_SCENE_MODES));
+    p.set(android::CameraParameters::KEY_SUPPORTED_FLASH_MODES, mCameraProperties->get(CameraProperties::SUPPORTED_FLASH_MODES));
+    p.set(android::CameraParameters::KEY_SUPPORTED_FOCUS_MODES, mCameraProperties->get(CameraProperties::SUPPORTED_FOCUS_MODES));
+    p.set(android::CameraParameters::KEY_SUPPORTED_ANTIBANDING, mCameraProperties->get(CameraProperties::SUPPORTED_ANTIBANDING));
+    p.set(android::CameraParameters::KEY_MAX_EXPOSURE_COMPENSATION, mCameraProperties->get(CameraProperties::SUPPORTED_EV_MAX));
+    p.set(android::CameraParameters::KEY_MIN_EXPOSURE_COMPENSATION, mCameraProperties->get(CameraProperties::SUPPORTED_EV_MIN));
+    p.set(android::CameraParameters::KEY_EXPOSURE_COMPENSATION_STEP, mCameraProperties->get(CameraProperties::SUPPORTED_EV_STEP));
+    p.set(TICameraParameters::KEY_SUPPORTED_EXPOSURE, mCameraProperties->get(CameraProperties::SUPPORTED_EXPOSURE_MODES));
+    p.set(TICameraParameters::KEY_SUPPORTED_MANUAL_EXPOSURE_MIN, mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_EXPOSURE_MIN));
+    p.set(TICameraParameters::KEY_SUPPORTED_MANUAL_EXPOSURE_MAX, mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_EXPOSURE_MAX));
+    p.set(TICameraParameters::KEY_SUPPORTED_MANUAL_EXPOSURE_STEP, mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_EXPOSURE_STEP));
+    p.set(TICameraParameters::KEY_SUPPORTED_MANUAL_GAIN_ISO_MIN, mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_GAIN_ISO_MIN));
+    p.set(TICameraParameters::KEY_SUPPORTED_MANUAL_GAIN_ISO_MAX, mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_GAIN_ISO_MAX));
+    p.set(TICameraParameters::KEY_SUPPORTED_MANUAL_GAIN_ISO_STEP, mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_GAIN_ISO_STEP));
+    p.set(TICameraParameters::KEY_SUPPORTED_ISO_VALUES, mCameraProperties->get(CameraProperties::SUPPORTED_ISO_VALUES));
+    // Zoom capabilities
+    p.set(android::CameraParameters::KEY_ZOOM_RATIOS, mCameraProperties->get(CameraProperties::SUPPORTED_ZOOM_RATIOS));
+    p.set(android::CameraParameters::KEY_MAX_ZOOM, mCameraProperties->get(CameraProperties::SUPPORTED_ZOOM_STAGES));
+    p.set(android::CameraParameters::KEY_ZOOM_SUPPORTED, mCameraProperties->get(CameraProperties::ZOOM_SUPPORTED));
+    p.set(android::CameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED, mCameraProperties->get(CameraProperties::SMOOTH_ZOOM_SUPPORTED));
+    // TI-specific / stereo (S3D) capabilities
+    p.set(TICameraParameters::KEY_SUPPORTED_IPP, mCameraProperties->get(CameraProperties::SUPPORTED_IPP_MODES));
+    p.set(TICameraParameters::KEY_S3D_PRV_FRAME_LAYOUT_VALUES, mCameraProperties->get(CameraProperties::S3D_PRV_FRAME_LAYOUT_VALUES));
+    p.set(TICameraParameters::KEY_S3D_CAP_FRAME_LAYOUT_VALUES, mCameraProperties->get(CameraProperties::S3D_CAP_FRAME_LAYOUT_VALUES));
+    p.set(TICameraParameters::KEY_AUTOCONVERGENCE_MODE_VALUES, mCameraProperties->get(CameraProperties::AUTOCONVERGENCE_MODE_VALUES));
+    p.set(TICameraParameters::KEY_SUPPORTED_MANUAL_CONVERGENCE_MIN, mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_CONVERGENCE_MIN));
+    p.set(TICameraParameters::KEY_SUPPORTED_MANUAL_CONVERGENCE_MAX, mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_CONVERGENCE_MAX));
+    p.set(TICameraParameters::KEY_SUPPORTED_MANUAL_CONVERGENCE_STEP, mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_CONVERGENCE_STEP));
+    p.set(android::CameraParameters::KEY_VIDEO_STABILIZATION_SUPPORTED, mCameraProperties->get(CameraProperties::VSTAB_SUPPORTED));
+    p.set(TICameraParameters::KEY_VNF_SUPPORTED, mCameraProperties->get(CameraProperties::VNF_SUPPORTED));
+    p.set(android::CameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED, mCameraProperties->get(CameraProperties::AUTO_EXPOSURE_LOCK_SUPPORTED));
+    p.set(android::CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED, mCameraProperties->get(CameraProperties::AUTO_WHITEBALANCE_LOCK_SUPPORTED));
+    p.set(android::CameraParameters::KEY_VIDEO_SNAPSHOT_SUPPORTED, mCameraProperties->get(CameraProperties::VIDEO_SNAPSHOT_SUPPORTED));
+    p.set(TICameraParameters::KEY_MECHANICAL_MISALIGNMENT_CORRECTION_SUPPORTED, mCameraProperties->get(CameraProperties::MECHANICAL_MISALIGNMENT_CORRECTION_SUPPORTED));
+    p.set(TICameraParameters::KEY_CAP_MODE_VALUES, mCameraProperties->get(CameraProperties::CAP_MODE_VALUES));
+
+    LOG_FUNCTION_NAME_EXIT;
+
+}
+
+/**
+   @brief Initializes the default current and supported parameters for the
+          currently selected camera.
+
+   Publishes capability lists via insertSupportedParams(), then seeds every
+   adjustable parameter with its default from mCameraProperties.
+ */
+void CameraHal::initDefaultParameters()
+{
+    android::CameraParameters &p = mParameters;
+    status_t ret = NO_ERROR;
+    int width, height;
+
+    LOG_FUNCTION_NAME;
+
+    insertSupportedParams();
+
+    // Fall back to conservative minimum sizes when a default cannot be parsed.
+    ret = parseResolution(mCameraProperties->get(CameraProperties::PREVIEW_SIZE), width, height);
+
+    if ( NO_ERROR == ret )
+        {
+        p.setPreviewSize(width, height);
+        }
+    else
+        {
+        p.setPreviewSize(MIN_WIDTH, MIN_HEIGHT);
+        }
+
+    ret = parseResolution(mCameraProperties->get(CameraProperties::PICTURE_SIZE), width, height);
+
+    if ( NO_ERROR == ret )
+        {
+        p.setPictureSize(width, height);
+        }
+    else
+        {
+        p.setPictureSize(PICTURE_WIDTH, PICTURE_HEIGHT);
+        }
+
+    ret = parseResolution(mCameraProperties->get(CameraProperties::JPEG_THUMBNAIL_SIZE), width, height);
+
+    if ( NO_ERROR == ret )
+        {
+        p.set(android::CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, width);
+        p.set(android::CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, height);
+        }
+    else
+        {
+        p.set(android::CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, MIN_WIDTH);
+        p.set(android::CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, MIN_HEIGHT);
+        }
+
+    //Insert default values
+    p.setPreviewFrameRate(atoi(mCameraProperties->get(CameraProperties::PREVIEW_FRAME_RATE)));
+    p.set(android::CameraParameters::KEY_PREVIEW_FPS_RANGE, mCameraProperties->get(CameraProperties::FRAMERATE_RANGE));
+    p.set(TICameraParameters::KEY_PREVIEW_FRAME_RATE_RANGE, mCameraProperties->get(CameraProperties::FRAMERATE_RANGE));
+    p.setPreviewFormat(mCameraProperties->get(CameraProperties::PREVIEW_FORMAT));
+    p.setPictureFormat(mCameraProperties->get(CameraProperties::PICTURE_FORMAT));
+    p.set(android::CameraParameters::KEY_JPEG_QUALITY, mCameraProperties->get(CameraProperties::JPEG_QUALITY));
+    p.set(android::CameraParameters::KEY_WHITE_BALANCE, mCameraProperties->get(CameraProperties::WHITEBALANCE));
+    p.set(android::CameraParameters::KEY_EFFECT, mCameraProperties->get(CameraProperties::EFFECT));
+    p.set(android::CameraParameters::KEY_ANTIBANDING, mCameraProperties->get(CameraProperties::ANTIBANDING));
+    p.set(android::CameraParameters::KEY_FLASH_MODE, mCameraProperties->get(CameraProperties::FLASH_MODE));
+    p.set(android::CameraParameters::KEY_FOCUS_MODE, mCameraProperties->get(CameraProperties::FOCUS_MODE));
+    p.set(android::CameraParameters::KEY_EXPOSURE_COMPENSATION, mCameraProperties->get(CameraProperties::EV_COMPENSATION));
+    p.set(android::CameraParameters::KEY_SCENE_MODE, mCameraProperties->get(CameraProperties::SCENE_MODE));
+    p.set(android::CameraParameters::KEY_ZOOM, mCameraProperties->get(CameraProperties::ZOOM));
+    p.set(TICameraParameters::KEY_CONTRAST, mCameraProperties->get(CameraProperties::CONTRAST));
+    p.set(TICameraParameters::KEY_SATURATION, mCameraProperties->get(CameraProperties::SATURATION));
+    p.set(TICameraParameters::KEY_BRIGHTNESS, mCameraProperties->get(CameraProperties::BRIGHTNESS));
+    p.set(TICameraParameters::KEY_SHARPNESS, mCameraProperties->get(CameraProperties::SHARPNESS));
+    p.set(TICameraParameters::KEY_EXPOSURE_MODE, mCameraProperties->get(CameraProperties::EXPOSURE_MODE));
+    // Manual exposure/gain defaults start at the supported minimum.
+    p.set(TICameraParameters::KEY_MANUAL_EXPOSURE, mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_EXPOSURE_MIN));
+    p.set(TICameraParameters::KEY_MANUAL_EXPOSURE_RIGHT, mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_EXPOSURE_MIN));
+    p.set(TICameraParameters::KEY_MANUAL_GAIN_ISO, mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_GAIN_ISO_MIN));
+    p.set(TICameraParameters::KEY_MANUAL_GAIN_ISO_RIGHT, mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_GAIN_ISO_MIN));
+    p.set(TICameraParameters::KEY_ISO, mCameraProperties->get(CameraProperties::ISO_MODE));
+    p.set(TICameraParameters::KEY_IPP, mCameraProperties->get(CameraProperties::IPP));
+    p.set(TICameraParameters::KEY_GBCE, mCameraProperties->get(CameraProperties::GBCE));
+    p.set(TICameraParameters::KEY_GBCE_SUPPORTED, mCameraProperties->get(CameraProperties::SUPPORTED_GBCE));
+    p.set(TICameraParameters::KEY_GLBCE, mCameraProperties->get(CameraProperties::GLBCE));
+    p.set(TICameraParameters::KEY_GLBCE_SUPPORTED, mCameraProperties->get(CameraProperties::SUPPORTED_GLBCE));
+    p.set(TICameraParameters::KEY_S3D_PRV_FRAME_LAYOUT, mCameraProperties->get(CameraProperties::S3D_PRV_FRAME_LAYOUT));
+    p.set(TICameraParameters::KEY_S3D_CAP_FRAME_LAYOUT, mCameraProperties->get(CameraProperties::S3D_CAP_FRAME_LAYOUT));
+    p.set(TICameraParameters::KEY_AUTOCONVERGENCE_MODE, mCameraProperties->get(CameraProperties::AUTOCONVERGENCE_MODE));
+    p.set(TICameraParameters::KEY_MANUAL_CONVERGENCE, mCameraProperties->get(CameraProperties::MANUAL_CONVERGENCE));
+    p.set(android::CameraParameters::KEY_VIDEO_STABILIZATION, mCameraProperties->get(CameraProperties::VSTAB));
+    p.set(TICameraParameters::KEY_VNF, mCameraProperties->get(CameraProperties::VNF));
+    p.set(android::CameraParameters::KEY_FOCAL_LENGTH, mCameraProperties->get(CameraProperties::FOCAL_LENGTH));
+    p.set(android::CameraParameters::KEY_HORIZONTAL_VIEW_ANGLE, mCameraProperties->get(CameraProperties::HOR_ANGLE));
+    p.set(android::CameraParameters::KEY_VERTICAL_VIEW_ANGLE, mCameraProperties->get(CameraProperties::VER_ANGLE));
+    p.set(TICameraParameters::KEY_SENSOR_ORIENTATION, mCameraProperties->get(CameraProperties::SENSOR_ORIENTATION));
+    p.set(TICameraParameters::KEY_EXIF_MAKE, mCameraProperties->get(CameraProperties::EXIF_MAKE));
+    p.set(TICameraParameters::KEY_EXIF_MODEL, mCameraProperties->get(CameraProperties::EXIF_MODEL));
+    p.set(android::CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY, mCameraProperties->get(CameraProperties::JPEG_THUMBNAIL_QUALITY));
+    p.set(android::CameraParameters::KEY_VIDEO_FRAME_FORMAT, "OMX_TI_COLOR_FormatYUV420PackedSemiPlanar");
+    p.set(android::CameraParameters::KEY_MAX_NUM_DETECTED_FACES_HW, mCameraProperties->get(CameraProperties::MAX_FD_HW_FACES));
+    p.set(android::CameraParameters::KEY_MAX_NUM_DETECTED_FACES_SW, mCameraProperties->get(CameraProperties::MAX_FD_SW_FACES));
+    p.set(TICameraParameters::KEY_MECHANICAL_MISALIGNMENT_CORRECTION, mCameraProperties->get(CameraProperties::MECHANICAL_MISALIGNMENT_CORRECTION));
+    // Only one area a.k.a Touch AF for now.
+    // TODO: Add support for multiple focus areas.
+    p.set(android::CameraParameters::KEY_MAX_NUM_FOCUS_AREAS, mCameraProperties->get(CameraProperties::MAX_FOCUS_AREAS));
+    p.set(android::CameraParameters::KEY_AUTO_EXPOSURE_LOCK, mCameraProperties->get(CameraProperties::AUTO_EXPOSURE_LOCK));
+    p.set(android::CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK, mCameraProperties->get(CameraProperties::AUTO_WHITEBALANCE_LOCK));
+    p.set(android::CameraParameters::KEY_MAX_NUM_METERING_AREAS, mCameraProperties->get(CameraProperties::MAX_NUM_METERING_AREAS));
+    p.set(TICameraParameters::RAW_WIDTH, mCameraProperties->get(CameraProperties::RAW_WIDTH));
+    p.set(TICameraParameters::RAW_HEIGHT,mCameraProperties->get(CameraProperties::RAW_HEIGHT));
+
+    // TI extensions for enable/disable algos
+    // Hardcoded for now
+    p.set(TICameraParameters::KEY_ALGO_EXTERNAL_GAMMA, android::CameraParameters::FALSE);
+    p.set(TICameraParameters::KEY_ALGO_NSF1, android::CameraParameters::TRUE);
+    p.set(TICameraParameters::KEY_ALGO_NSF2, android::CameraParameters::TRUE);
+    p.set(TICameraParameters::KEY_ALGO_SHARPENING, android::CameraParameters::TRUE);
+    p.set(TICameraParameters::KEY_ALGO_THREELINCOLORMAP, android::CameraParameters::TRUE);
+    p.set(TICameraParameters::KEY_ALGO_GIC, android::CameraParameters::TRUE);
+
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+   @brief Stop a previously started preview.
+
+   Teardown order matters here: bracketing first, then the display sink,
+   then application callbacks, and finally the adapter state machine.
+
+   @param none
+   @return none
+
+ */
+void CameraHal::forceStopPreview()
+{
+    LOG_FUNCTION_NAME;
+
+    // stop bracketing if it is running
+    if ( mBracketingRunning ) {
+        stopImageBracketing();
+    }
+
+    if(mDisplayAdapter.get() != NULL) {
+        ///Stop the buffer display first
+        mDisplayAdapter->disableDisplay();
+    }
+
+    if(mAppCallbackNotifier.get() != NULL) {
+        //Stop the callback sending
+        mAppCallbackNotifier->stop();
+        mAppCallbackNotifier->flushAndReturnFrames();
+        mAppCallbackNotifier->stopPreviewCallbacks();
+    }
+
+    if ( NULL != mCameraAdapter ) {
+        // only need to send these control commands to state machine if we are
+        // passed the LOADED_PREVIEW_STATE
+        if (mCameraAdapter->getState() > CameraAdapter::LOADED_PREVIEW_STATE) {
+            // according to javadoc...FD should be stopped in stopPreview
+            // and application needs to call startFaceDection again
+            // to restart FD
+            mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_FD);
+        }
+
+        mCameraAdapter->rollbackToInitializedState();
+
+    }
+
+    // Preview buffers are only valid while preview runs; release them now.
+    freePreviewBufs();
+    freePreviewDataBufs();
+
+    mPreviewEnabled = false;
+    mDisplayPaused = false;
+    mPreviewStartInProgress = false;
+
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+   @brief Deallocates memory for all the resources held by Camera HAL.
+
+   Frees the following objects- CameraAdapter, AppCallbackNotifier, DisplayAdapter,
+   and Memory Manager
+
+   @param none
+   @return none
+
+ */
+void CameraHal::deinitialize()
+{
+    LOG_FUNCTION_NAME;
+
+    // Make sure preview is fully torn down before releasing anything else.
+    if ( mPreviewEnabled || mDisplayPaused ) {
+        forceStopPreview();
+    }
+
+    mSetPreviewWindowCalled = false;
+
+    if (mSensorListener.get()) {
+        mSensorListener->disableSensor(SensorListener::SENSOR_ORIENTATION);
+        mSensorListener.clear();
+        mSensorListener = NULL;
+    }
+
+    // Release tap-in/tap-out buffer source adapters (CPCAM reprocess path).
+    mBufferSourceAdapter_Out.clear();
+    mBufferSourceAdapter_In.clear();
+    mOutAdapters.clear();
+    mInAdapters.clear();
+
+    LOG_FUNCTION_NAME_EXIT;
+
+}
+
+/**
+   @brief Enables or disables metadata-in-buffers mode for video frames.
+
+   @param enable true to pass metadata instead of full frame data
+   @return status reported by AppCallbackNotifier::useMetaDataBufferMode()
+ */
+status_t CameraHal::storeMetaDataInBuffers(bool enable)
+{
+    LOG_FUNCTION_NAME;
+
+    // Capture the result first so the exit trace actually runs; previously
+    // LOG_FUNCTION_NAME_EXIT sat unreachable after the return statement.
+    status_t ret = mAppCallbackNotifier->useMetaDataBufferMode(enable);
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+/**
+   @brief Records whether buffer locking is handled externally by the client.
+
+   @param extBuffLocking true when the client performs buffer locking itself
+   @return none
+ */
+void CameraHal::setExternalLocking(bool extBuffLocking)
+{
+    mExternalLocking = extBuffLocking;
+}
+
+/**
+   @brief Restores the preview resolution to the video resolution for
+          small (QVGA and below) video sizes.
+
+   @param params parameter set to update in place
+   @return none
+ */
+void CameraHal::resetPreviewRes(android::CameraParameters *params)
+{
+    LOG_FUNCTION_NAME;
+
+    // Only low-resolution video forces the preview size to match.
+    if ( (mVideoWidth <= 320) && (mVideoHeight <= 240)){
+        params->setPreviewSize(mVideoWidth, mVideoHeight);
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+   @brief Returns the pointer that should be handed to OMX for a CameraBuffer.
+
+   For ANativeWindow buffers this is the dereferenced buffer_handle_t; for
+   all other buffer types the opaque pointer is passed through unchanged.
+
+   @param buffer camera buffer descriptor (must be non-NULL)
+   @return pointer suitable for OMX buffer registration
+ */
+void *
+camera_buffer_get_omx_ptr (CameraBuffer *buffer)
+{
+    CAMHAL_LOGV("buffer_type %d opaque %p", buffer->type, buffer->opaque);
+
+    if (buffer->type == CAMERA_BUFFER_ANW) {
+        buffer_handle_t *handle = (buffer_handle_t *)buffer->opaque;
+        // Use %p for pointers: %08x truncates/garbles them on 64-bit builds.
+        CAMHAL_LOGV("anw %p", *handle);
+        return (void *)*handle;
+    } else {
+        CAMHAL_LOGV("other %p", buffer->opaque);
+        return (void *)buffer->opaque;
+    }
+}
+
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/CameraHalCommon.cpp b/camera/CameraHalCommon.cpp
new file mode 100644
index 0000000..ce01528
--- /dev/null
+++ b/camera/CameraHalCommon.cpp
@@ -0,0 +1,233 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "CameraHal.h"
+
+namespace Ti {
+namespace Camera {
+
+const char CameraHal::PARAMS_DELIMITER []= ",";
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+struct timeval CameraHal::ppm_start;
+
+#endif
+
+#if PPM_INSTRUMENTATION
+
+/**
+ @brief PPM instrumentation
+
+ Dumps the current time offset. The time reference point
+ lies within the CameraHAL constructor.
+
+ @param str - log message
+ @return none
+
+ */
+void CameraHal::PPM(const char* str){
+ struct timeval ppm;
+
+ gettimeofday(&ppm, NULL);
+ ppm.tv_sec = ppm.tv_sec - ppm_start.tv_sec;
+ ppm.tv_sec = ppm.tv_sec * 1000000;
+ ppm.tv_sec = ppm.tv_sec + ppm.tv_usec - ppm_start.tv_usec;
+
+ CAMHAL_LOGI("PPM: %s :%ld.%ld ms", str, ( ppm.tv_sec /1000 ), ( ppm.tv_sec % 1000 ));
+}
+
+#elif PPM_INSTRUMENTATION_ABS
+
+/**
+ @brief PPM instrumentation
+
+ Dumps the current time offset. The time reference point
+ lies within the CameraHAL constructor. This implemetation
+ will also dump the abosolute timestamp, which is useful when
+ post calculation is done with data coming from the upper
+ layers (Camera application etc.)
+
+ @param str - log message
+ @return none
+
+ */
+void CameraHal::PPM(const char* str){
+ struct timeval ppm;
+
+ unsigned long long elapsed, absolute;
+ gettimeofday(&ppm, NULL);
+ elapsed = ppm.tv_sec - ppm_start.tv_sec;
+ elapsed *= 1000000;
+ elapsed += ppm.tv_usec - ppm_start.tv_usec;
+ absolute = ppm.tv_sec;
+ absolute *= 1000;
+ absolute += ppm.tv_usec /1000;
+
+ CAMHAL_LOGI("PPM: %s :%llu.%llu ms : %llu ms", str, ( elapsed /1000 ), ( elapsed % 1000 ), absolute);
+}
+
+#endif
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+/**
+ @brief PPM instrumentation
+
+ Calculates and dumps the elapsed time using 'ppm_first' as
+ reference.
+
+ @param str - log message
+ @return none
+
+ */
+void CameraHal::PPM(const char* str, struct timeval* ppm_first, ...){
+ char temp_str[256];
+ struct timeval ppm;
+ unsigned long long absolute;
+ va_list args;
+
+ va_start(args, ppm_first);
+ vsprintf(temp_str, str, args);
+ gettimeofday(&ppm, NULL);
+ absolute = ppm.tv_sec;
+ absolute *= 1000;
+ absolute += ppm.tv_usec /1000;
+ ppm.tv_sec = ppm.tv_sec - ppm_first->tv_sec;
+ ppm.tv_sec = ppm.tv_sec * 1000000;
+ ppm.tv_sec = ppm.tv_sec + ppm.tv_usec - ppm_first->tv_usec;
+
+ CAMHAL_LOGI("PPM: %s :%ld.%ld ms : %llu ms", temp_str, ( ppm.tv_sec /1000 ), ( ppm.tv_sec % 1000 ), absolute);
+
+ va_end(args);
+}
+
+#endif
+
+
+/** Common utility function definitions used all over the HAL */
+
+unsigned int CameraHal::getBPP(const char* format) {
+ unsigned int bytesPerPixel;
+
+ // Calculate bytes per pixel based on the pixel format
+ if (strcmp(format, android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
+ bytesPerPixel = 2;
+ } else if (strcmp(format, android::CameraParameters::PIXEL_FORMAT_RGB565) == 0 ||
+ strcmp(format, android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB) == 0) {
+ bytesPerPixel = 2;
+ } else if (strcmp(format, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
+ bytesPerPixel = 1;
+ } else {
+ bytesPerPixel = 1;
+ }
+
+ return bytesPerPixel;
+}
+
+void CameraHal::getXYFromOffset(unsigned int *x, unsigned int *y,
+ unsigned int offset, unsigned int stride,
+ const char* format)
+{
+ CAMHAL_ASSERT( x && y && format && (0U < stride) );
+
+ *x = (offset % stride) / getBPP(format);
+ *y = (offset / stride);
+}
+
+const char* CameraHal::getPixelFormatConstant(const char* parametersFormat)
+{
+ const char *pixelFormat = NULL;
+
+ if ( NULL != parametersFormat ) {
+ if ( 0 == strcmp(parametersFormat, (const char *) android::CameraParameters::PIXEL_FORMAT_YUV422I) ) {
+ CAMHAL_LOGVA("CbYCrY format selected");
+ pixelFormat = (const char *) android::CameraParameters::PIXEL_FORMAT_YUV422I;
+ } else if ( (0 == strcmp(parametersFormat, (const char *) android::CameraParameters::PIXEL_FORMAT_YUV420SP)) ||
+ (0 == strcmp(parametersFormat, (const char *) android::CameraParameters::PIXEL_FORMAT_YUV420P)) ) {
+ // TODO(XXX): We are treating YV12 the same as YUV420SP
+ CAMHAL_LOGVA("YUV420SP format selected");
+ pixelFormat = (const char *) android::CameraParameters::PIXEL_FORMAT_YUV420SP;
+ } else if ( 0 == strcmp(parametersFormat, (const char *) android::CameraParameters::PIXEL_FORMAT_RGB565) ) {
+ CAMHAL_LOGVA("RGB565 format selected");
+ pixelFormat = (const char *) android::CameraParameters::PIXEL_FORMAT_RGB565;
+ } else if ( 0 == strcmp(parametersFormat, (const char *) android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB) ) {
+ CAMHAL_LOGVA("BAYER format selected");
+ pixelFormat = (const char *) android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB;
+ } else if ( 0 == strcmp(parametersFormat, android::CameraParameters::PIXEL_FORMAT_JPEG) ) {
+ CAMHAL_LOGVA("JPEG format selected");
+ pixelFormat = (const char *) android::CameraParameters::PIXEL_FORMAT_JPEG;
+ } else {
+ CAMHAL_LOGEA("Invalid format, NV12 format selected as default");
+ pixelFormat = (const char *) android::CameraParameters::PIXEL_FORMAT_YUV420SP;
+ }
+ } else {
+ CAMHAL_LOGEA("Preview format is NULL, defaulting to NV12");
+ pixelFormat = (const char *) android::CameraParameters::PIXEL_FORMAT_YUV420SP;
+ }
+
+ return pixelFormat;
+}
+
+size_t CameraHal::calculateBufferSize(const char* parametersFormat, int width, int height)
+{
+ int bufferSize = -1;
+
+ if ( NULL != parametersFormat ) {
+ if ( 0 == strcmp(parametersFormat, (const char *) android::CameraParameters::PIXEL_FORMAT_YUV422I) ) {
+ bufferSize = width * height * 2;
+ } else if ( (0 == strcmp(parametersFormat, android::CameraParameters::PIXEL_FORMAT_YUV420SP)) ||
+ (0 == strcmp(parametersFormat, android::CameraParameters::PIXEL_FORMAT_YUV420P)) ) {
+ bufferSize = width * height * 3 / 2;
+ } else if ( 0 == strcmp(parametersFormat, (const char *) android::CameraParameters::PIXEL_FORMAT_RGB565) ) {
+ bufferSize = width * height * 2;
+ } else if ( 0 == strcmp(parametersFormat, (const char *) android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB) ) {
+ bufferSize = width * height * 2;
+ } else {
+ CAMHAL_LOGEA("Invalid format");
+ bufferSize = 0;
+ }
+ } else {
+ CAMHAL_LOGEA("Preview format is NULL");
+ bufferSize = 0;
+ }
+
+ return bufferSize;
+}
+
+
+bool CameraHal::parsePair(const char *str, int *first, int *second, char delim)
+{
+ // Find the first integer.
+ char *end;
+ int w = (int)strtol(str, &end, 10);
+ // If a delimeter does not immediately follow, give up.
+ if (*end != delim) {
+ CAMHAL_LOGE("Cannot find delimeter (%c) in str=%s", delim, str);
+ return false;
+ }
+
+ // Find the second integer, immediately after the delimeter.
+ int h = (int)strtol(end+1, &end, 10);
+
+ *first = w;
+ *second = h;
+
+ return true;
+}
+
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/CameraHalUtilClasses.cpp b/camera/CameraHalUtilClasses.cpp
new file mode 100644
index 0000000..63d8453
--- /dev/null
+++ b/camera/CameraHalUtilClasses.cpp
@@ -0,0 +1,361 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file CameraHalUtilClasses.cpp
+*
+* This file maps the CameraHardwareInterface to the Camera interfaces on OMAP4 (mainly OMX).
+*
+*/
+
+#include "CameraHal.h"
+
+namespace Ti {
+namespace Camera {
+
+/*--------------------FrameProvider Class STARTS here-----------------------------*/
+
+int FrameProvider::enableFrameNotification(int32_t frameTypes)
+{
+ LOG_FUNCTION_NAME;
+ status_t ret = NO_ERROR;
+
+ ///Enable the frame notification to CameraAdapter (which implements FrameNotifier interface)
+ mFrameNotifier->enableMsgType(frameTypes<<MessageNotifier::FRAME_BIT_FIELD_POSITION
+ , mFrameCallback
+ , NULL
+ , mCookie
+ );
+
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+int FrameProvider::disableFrameNotification(int32_t frameTypes)
+{
+ LOG_FUNCTION_NAME;
+ status_t ret = NO_ERROR;
+
+ mFrameNotifier->disableMsgType(frameTypes<<MessageNotifier::FRAME_BIT_FIELD_POSITION
+ , mCookie
+ );
+
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+int FrameProvider::returnFrame(CameraBuffer *frameBuf, CameraFrame::FrameType frameType)
+{
+ status_t ret = NO_ERROR;
+
+ mFrameNotifier->returnFrame(frameBuf, frameType);
+
+ return ret;
+}
+
+void FrameProvider::addFramePointers(CameraBuffer *frameBuf, android_ycbcr *ycbcr)
+{
+ mFrameNotifier->addFramePointers(frameBuf, ycbcr);
+ return;
+}
+
+void FrameProvider::removeFramePointers()
+{
+ mFrameNotifier->removeFramePointers();
+ return;
+}
+
+/*--------------------FrameProvider Class ENDS here-----------------------------*/
+
+/*--------------------EventProvider Class STARTS here-----------------------------*/
+
+int EventProvider::enableEventNotification(int32_t frameTypes)
+{
+ LOG_FUNCTION_NAME;
+ status_t ret = NO_ERROR;
+
+ ///Enable the frame notification to CameraAdapter (which implements FrameNotifier interface)
+ mEventNotifier->enableMsgType(frameTypes<<MessageNotifier::EVENT_BIT_FIELD_POSITION
+ , NULL
+ , mEventCallback
+ , mCookie
+ );
+
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+int EventProvider::disableEventNotification(int32_t frameTypes)
+{
+ LOG_FUNCTION_NAME;
+ status_t ret = NO_ERROR;
+
+ mEventNotifier->disableMsgType(frameTypes<<MessageNotifier::EVENT_BIT_FIELD_POSITION
+ , mCookie
+ );
+
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+/*--------------------EventProvider Class ENDS here-----------------------------*/
+
+/*--------------------CameraArea Class STARTS here-----------------------------*/
+
+status_t CameraArea::transfrom(size_t width,
+ size_t height,
+ size_t &top,
+ size_t &left,
+ size_t &areaWidth,
+ size_t &areaHeight)
+{
+ status_t ret = NO_ERROR;
+ size_t hRange, vRange;
+ double hScale, vScale;
+
+ LOG_FUNCTION_NAME
+
+ hRange = CameraArea::RIGHT - CameraArea::LEFT;
+ vRange = CameraArea::BOTTOM - CameraArea::TOP;
+ hScale = ( double ) width / ( double ) hRange;
+ vScale = ( double ) height / ( double ) vRange;
+
+ top = ( mTop + vRange / 2 ) * vScale;
+ left = ( mLeft + hRange / 2 ) * hScale;
+ areaHeight = ( mBottom + vRange / 2 ) * vScale;
+ areaHeight -= top;
+ areaWidth = ( mRight + hRange / 2) * hScale;
+ areaWidth -= left;
+
+ LOG_FUNCTION_NAME_EXIT
+
+ return ret;
+}
+
+status_t CameraArea::checkArea(ssize_t top,
+ ssize_t left,
+ ssize_t bottom,
+ ssize_t right,
+ ssize_t weight)
+{
+
+ //Handles the invalid regin corner case.
+ if ( ( 0 == top ) && ( 0 == left ) && ( 0 == bottom ) && ( 0 == right ) && ( 0 == weight ) ) {
+ return NO_ERROR;
+ }
+
+ if ( ( CameraArea::WEIGHT_MIN > weight ) || ( CameraArea::WEIGHT_MAX < weight ) ) {
+ CAMHAL_LOGEB("Camera area weight is invalid %d", weight);
+ return -EINVAL;
+ }
+
+ if ( ( CameraArea::TOP > top ) || ( CameraArea::BOTTOM < top ) ) {
+ CAMHAL_LOGEB("Camera area top coordinate is invalid %d", top );
+ return -EINVAL;
+ }
+
+ if ( ( CameraArea::TOP > bottom ) || ( CameraArea::BOTTOM < bottom ) ) {
+ CAMHAL_LOGEB("Camera area bottom coordinate is invalid %d", bottom );
+ return -EINVAL;
+ }
+
+ if ( ( CameraArea::LEFT > left ) || ( CameraArea::RIGHT < left ) ) {
+ CAMHAL_LOGEB("Camera area left coordinate is invalid %d", left );
+ return -EINVAL;
+ }
+
+ if ( ( CameraArea::LEFT > right ) || ( CameraArea::RIGHT < right ) ) {
+ CAMHAL_LOGEB("Camera area right coordinate is invalid %d", right );
+ return -EINVAL;
+ }
+
+ if ( left >= right ) {
+ CAMHAL_LOGEA("Camera area left larger than right");
+ return -EINVAL;
+ }
+
+ if ( top >= bottom ) {
+ CAMHAL_LOGEA("Camera area top larger than bottom");
+ return -EINVAL;
+ }
+
+ return NO_ERROR;
+}
+
+status_t CameraArea::parseAreas(const char *area,
+ size_t areaLength,
+ android::Vector<android::sp<CameraArea> > &areas)
+{
+ status_t ret = NO_ERROR;
+ char *ctx;
+ char *pArea = NULL;
+ char *pStart = NULL;
+ char *pEnd = NULL;
+ const char *startToken = "(";
+ const char endToken = ')';
+ const char sep = ',';
+ ssize_t top, left, bottom, right, weight;
+ char *tmpBuffer = NULL;
+ android::sp<CameraArea> currentArea;
+
+ LOG_FUNCTION_NAME
+
+ if ( ( NULL == area ) ||
+ ( 0 >= areaLength ) )
+ {
+ return -EINVAL;
+ }
+
+ tmpBuffer = ( char * ) malloc(areaLength);
+ if ( NULL == tmpBuffer )
+ {
+ return -ENOMEM;
+ }
+
+ memcpy(tmpBuffer, area, areaLength);
+
+ pArea = strtok_r(tmpBuffer, startToken, &ctx);
+
+ do
+ {
+
+ pStart = pArea;
+ if ( NULL == pStart )
+ {
+ CAMHAL_LOGEA("Parsing of the left area coordinate failed!");
+ ret = -EINVAL;
+ break;
+ }
+ else
+ {
+ left = static_cast<ssize_t>(strtol(pStart, &pEnd, 10));
+ }
+
+ if ( sep != *pEnd )
+ {
+ CAMHAL_LOGEA("Parsing of the top area coordinate failed!");
+ ret = -EINVAL;
+ break;
+ }
+ else
+ {
+ top = static_cast<ssize_t>(strtol(pEnd+1, &pEnd, 10));
+ }
+
+ if ( sep != *pEnd )
+ {
+ CAMHAL_LOGEA("Parsing of the right area coordinate failed!");
+ ret = -EINVAL;
+ break;
+ }
+ else
+ {
+ right = static_cast<ssize_t>(strtol(pEnd+1, &pEnd, 10));
+ }
+
+ if ( sep != *pEnd )
+ {
+ CAMHAL_LOGEA("Parsing of the bottom area coordinate failed!");
+ ret = -EINVAL;
+ break;
+ }
+ else
+ {
+ bottom = static_cast<ssize_t>(strtol(pEnd+1, &pEnd, 10));
+ }
+
+ if ( sep != *pEnd )
+ {
+ CAMHAL_LOGEA("Parsing of the weight area coordinate failed!");
+ ret = -EINVAL;
+ break;
+ }
+ else
+ {
+ weight = static_cast<ssize_t>(strtol(pEnd+1, &pEnd, 10));
+ }
+
+ if ( endToken != *pEnd )
+ {
+ CAMHAL_LOGEA("Malformed area!");
+ ret = -EINVAL;
+ break;
+ }
+
+ ret = checkArea(top, left, bottom, right, weight);
+ if ( NO_ERROR != ret ) {
+ break;
+ }
+
+ currentArea = new CameraArea(top, left, bottom, right, weight);
+ CAMHAL_LOGDB("Area parsed [%dx%d, %dx%d] %d",
+ ( int ) top,
+ ( int ) left,
+ ( int ) bottom,
+ ( int ) right,
+ ( int ) weight);
+ if ( NULL != currentArea.get() )
+ {
+ areas.add(currentArea);
+ }
+ else
+ {
+ ret = -ENOMEM;
+ break;
+ }
+
+ pArea = strtok_r(NULL, startToken, &ctx);
+
+ }
+ while ( NULL != pArea );
+
+ if ( NULL != tmpBuffer )
+ {
+ free(tmpBuffer);
+ }
+
+ LOG_FUNCTION_NAME_EXIT
+
+ return ret;
+}
+
+bool CameraArea::areAreasDifferent(android::Vector< android::sp<CameraArea> > &area1,
+ android::Vector< android::sp<CameraArea> > &area2) {
+ if (area1.size() != area2.size()) {
+ return true;
+ }
+
+ // not going to care about sorting order for now
+ for (int i = 0; i < area1.size(); i++) {
+ if (!area1.itemAt(i)->compare(area2.itemAt(i))) {
+ return true;
+ }
+ }
+
+ return false;
+}
+
+bool CameraArea::compare(const android::sp<CameraArea> &area) {
+ return ((mTop == area->mTop) && (mLeft == area->mLeft) &&
+ (mBottom == area->mBottom) && (mRight == area->mRight) &&
+ (mWeight == area->mWeight));
+}
+
+
+/*--------------------CameraArea Class ENDS here-----------------------------*/
+
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/CameraHal_Module.cpp b/camera/CameraHal_Module.cpp
new file mode 100644
index 0000000..e7af6bf
--- /dev/null
+++ b/camera/CameraHal_Module.cpp
@@ -0,0 +1,830 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file CameraHal.cpp
+*
+* This file maps the Camera Hardware Interface to V4L2.
+*
+*/
+
+#include <utils/threads.h>
+
+#include "CameraHal.h"
+#include "CameraProperties.h"
+#include "TICameraParameters.h"
+
+
+#ifdef CAMERAHAL_DEBUG_VERBOSE
+# define CAMHAL_LOG_MODULE_FUNCTION_NAME LOG_FUNCTION_NAME
+#else
+# define CAMHAL_LOG_MODULE_FUNCTION_NAME
+#endif
+
+
+namespace Ti {
+namespace Camera {
+
+static CameraProperties gCameraProperties;
+static CameraHal* gCameraHals[MAX_CAMERAS_SUPPORTED];
+static unsigned int gCamerasOpen = 0;
+static android::Mutex gCameraHalDeviceLock;
+
+static int camera_device_open(const hw_module_t* module, const char* name,
+ hw_device_t** device);
+static int camera_device_close(hw_device_t* device);
+static int camera_get_number_of_cameras(void);
+static int camera_get_camera_info(int camera_id, struct camera_info *info);
+
+static struct hw_module_methods_t camera_module_methods = {
+ open: camera_device_open
+};
+
+} // namespace Camera
+} // namespace Ti
+
+
+camera_module_t HAL_MODULE_INFO_SYM = {
+ common: {
+ tag: HARDWARE_MODULE_TAG,
+ version_major: 1,
+ version_minor: 0,
+ id: CAMERA_HARDWARE_MODULE_ID,
+ name: "TI OMAP CameraHal Module",
+ author: "TI",
+ methods: &Ti::Camera::camera_module_methods,
+ dso: NULL, /* remove compilation warnings */
+ reserved: {0}, /* remove compilation warnings */
+ },
+ get_number_of_cameras: Ti::Camera::camera_get_number_of_cameras,
+ get_camera_info: Ti::Camera::camera_get_camera_info,
+};
+
+
+namespace Ti {
+namespace Camera {
+
+typedef struct ti_camera_device {
+ camera_device_t base;
+ /* TI specific "private" data can go here (base.priv) */
+ int cameraid;
+} ti_camera_device_t;
+
+
+/*******************************************************************
+ * implementation of camera_device_ops functions
+ *******************************************************************/
+
+int camera_set_preview_window(struct camera_device * device,
+ struct preview_stream_ops *window)
+{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
+ int rv = -EINVAL;
+ ti_camera_device_t* ti_dev = NULL;
+
+ if(!device)
+ return rv;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ rv = gCameraHals[ti_dev->cameraid]->setPreviewWindow(window);
+
+ return rv;
+}
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
+int camera_set_extended_preview_ops(struct camera_device * device,
+ preview_stream_extended_ops_t * extendedOps)
+{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
+ if (!device) {
+ return BAD_VALUE;
+ }
+
+ ti_camera_device_t * const tiDevice = reinterpret_cast<ti_camera_device_t*>(device);
+ gCameraHals[tiDevice->cameraid]->setExtendedPreviewStreamOps(extendedOps);
+
+ return OK;
+}
+
+int camera_set_buffer_source(struct camera_device * device,
+ struct preview_stream_ops *tapin,
+ struct preview_stream_ops *tapout)
+{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
+ int rv = -EINVAL;
+ ti_camera_device_t* ti_dev = NULL;
+
+ if(!device)
+ return rv;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ rv = gCameraHals[ti_dev->cameraid]->setBufferSource(tapin, tapout);
+
+ return rv;
+}
+
+int camera_release_buffer_source(struct camera_device * device,
+ struct preview_stream_ops *tapin,
+ struct preview_stream_ops *tapout)
+{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
+ int rv = -EINVAL;
+ ti_camera_device_t* ti_dev = NULL;
+
+ if(!device)
+ return rv;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ rv = gCameraHals[ti_dev->cameraid]->releaseBufferSource(tapin, tapout);
+
+ return rv;
+}
+#endif
+
+void camera_set_callbacks(struct camera_device * device,
+ camera_notify_callback notify_cb,
+ camera_data_callback data_cb,
+ camera_data_timestamp_callback data_cb_timestamp,
+ camera_request_memory get_memory,
+ void *user)
+{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
+ ti_camera_device_t* ti_dev = NULL;
+
+ if(!device)
+ return;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ gCameraHals[ti_dev->cameraid]->setCallbacks(notify_cb, data_cb, data_cb_timestamp, get_memory, user);
+}
+
+void camera_enable_msg_type(struct camera_device * device, int32_t msg_type)
+{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
+ ti_camera_device_t* ti_dev = NULL;
+
+ if(!device)
+ return;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ gCameraHals[ti_dev->cameraid]->enableMsgType(msg_type);
+}
+
+void camera_disable_msg_type(struct camera_device * device, int32_t msg_type)
+{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
+ ti_camera_device_t* ti_dev = NULL;
+
+ if(!device)
+ return;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ gCameraHals[ti_dev->cameraid]->disableMsgType(msg_type);
+}
+
+int camera_msg_type_enabled(struct camera_device * device, int32_t msg_type)
+{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
+ ti_camera_device_t* ti_dev = NULL;
+
+ if(!device)
+ return 0;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ return gCameraHals[ti_dev->cameraid]->msgTypeEnabled(msg_type);
+}
+
+int camera_start_preview(struct camera_device * device)
+{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
+ int rv = -EINVAL;
+ ti_camera_device_t* ti_dev = NULL;
+
+ if(!device)
+ return rv;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ rv = gCameraHals[ti_dev->cameraid]->startPreview();
+
+ return rv;
+}
+
+void camera_stop_preview(struct camera_device * device)
+{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
+ ti_camera_device_t* ti_dev = NULL;
+
+ if(!device)
+ return;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ gCameraHals[ti_dev->cameraid]->stopPreview();
+}
+
+int camera_preview_enabled(struct camera_device * device)
+{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
+ int rv = -EINVAL;
+ ti_camera_device_t* ti_dev = NULL;
+
+ if(!device)
+ return rv;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ rv = gCameraHals[ti_dev->cameraid]->previewEnabled();
+ return rv;
+}
+
+int camera_store_meta_data_in_buffers(struct camera_device * device, int enable)
+{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
+ int rv = -EINVAL;
+ ti_camera_device_t* ti_dev = NULL;
+
+ if(!device)
+ return rv;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ // TODO: meta data buffer not current supported
+ rv = gCameraHals[ti_dev->cameraid]->storeMetaDataInBuffers(enable);
+ return rv;
+ //return enable ? android::INVALID_OPERATION: android::OK;
+}
+
+int camera_start_recording(struct camera_device * device)
+{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
+ int rv = -EINVAL;
+ ti_camera_device_t* ti_dev = NULL;
+
+ if(!device)
+ return rv;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ rv = gCameraHals[ti_dev->cameraid]->startRecording();
+ return rv;
+}
+
+void camera_stop_recording(struct camera_device * device)
+{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
+ ti_camera_device_t* ti_dev = NULL;
+
+ if(!device)
+ return;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ gCameraHals[ti_dev->cameraid]->stopRecording();
+}
+
+int camera_recording_enabled(struct camera_device * device)
+{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
+ int rv = -EINVAL;
+ ti_camera_device_t* ti_dev = NULL;
+
+ if(!device)
+ return rv;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ rv = gCameraHals[ti_dev->cameraid]->recordingEnabled();
+ return rv;
+}
+
+void camera_release_recording_frame(struct camera_device * device,
+ const void *opaque)
+{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
+ ti_camera_device_t* ti_dev = NULL;
+
+ if(!device)
+ return;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ gCameraHals[ti_dev->cameraid]->releaseRecordingFrame(opaque);
+}
+
+int camera_auto_focus(struct camera_device * device)
+{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
+ int rv = -EINVAL;
+ ti_camera_device_t* ti_dev = NULL;
+
+ if(!device)
+ return rv;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ rv = gCameraHals[ti_dev->cameraid]->autoFocus();
+ return rv;
+}
+
+int camera_cancel_auto_focus(struct camera_device * device)
+{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
+ int rv = -EINVAL;
+ ti_camera_device_t* ti_dev = NULL;
+
+ if(!device)
+ return rv;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ rv = gCameraHals[ti_dev->cameraid]->cancelAutoFocus();
+ return rv;
+}
+
+int camera_take_picture(struct camera_device * device)
+{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
+ int rv = -EINVAL;
+ ti_camera_device_t* ti_dev = NULL;
+
+ if(!device)
+ return rv;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ rv = gCameraHals[ti_dev->cameraid]->takePicture(0);
+ return rv;
+}
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
+int camera_take_picture_with_parameters(struct camera_device * device, const char *params)
+{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
+ int rv = -EINVAL;
+ ti_camera_device_t* ti_dev = NULL;
+
+ if(!device)
+ return rv;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ rv = gCameraHals[ti_dev->cameraid]->takePicture(params);
+ return rv;
+}
+#endif
+
+int camera_cancel_picture(struct camera_device * device)
+{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
+ int rv = -EINVAL;
+ ti_camera_device_t* ti_dev = NULL;
+
+ if(!device)
+ return rv;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ rv = gCameraHals[ti_dev->cameraid]->cancelPicture();
+ return rv;
+}
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
+int camera_reprocess(struct camera_device * device, const char *params)
+{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
+ int rv = -EINVAL;
+ ti_camera_device_t* ti_dev = NULL;
+
+ if(!device)
+ return rv;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ rv = gCameraHals[ti_dev->cameraid]->reprocess(params);
+ return rv;
+}
+
+int camera_cancel_reprocess(struct camera_device * device)
+{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
+ int rv = -EINVAL;
+ ti_camera_device_t* ti_dev = NULL;
+
+ if(!device)
+ return rv;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ rv = gCameraHals[ti_dev->cameraid]->cancel_reprocess();
+ return rv;
+}
+#endif
+
+int camera_set_parameters(struct camera_device * device, const char *params)
+{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
+ int rv = -EINVAL;
+ ti_camera_device_t* ti_dev = NULL;
+
+ if(!device)
+ return rv;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ rv = gCameraHals[ti_dev->cameraid]->setParameters(params);
+ return rv;
+}
+
+char* camera_get_parameters(struct camera_device * device)
+{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
+ char* param = NULL;
+ ti_camera_device_t* ti_dev = NULL;
+
+ if(!device)
+ return NULL;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ param = gCameraHals[ti_dev->cameraid]->getParameters();
+
+ return param;
+}
+
+static void camera_put_parameters(struct camera_device *device, char *parms)
+{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
+ ti_camera_device_t* ti_dev = NULL;
+
+ if(!device)
+ return;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ gCameraHals[ti_dev->cameraid]->putParameters(parms);
+}
+
+int camera_send_command(struct camera_device * device,
+ int32_t cmd, int32_t arg1, int32_t arg2)
+{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
+ int rv = -EINVAL;
+ ti_camera_device_t* ti_dev = NULL;
+
+ if(!device)
+ return rv;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ if ( cmd == CAMERA_CMD_SETUP_EXTENDED_OPERATIONS ) {
+ camera_device_extended_ops_t * const ops = static_cast<camera_device_extended_ops_t*>(
+ camera_cmd_send_command_args_to_pointer(arg1, arg2));
+
+ ops->set_extended_preview_ops = camera_set_extended_preview_ops;
+ ops->set_buffer_source = camera_set_buffer_source;
+ ops->release_buffer_source = camera_release_buffer_source;
+ ops->take_picture_with_parameters = camera_take_picture_with_parameters;
+ ops->reprocess = camera_reprocess;
+ ops->cancel_reprocess = camera_cancel_reprocess;
+ return OK;
+ }
+#endif
+
+ rv = gCameraHals[ti_dev->cameraid]->sendCommand(cmd, arg1, arg2);
+ return rv;
+}
+
+void camera_release(struct camera_device * device)
+{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
+ ti_camera_device_t* ti_dev = NULL;
+
+ if(!device)
+ return;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ gCameraHals[ti_dev->cameraid]->release();
+}
+
+int camera_dump(struct camera_device * device, int fd)
+{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
+ int rv = -EINVAL;
+ ti_camera_device_t* ti_dev = NULL;
+
+ if(!device)
+ return rv;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ rv = gCameraHals[ti_dev->cameraid]->dump(fd);
+ return rv;
+}
+
+extern "C" void heaptracker_free_leaked_memory(void);
+
+int camera_device_close(hw_device_t* device)
+{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
+ int ret = 0;
+ ti_camera_device_t* ti_dev = NULL;
+
+ android::AutoMutex lock(gCameraHalDeviceLock);
+
+ if (!device) {
+ ret = -EINVAL;
+ goto done;
+ }
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ if (ti_dev) {
+ if (gCameraHals[ti_dev->cameraid]) {
+ delete gCameraHals[ti_dev->cameraid];
+ gCameraHals[ti_dev->cameraid] = NULL;
+ gCamerasOpen--;
+ }
+
+ if (ti_dev->base.ops) {
+ free(ti_dev->base.ops);
+ }
+ free(ti_dev);
+ }
+done:
+#ifdef HEAPTRACKER
+ heaptracker_free_leaked_memory();
+#endif
+ return ret;
+}
+
+/*******************************************************************
+ * implementation of camera_module functions
+ *******************************************************************/
+
+/* open device handle to one of the cameras
+ *
+ * assume camera service will keep singleton of each camera
+ * so this function will always only be called once per camera instance
+ */
+
+int camera_device_open(const hw_module_t* module, const char* name,
+ hw_device_t** device)
+{
+ int rv = 0;
+ int num_cameras = 0;
+ int cameraid;
+ ti_camera_device_t* camera_device = NULL;
+ camera_device_ops_t* camera_ops = NULL;
+ CameraHal* camera = NULL;
+ CameraProperties::Properties* properties = NULL;
+
+ CAMHAL_LOGE("CameraHal_Module: ******************* 1 *******************");
+
+ android::AutoMutex lock(gCameraHalDeviceLock);
+
+ CAMHAL_LOGI("camera_device open");
+
+ if (name != NULL) {
+ cameraid = atoi(name);
+ num_cameras = gCameraProperties.camerasSupported();
+
+ if(cameraid > num_cameras)
+ {
+ CAMHAL_LOGE("camera service provided cameraid out of bounds, "
+ "cameraid = %d, num supported = %d",
+ cameraid, num_cameras);
+ rv = -EINVAL;
+ goto fail;
+ }
+
+ if(gCamerasOpen >= MAX_SIMUL_CAMERAS_SUPPORTED)
+ {
+ CAMHAL_LOGE("maximum number of cameras already open");
+ rv = -ENOMEM;
+ goto fail;
+ }
+
+ camera_device = (ti_camera_device_t*)malloc(sizeof(*camera_device));
+ if(!camera_device)
+ {
+ CAMHAL_LOGE("camera_device allocation fail");
+ rv = -ENOMEM;
+ goto fail;
+ }
+
+ camera_ops = (camera_device_ops_t*)malloc(sizeof(*camera_ops));
+ if(!camera_ops)
+ {
+ CAMHAL_LOGE("camera_ops allocation fail");
+ rv = -ENOMEM;
+ goto fail;
+ }
+
+ memset(camera_device, 0, sizeof(*camera_device));
+ memset(camera_ops, 0, sizeof(*camera_ops));
+
+ camera_device->base.common.tag = HARDWARE_DEVICE_TAG;
+ camera_device->base.common.version = 0;
+ camera_device->base.common.module = (hw_module_t *)(module);
+ camera_device->base.common.close = camera_device_close;
+ camera_device->base.ops = camera_ops;
+
+ camera_ops->set_preview_window = camera_set_preview_window;
+ camera_ops->set_callbacks = camera_set_callbacks;
+ camera_ops->enable_msg_type = camera_enable_msg_type;
+ camera_ops->disable_msg_type = camera_disable_msg_type;
+ camera_ops->msg_type_enabled = camera_msg_type_enabled;
+ camera_ops->start_preview = camera_start_preview;
+ camera_ops->stop_preview = camera_stop_preview;
+ camera_ops->preview_enabled = camera_preview_enabled;
+ camera_ops->store_meta_data_in_buffers = camera_store_meta_data_in_buffers;
+ camera_ops->start_recording = camera_start_recording;
+ camera_ops->stop_recording = camera_stop_recording;
+ camera_ops->recording_enabled = camera_recording_enabled;
+ camera_ops->release_recording_frame = camera_release_recording_frame;
+ camera_ops->auto_focus = camera_auto_focus;
+ camera_ops->cancel_auto_focus = camera_cancel_auto_focus;
+ camera_ops->take_picture = camera_take_picture;
+ camera_ops->cancel_picture = camera_cancel_picture;
+ camera_ops->set_parameters = camera_set_parameters;
+ camera_ops->get_parameters = camera_get_parameters;
+ camera_ops->put_parameters = camera_put_parameters;
+ camera_ops->send_command = camera_send_command;
+ camera_ops->release = camera_release;
+ camera_ops->dump = camera_dump;
+
+ *device = &camera_device->base.common;
+
+ // -------- TI specific stuff --------
+
+ camera_device->cameraid = cameraid;
+
+ if(gCameraProperties.getProperties(cameraid, &properties) < 0)
+ {
+ CAMHAL_LOGE("Couldn't get camera properties");
+ rv = -ENOMEM;
+ goto fail;
+ }
+
+ camera = new CameraHal(cameraid);
+
+ if(!camera)
+ {
+ CAMHAL_LOGE("Couldn't create instance of CameraHal class");
+ rv = -ENOMEM;
+ goto fail;
+ }
+
+ if(properties && (camera->initialize(properties) != NO_ERROR))
+ {
+ CAMHAL_LOGE("Couldn't initialize camera instance");
+ rv = -ENODEV;
+ goto fail;
+ }
+
+ gCameraHals[cameraid] = camera;
+ gCamerasOpen++;
+ }
+
+ return rv;
+
+fail:
+ if(camera_device) {
+ free(camera_device);
+ camera_device = NULL;
+ }
+ if(camera_ops) {
+ free(camera_ops);
+ camera_ops = NULL;
+ }
+ if(camera) {
+ delete camera;
+ camera = NULL;
+ }
+ *device = NULL;
+ return rv;
+}
+
+int camera_get_number_of_cameras(void)
+{
+ int num_cameras = MAX_CAMERAS_SUPPORTED;
+
+ num_cameras = gCameraProperties.camerasSupported();
+
+ return num_cameras;
+}
+
+int camera_get_camera_info(int camera_id, struct camera_info *info)
+{
+ int rv = 0;
+ int face_value = CAMERA_FACING_BACK;
+ int orientation = 0;
+ const char *valstr = NULL;
+ CameraProperties::Properties* properties = NULL;
+
+ // this going to be the first call from camera service
+ // initialize camera properties here...
+ if(gCameraProperties.initialize() != NO_ERROR)
+ {
+ CAMHAL_LOGEA("Unable to create or initialize CameraProperties");
+ rv = -EINVAL;
+ goto end;
+ }
+
+ //Get camera properties for camera index
+ if(gCameraProperties.getProperties(camera_id, &properties) < 0)
+ {
+ CAMHAL_LOGE("Couldn't get camera properties");
+ rv = -EINVAL;
+ goto end;
+ }
+
+ if(properties)
+ {
+ valstr = properties->get(CameraProperties::FACING_INDEX);
+ if(valstr != NULL)
+ {
+ if (strcmp(valstr, TICameraParameters::FACING_FRONT) == 0)
+ {
+ face_value = CAMERA_FACING_FRONT;
+ }
+ else if (strcmp(valstr, TICameraParameters::FACING_BACK) == 0)
+ {
+ face_value = CAMERA_FACING_BACK;
+ }
+ }
+
+ valstr = properties->get(CameraProperties::ORIENTATION_INDEX);
+ if(valstr != NULL)
+ {
+ orientation = atoi(valstr);
+ }
+ }
+ else
+ {
+ CAMHAL_LOGEB("getProperties() returned a NULL property set for Camera id %d", camera_id);
+ }
+
+ info->facing = face_value;
+ info->orientation = orientation;
+
+end:
+ return rv;
+}
+
+
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/CameraParameters.cpp b/camera/CameraParameters.cpp
new file mode 100644
index 0000000..1f99ccb
--- /dev/null
+++ b/camera/CameraParameters.cpp
@@ -0,0 +1,241 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file CameraParameters.cpp
+*
+* Definitions of the CameraProperties property-key string constants and
+* the CameraProperties::Properties accessor implementations.
+*
+*/
+
+#include "CameraProperties.h"
+
+namespace Ti {
+namespace Camera {
+
+const char CameraProperties::INVALID[]="prop-invalid-key";
+const char CameraProperties::CAMERA_NAME[]="prop-camera-name";
+const char CameraProperties::CAMERA_SENSOR_INDEX[]="prop-sensor-index";
+const char CameraProperties::CAMERA_SENSOR_ID[] = "prop-sensor-id";
+const char CameraProperties::ORIENTATION_INDEX[]="prop-orientation";
+const char CameraProperties::FACING_INDEX[]="prop-facing";
+const char CameraProperties::SUPPORTED_PREVIEW_SIZES[] = "prop-preview-size-values";
+const char CameraProperties::SUPPORTED_PREVIEW_SUBSAMPLED_SIZES[] = "prop-preview-subsampled-size-values";
+const char CameraProperties::SUPPORTED_PREVIEW_TOPBOTTOM_SIZES[] = "prop-preview-topbottom-size-values";
+const char CameraProperties::SUPPORTED_PREVIEW_SIDEBYSIDE_SIZES[] = "prop-preview-sidebyside-size-values";
+const char CameraProperties::SUPPORTED_PREVIEW_FORMATS[] = "prop-preview-format-values";
+const char CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES[] = "prop-preview-frame-rate-values";
+const char CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES_EXT[] = "prop-preview-frame-rate-ext-values";
+const char CameraProperties::SUPPORTED_PICTURE_SIZES[] = "prop-picture-size-values";
+const char CameraProperties::SUPPORTED_PICTURE_SUBSAMPLED_SIZES[] = "prop-picture-subsampled-size-values";
+const char CameraProperties::SUPPORTED_PICTURE_TOPBOTTOM_SIZES[] = "prop-picture-topbottom-size-values";
+const char CameraProperties::SUPPORTED_PICTURE_SIDEBYSIDE_SIZES[] = "prop-picture-sidebyside-size-values";
+const char CameraProperties::SUPPORTED_PICTURE_FORMATS[] = "prop-picture-format-values";
+const char CameraProperties::SUPPORTED_THUMBNAIL_SIZES[] = "prop-jpeg-thumbnail-size-values";
+const char CameraProperties::SUPPORTED_WHITE_BALANCE[] = "prop-whitebalance-values";
+const char CameraProperties::SUPPORTED_EFFECTS[] = "prop-effect-values";
+const char CameraProperties::SUPPORTED_ANTIBANDING[] = "prop-antibanding-values";
+const char CameraProperties::SUPPORTED_EXPOSURE_MODES[] = "prop-exposure-mode-values";
+const char CameraProperties::SUPPORTED_MANUAL_EXPOSURE_MIN[] = "prop-manual-exposure-min";
+const char CameraProperties::SUPPORTED_MANUAL_EXPOSURE_MAX[] = "prop-manual-exposure-max";
+const char CameraProperties::SUPPORTED_MANUAL_EXPOSURE_STEP[] = "prop-manual-exposure-step";
+const char CameraProperties::SUPPORTED_MANUAL_GAIN_ISO_MIN[] = "prop-manual-gain-iso-min";
+const char CameraProperties::SUPPORTED_MANUAL_GAIN_ISO_MAX[] = "prop-manual-gain-iso-max";
+const char CameraProperties::SUPPORTED_MANUAL_GAIN_ISO_STEP[] = "prop-manual-gain-iso-step";
+const char CameraProperties::SUPPORTED_EV_MAX[] = "prop-ev-compensation-max";
+const char CameraProperties::SUPPORTED_EV_MIN[] = "prop-ev-compensation-min";
+const char CameraProperties::SUPPORTED_EV_STEP[] = "prop-ev-compensation-step";
+const char CameraProperties::SUPPORTED_ISO_VALUES[] = "prop-iso-mode-values";
+const char CameraProperties::SUPPORTED_SCENE_MODES[] = "prop-scene-mode-values";
+const char CameraProperties::SUPPORTED_FLASH_MODES[] = "prop-flash-mode-values";
+const char CameraProperties::SUPPORTED_FOCUS_MODES[] = "prop-focus-mode-values";
+const char CameraProperties::REQUIRED_PREVIEW_BUFS[] = "prop-required-preview-bufs";
+const char CameraProperties::REQUIRED_IMAGE_BUFS[] = "prop-required-image-bufs";
+const char CameraProperties::SUPPORTED_ZOOM_RATIOS[] = "prop-zoom-ratios";
+const char CameraProperties::SUPPORTED_ZOOM_STAGES[] = "prop-zoom-stages";
+const char CameraProperties::SUPPORTED_IPP_MODES[] = "prop-ipp-values";
+const char CameraProperties::SMOOTH_ZOOM_SUPPORTED[] = "prop-smooth-zoom-supported";
+const char CameraProperties::ZOOM_SUPPORTED[] = "prop-zoom-supported";
+const char CameraProperties::PREVIEW_SIZE[] = "prop-preview-size-default";
+const char CameraProperties::PREVIEW_FORMAT[] = "prop-preview-format-default";
+const char CameraProperties::PREVIEW_FRAME_RATE[] = "prop-preview-frame-rate-default";
+const char CameraProperties::ZOOM[] = "prop-zoom-default";
+const char CameraProperties::PICTURE_SIZE[] = "prop-picture-size-default";
+const char CameraProperties::PICTURE_FORMAT[] = "prop-picture-format-default";
+const char CameraProperties::JPEG_THUMBNAIL_SIZE[] = "prop-jpeg-thumbnail-size-default";
+const char CameraProperties::WHITEBALANCE[] = "prop-whitebalance-default";
+const char CameraProperties::EFFECT[] = "prop-effect-default";
+const char CameraProperties::ANTIBANDING[] = "prop-antibanding-default";
+const char CameraProperties::EXPOSURE_MODE[] = "prop-exposure-mode-default";
+const char CameraProperties::EV_COMPENSATION[] = "prop-ev-compensation-default";
+const char CameraProperties::ISO_MODE[] = "prop-iso-mode-default";
+const char CameraProperties::FOCUS_MODE[] = "prop-focus-mode-default";
+const char CameraProperties::SCENE_MODE[] = "prop-scene-mode-default";
+const char CameraProperties::FLASH_MODE[] = "prop-flash-mode-default";
+const char CameraProperties::JPEG_QUALITY[] = "prop-jpeg-quality-default";
+const char CameraProperties::CONTRAST[] = "prop-contrast-default";
+const char CameraProperties::BRIGHTNESS[] = "prop-brightness-default";
+const char CameraProperties::SATURATION[] = "prop-saturation-default";
+const char CameraProperties::SHARPNESS[] = "prop-sharpness-default";
+const char CameraProperties::IPP[] = "prop-ipp-default";
+const char CameraProperties::GBCE[] = "prop-gbce-default";
+const char CameraProperties::SUPPORTED_GBCE[] = "prop-gbce-supported";
+const char CameraProperties::GLBCE[] = "prop-glbce-default";
+const char CameraProperties::SUPPORTED_GLBCE[] = "prop-glbce-supported";
+const char CameraProperties::S3D_PRV_FRAME_LAYOUT[] = "prop-s3d-prv-frame-layout";
+const char CameraProperties::S3D_PRV_FRAME_LAYOUT_VALUES[] = "prop-s3d-prv-frame-layout-values";
+const char CameraProperties::S3D_CAP_FRAME_LAYOUT[] = "prop-s3d-cap-frame-layout";
+const char CameraProperties::S3D_CAP_FRAME_LAYOUT_VALUES[] = "prop-s3d-cap-frame-layout-values";
+const char CameraProperties::AUTOCONVERGENCE_MODE[] = "prop-auto-convergence-mode";
+const char CameraProperties::AUTOCONVERGENCE_MODE_VALUES[] = "prop-auto-convergence-mode-values";
+const char CameraProperties::MANUAL_CONVERGENCE[] = "prop-manual-convergence";
+const char CameraProperties::SUPPORTED_MANUAL_CONVERGENCE_MIN[] = "prop-supported-manual-convergence-min";
+const char CameraProperties::SUPPORTED_MANUAL_CONVERGENCE_MAX[] = "prop-supported-manual-convergence-max";
+const char CameraProperties::SUPPORTED_MANUAL_CONVERGENCE_STEP[] = "prop-supported-manual-convergence-step";
+const char CameraProperties::VSTAB[] = "prop-vstab-default";
+const char CameraProperties::VSTAB_SUPPORTED[] = "prop-vstab-supported";
+const char CameraProperties::VNF[] = "prop-vnf-default";
+const char CameraProperties::VNF_SUPPORTED[] = "prop-vnf-supported";
+const char CameraProperties::REVISION[] = "prop-revision";
+const char CameraProperties::FOCAL_LENGTH[] = "prop-focal-length";
+const char CameraProperties::HOR_ANGLE[] = "prop-horizontal-angle";
+const char CameraProperties::VER_ANGLE[] = "prop-vertical-angle";
+const char CameraProperties::FRAMERATE_RANGE[] = "prop-framerate-range-default";
+const char CameraProperties::FRAMERATE_RANGE_SUPPORTED[]="prop-framerate-range-values";
+const char CameraProperties::FRAMERATE_RANGE_EXT_SUPPORTED[]="prop-framerate-range-ext-values";
+const char CameraProperties::SENSOR_ORIENTATION[]= "sensor-orientation";
+const char CameraProperties::SENSOR_ORIENTATION_VALUES[]= "sensor-orientation-values";
+const char CameraProperties::EXIF_MAKE[] = "prop-exif-make";
+const char CameraProperties::EXIF_MODEL[] = "prop-exif-model";
+const char CameraProperties::JPEG_THUMBNAIL_QUALITY[] = "prop-jpeg-thumbnail-quality-default";
+const char CameraProperties::MAX_FOCUS_AREAS[] = "prop-max-focus-areas";
+const char CameraProperties::MAX_FD_HW_FACES[] = "prop-max-fd-hw-faces";
+const char CameraProperties::MAX_FD_SW_FACES[] = "prop-max-fd-sw-faces";
+const char CameraProperties::AUTO_EXPOSURE_LOCK[] = "prop-auto-exposure-lock";
+const char CameraProperties::AUTO_EXPOSURE_LOCK_SUPPORTED[] = "prop-auto-exposure-lock-supported";
+const char CameraProperties::AUTO_WHITEBALANCE_LOCK[] = "prop-auto-whitebalance-lock";
+const char CameraProperties::AUTO_WHITEBALANCE_LOCK_SUPPORTED[] = "prop-auto-whitebalance-lock-supported";
+const char CameraProperties::MAX_NUM_METERING_AREAS[] = "prop-max-num-metering-areas";
+const char CameraProperties::METERING_AREAS[] = "prop-metering-areas";
+const char CameraProperties::VIDEO_SNAPSHOT_SUPPORTED[] = "prop-video-snapshot-supported";
+const char CameraProperties::VIDEO_SIZE[] = "video-size";
+const char CameraProperties::SUPPORTED_VIDEO_SIZES[] = "video-size-values";
+const char CameraProperties::MECHANICAL_MISALIGNMENT_CORRECTION_SUPPORTED[] = "prop-mechanical-misalignment-correction-supported";
+const char CameraProperties::MECHANICAL_MISALIGNMENT_CORRECTION[] = "prop-mechanical-misalignment-correction";
+const char CameraProperties::CAP_MODE_VALUES[] = "prop-mode-values";
+const char CameraProperties::RAW_WIDTH[] = "prop-raw-width-values";
+const char CameraProperties::RAW_HEIGHT[] = "prop-raw-height-values";
+const char CameraProperties::MAX_PICTURE_WIDTH[] = "prop-max-picture-width";
+const char CameraProperties::MAX_PICTURE_HEIGHT[] = "prop-max-picture-height";
+
+const char CameraProperties::DEFAULT_VALUE[] = "";
+
+const char CameraProperties::PARAMS_DELIMITER []= ",";
+
+// Returns the properties set for a specific camera index.
+// On success *properties points into the internal mCameraProps array
+// (owned by CameraProperties, not the caller) and 0 is returned;
+// -EINVAL is returned for an out-of-range index.
+int CameraProperties::getProperties(int cameraIndex, CameraProperties::Properties** properties)
+{
+    LOG_FUNCTION_NAME;
+
+    // Fix: reject out-of-range indices in both directions. The index
+    // comes straight from the camera service, and a negative value
+    // would previously have read before the start of mCameraProps.
+    if((cameraIndex < 0) || (cameraIndex >= MAX_CAMERAS_SUPPORTED))
+    {
+        CAMHAL_LOGEB("Camera index is incorrect index:%d supported:%d", cameraIndex, mCamerasSupported);
+        LOG_FUNCTION_NAME_EXIT;
+        return -EINVAL;
+    }
+
+    *properties = mCameraProps+cameraIndex;
+
+    LOG_FUNCTION_NAME_EXIT;
+    return 0;
+}
+
+// Stores |value| under key |prop| in the table of the currently
+// selected operating mode. A NULL value removes the key instead, so
+// set(key, NULL) acts as an erase.
+void CameraProperties::Properties::set(const char * const prop, const char * const value) {
+    CAMHAL_ASSERT(prop);
+
+    if ( !value ) {
+        mProperties[mCurrentMode].removeItem(android::String8(prop));
+    } else {
+        mProperties[mCurrentMode].replaceValueFor(android::String8(prop), android::String8(value));
+    }
+}
+
+// Convenience overload: formats an integer value as decimal text and
+// stores it under the given property key.
+void CameraProperties::Properties::set(const char * const prop, const int value) {
+    char s_val[30];
+    // snprintf instead of sprintf: guarantees NUL termination and can
+    // never overrun the stack buffer.
+    snprintf(s_val, sizeof(s_val), "%d", value);
+    set(prop, s_val);
+}
+
+// Returns the value stored under |prop| for the current operating mode.
+// NOTE(review): behavior for an absent key depends on
+// android::KeyedVector::valueFor() - confirm it yields an empty
+// String8 rather than aborting in this tree.
+const char* CameraProperties::Properties::get(const char * prop) const {
+    return mProperties[mCurrentMode].valueFor(android::String8(prop)).string();
+}
+
+// Integer accessor: parses the stored value with strtol using base
+// auto-detection (so "0x..." hex strings work). Returns -1 when the
+// key is absent or its value is empty - callers cannot distinguish
+// that from a stored "-1".
+int CameraProperties::Properties::getInt(const char * prop) const {
+    android::String8 value = mProperties[mCurrentMode].valueFor(android::String8(prop));
+    if (value.isEmpty()) {
+        return -1;
+    }
+    return strtol(value, 0, 0);
+}
+
+// Writes the sensor index into every operating mode's table so the
+// value is visible regardless of which mode is selected later; the
+// previously active mode is restored before returning.
+void CameraProperties::Properties::setSensorIndex(int idx) {
+    OperatingMode originalMode = getMode();
+    for ( int i = 0 ; i < MODE_MAX ; i++ ) {
+        setMode(static_cast<OperatingMode>(i));
+        set(CAMERA_SENSOR_INDEX, idx);
+    }
+    setMode(originalMode);
+}
+
+// Selects which per-mode property table subsequent get()/set() calls
+// operate on. Asserts on an out-of-range mode.
+void CameraProperties::Properties::setMode(OperatingMode mode) {
+    CAMHAL_ASSERT(mode >= 0 && mode < MODE_MAX);
+    mCurrentMode = mode;
+}
+
+// Returns the operating mode selected by the last setMode() call.
+OperatingMode CameraProperties::Properties::getMode() const {
+    return mCurrentMode;
+}
+
+// Debug helper: logs every key/value pair of the current operating
+// mode, preceded by the camera's sensor index.
+void CameraProperties::Properties::dump() {
+    CAMHAL_LOGD("================================");
+    CAMHAL_LOGD("Dumping properties for camera: %d", getInt("prop-sensor-index"));
+
+    for (size_t i = 0; i < mProperties[mCurrentMode].size(); i++) {
+        CAMHAL_LOGD("%s = %s",
+                mProperties[mCurrentMode].keyAt(i).string(),
+                mProperties[mCurrentMode].valueAt(i).string());
+    }
+
+    CAMHAL_LOGD("--------------------------------");
+}
+
+// Returns the key stored at position |index| for the current operating
+// mode, or NULL when the index is past the end of the property list.
+const char* CameraProperties::Properties::keyAt(const unsigned int index) const {
+    if (index >= mProperties[mCurrentMode].size()) {
+        return NULL;
+    }
+    return mProperties[mCurrentMode].keyAt(index).string();
+}
+
+// Returns the value stored at position |index| for the current
+// operating mode, or NULL when the index is out of range.
+const char* CameraProperties::Properties::valueAt(const unsigned int index) const {
+    if (index >= mProperties[mCurrentMode].size()) {
+        return NULL;
+    }
+    return mProperties[mCurrentMode].valueAt(index).string();
+}
+
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/CameraProperties.cpp b/camera/CameraProperties.cpp
new file mode 100644
index 0000000..240d8cb
--- /dev/null
+++ b/camera/CameraProperties.cpp
@@ -0,0 +1,135 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file CameraProperties.cpp
+*
+* This file maps the CameraHardwareInterface to the Camera interfaces on OMAP4 (mainly OMX).
+*
+*/
+
+#include "CameraProperties.h"
+
+#define CAMERA_ROOT "CameraRoot"
+#define CAMERA_INSTANCE "CameraInstance"
+
+namespace Ti {
+namespace Camera {
+
+// lower entries have higher priority
+static const char* g_camera_adapters[] = {
+#ifdef OMAP4_SUPPORT_OMX_CAMERA_ADAPTER
+ "libomxcameraadapter.so",
+#endif
+#ifdef OMAP4_SUPPORT_USB_CAMERA_ADAPTER
+ "libusbcameraadapter.so"
+#endif
+};
+
+/*********************************************************
+ CameraProperties - public function implementation
+**********************************************************/
+
+// Constructs an empty, uninitialized property store; initialize()
+// performs the actual capability probing.
+// Fix: mCamerasSupported was previously set to 0 twice (initializer
+// list and body) - the redundant body assignment is removed.
+CameraProperties::CameraProperties() : mCamerasSupported(0)
+{
+    LOG_FUNCTION_NAME;
+
+    mInitialized = 0;
+
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+// Destructor: no owned resources beyond the members themselves, so
+// only the entry/exit trace is emitted.
+CameraProperties::~CameraProperties()
+{
+    LOG_FUNCTION_NAME;
+
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+
+// Initializes the CameraProperties class: loads the per-camera
+// capability tables through the adapter, once per process. Guarded by
+// mLock; later calls return NO_ERROR without re-probing.
+status_t CameraProperties::initialize()
+{
+    LOG_FUNCTION_NAME;
+
+    status_t ret;
+
+    android::AutoMutex lock(mLock);
+
+    // Already loaded - nothing to do.
+    if(mInitialized)
+        return NO_ERROR;
+
+    ret = loadProperties();
+
+    // Only latch the flag on success so a failed probe can be retried.
+    if (ret == NO_ERROR) {
+        mInitialized = 1;
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+extern "C" status_t CameraAdapter_Capabilities(CameraProperties::Properties* properties_array,
+ int starting_camera, int max_camera, int & supported_cameras);
+
+/// Loads all the Camera related properties.
+/// Queries the linked camera adapter(s) for their capabilities, filling
+/// mCameraProps and updating mCamerasSupported. Returns NO_ERROR on
+/// success (including the zero-camera case) or UNKNOWN_ERROR when the
+/// adapter query fails or reports an impossible camera count.
+status_t CameraProperties::loadProperties()
+{
+    LOG_FUNCTION_NAME;
+
+    status_t ret = NO_ERROR;
+
+    //Must be re-initialized here, since loadProperties() could potentially be called more than once.
+    mCamerasSupported = 0;
+
+    // adapter updates capabilities and we update camera count
+    const status_t err = CameraAdapter_Capabilities(mCameraProps, mCamerasSupported,
+            MAX_CAMERAS_SUPPORTED, mCamerasSupported);
+
+    if(err != NO_ERROR) {
+        CAMHAL_LOGE("error while getting capabilities");
+        ret = UNKNOWN_ERROR;
+    } else if (mCamerasSupported == 0) {
+        // Log-text fix: "detexted" -> "detected".
+        CAMHAL_LOGE("camera busy or no cameras detected. properties not loaded. num_cameras = %d", mCamerasSupported);
+        ret = NO_ERROR; //No camera on board
+    } else if (mCamerasSupported > MAX_CAMERAS_SUPPORTED) {
+        // Log-text fix: "adapaters" -> "adapters".
+        CAMHAL_LOGE("returned too many adapters");
+        ret = UNKNOWN_ERROR;
+    } else {
+        CAMHAL_LOGI("num_cameras = %d", mCamerasSupported);
+
+        // Stamp each property set with its own index and dump it for
+        // debugging.
+        for (int i = 0; i < mCamerasSupported; i++) {
+            mCameraProps[i].setSensorIndex(i);
+            mCameraProps[i].dump();
+        }
+    }
+
+    CAMHAL_LOGV("mCamerasSupported = %d", mCamerasSupported);
+    LOG_FUNCTION_NAME_EXIT;
+    return ret;
+}
+
+// Returns the number of camera slots exposed to the framework.
+// NOTE(review): this returns the compile-time maximum, NOT the
+// mCamerasSupported count discovered by loadProperties() - possibly
+// because camera_get_number_of_cameras() calls it before initialize()
+// has run, when mCamerasSupported would still be 0. Confirm whether
+// over-reporting cameras to the camera service is intended.
+int CameraProperties::camerasSupported()
+{
+    LOG_FUNCTION_NAME;
+    return MAX_CAMERAS_SUPPORTED;
+}
+
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/DecoderFactory.cpp b/camera/DecoderFactory.cpp
new file mode 100644
index 0000000..ba4ae88
--- /dev/null
+++ b/camera/DecoderFactory.cpp
@@ -0,0 +1,67 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "FrameDecoder.h"
+#include "SwFrameDecoder.h"
+#include "OmxFrameDecoder.h"
+#include "CameraHal.h"
+#include "DecoderFactory.h"
+
+namespace Ti {
+namespace Camera {
+
+
+// Factory for frame decoders.
+//
+// @param type           compressed format of the incoming frames
+// @param forceSwDecoder when true, use the libjpeg software path for
+//                       MJPEG even if an OMX hardware decoder exists
+// @return a heap-allocated decoder owned by the caller, or NULL for an
+//         unsupported type (H264 without OMX, or an unknown enum value)
+FrameDecoder* DecoderFactory::createDecoderByType(DecoderType type, bool forceSwDecoder) {
+    FrameDecoder* decoder = NULL;
+    switch (type) {
+    case DecoderType_MJPEG: {
+
+#ifndef OMX_CAMERA_ADAPTER
+        /* If OMX Camera Adapter is not used, OMX implementation is not available in the device, switch to sw
+         * decoder
+         */
+        forceSwDecoder = true;
+#endif
+        if (!forceSwDecoder) {
+            decoder = new OmxFrameDecoder(DecoderType_MJPEG);
+            CAMHAL_LOGD("Using HW Decoder for MJPEG");
+        } else {
+            decoder = new SwFrameDecoder();
+            CAMHAL_LOGD("Using SW Decoder for MJPEG");
+        }
+
+        //TODO add logic that handle verification is HW Decoder is available ?
+        // And if no - create SW decoder.
+        break;
+    }
+#ifdef OMX_CAMERA_ADAPTER
+    case DecoderType_H264: {
+        decoder = new OmxFrameDecoder(DecoderType_H264);
+        CAMHAL_LOGD("Using HW Decoder for H264");
+        break;
+    }
+#endif
+    default: {
+        CAMHAL_LOGE("Unrecognized decoder type %d", type);
+    }
+    }
+
+    return decoder;
+}
+
+} // namespace Camera
+} // namespace Ti
+
diff --git a/camera/Decoder_libjpeg.cpp b/camera/Decoder_libjpeg.cpp
new file mode 100644
index 0000000..35c343f
--- /dev/null
+++ b/camera/Decoder_libjpeg.cpp
@@ -0,0 +1,301 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "Decoder_libjpeg.h"
+
+extern "C" {
+ #include "jpeglib.h"
+ #include "jerror.h"
+}
+
+#define NUM_COMPONENTS_IN_YUV 3
+
+namespace Ti {
+namespace Camera {
+
+/* JPEG DHT Segment omitted from MJPEG data */
+static unsigned char jpeg_odml_dht[0x1a6] = {
+ 0xff, 0xd8, /* Start of Image */
+ 0xff, 0xc4, 0x01, 0xa2, /* Define Huffman Table */
+
+ 0x00, 0x00, 0x01, 0x05, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b,
+
+ 0x01, 0x00, 0x03, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b,
+
+ 0x10, 0x00, 0x02, 0x01, 0x03, 0x03, 0x02, 0x04, 0x03, 0x05, 0x05, 0x04, 0x04, 0x00, 0x00, 0x01, 0x7d,
+ 0x01, 0x02, 0x03, 0x00, 0x04, 0x11, 0x05, 0x12, 0x21, 0x31, 0x41, 0x06, 0x13, 0x51, 0x61, 0x07,
+ 0x22, 0x71, 0x14, 0x32, 0x81, 0x91, 0xa1, 0x08, 0x23, 0x42, 0xb1, 0xc1, 0x15, 0x52, 0xd1, 0xf0,
+ 0x24, 0x33, 0x62, 0x72, 0x82, 0x09, 0x0a, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x25, 0x26, 0x27, 0x28,
+ 0x29, 0x2a, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49,
+ 0x4a, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59, 0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69,
+ 0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79, 0x7a, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89,
+ 0x8a, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98, 0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7,
+ 0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6, 0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3, 0xc4, 0xc5,
+ 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2, 0xd3, 0xd4, 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda, 0xe1, 0xe2,
+ 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9, 0xea, 0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8,
+ 0xf9, 0xfa,
+
+ 0x11, 0x00, 0x02, 0x01, 0x02, 0x04, 0x04, 0x03, 0x04, 0x07, 0x05, 0x04, 0x04, 0x00, 0x01, 0x02, 0x77,
+ 0x00, 0x01, 0x02, 0x03, 0x11, 0x04, 0x05, 0x21, 0x31, 0x06, 0x12, 0x41, 0x51, 0x07, 0x61, 0x71,
+ 0x13, 0x22, 0x32, 0x81, 0x08, 0x14, 0x42, 0x91, 0xa1, 0xb1, 0xc1, 0x09, 0x23, 0x33, 0x52, 0xf0,
+ 0x15, 0x62, 0x72, 0xd1, 0x0a, 0x16, 0x24, 0x34, 0xe1, 0x25, 0xf1, 0x17, 0x18, 0x19, 0x1a, 0x26,
+ 0x27, 0x28, 0x29, 0x2a, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48,
+ 0x49, 0x4a, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59, 0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68,
+ 0x69, 0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79, 0x7a, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
+ 0x88, 0x89, 0x8a, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98, 0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5,
+ 0xa6, 0xa7, 0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6, 0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3,
+ 0xc4, 0xc5, 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2, 0xd3, 0xd4, 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda,
+ 0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9, 0xea, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8,
+ 0xf9, 0xfa
+};
+
+// Custom libjpeg input source that reads straight from the in-memory
+// MJPEG frame (no stdio). The callback pointers inherited from
+// jpeg_source_mgr are wired up in the constructor below.
+struct libjpeg_source_mgr : jpeg_source_mgr {
+    libjpeg_source_mgr(unsigned char *buffer_ptr, int len);
+    ~libjpeg_source_mgr();
+
+    unsigned char *mBufferPtr;  // borrowed pointer to the compressed frame (not owned)
+    int mFilledLen;             // number of valid bytes in mBufferPtr
+};
+
+// jpeg_source_mgr::init_source hook: points libjpeg at the start of
+// the in-memory frame. bytes_in_buffer starts at 0 so the library's
+// first read triggers libjpeg_fill_input_buffer below.
+// NOTE(review): current_offset and the seek_input_data hook are not
+// part of stock libjpeg's jpeg_source_mgr - this assumes the
+// Android/TI libjpeg variant; confirm against the jpeglib.h in use.
+static void libjpeg_init_source(j_decompress_ptr cinfo) {
+    libjpeg_source_mgr* src = (libjpeg_source_mgr*)cinfo->src;
+    src->next_input_byte = (const JOCTET*)src->mBufferPtr;
+    src->bytes_in_buffer = 0;
+    src->current_offset = 0;
+}
+
+// seek_input_data hook: repositions the read pointer to an absolute
+// byte offset within the frame; bytes_in_buffer is zeroed so the next
+// read refills from the new position.
+static boolean libjpeg_seek_input_data(j_decompress_ptr cinfo, long byte_offset) {
+    libjpeg_source_mgr* src = (libjpeg_source_mgr*)cinfo->src;
+    src->current_offset = byte_offset;
+    src->next_input_byte = (const JOCTET*)src->mBufferPtr + byte_offset;
+    src->bytes_in_buffer = 0;
+    return TRUE;
+}
+
+// fill_input_buffer hook: the whole compressed frame is already
+// resident in memory, so hand libjpeg the entire buffer in one shot.
+static boolean libjpeg_fill_input_buffer(j_decompress_ptr cinfo) {
+    libjpeg_source_mgr* src = (libjpeg_source_mgr*)cinfo->src;
+    src->current_offset += src->mFilledLen;
+    src->next_input_byte = src->mBufferPtr;
+    src->bytes_in_buffer = src->mFilledLen;
+    return TRUE;
+}
+
+// skip_input_data hook: advances past uninteresting data (e.g. APPn
+// marker payloads). A request to skip beyond the remaining buffer is
+// only logged; the read position is left unchanged in that case.
+static void libjpeg_skip_input_data(j_decompress_ptr cinfo, long num_bytes) {
+    libjpeg_source_mgr* src = (libjpeg_source_mgr*)cinfo->src;
+
+    if (num_bytes > (long)src->bytes_in_buffer) {
+        CAMHAL_LOGEA("\n\n\n libjpeg_skip_input_data - num_bytes > (long)src->bytes_in_buffer \n\n\n");
+    } else {
+        src->next_input_byte += num_bytes;
+        src->bytes_in_buffer -= num_bytes;
+    }
+}
+
+// resync_to_restart hook: on marker desynchronization, rewind to the
+// beginning of the buffer instead of libjpeg's default scan-ahead
+// recovery (the whole frame is in memory, so re-reading is cheap).
+static boolean libjpeg_resync_to_restart(j_decompress_ptr cinfo, int desired) {
+    libjpeg_source_mgr* src = (libjpeg_source_mgr*)cinfo->src;
+    src->next_input_byte = (const JOCTET*)src->mBufferPtr;
+    src->bytes_in_buffer = 0;
+    return TRUE;
+}
+
+// term_source hook: nothing to release - the buffer belongs to the caller.
+static void libjpeg_term_source(j_decompress_ptr /*cinfo*/) {}
+
+// Wires the C callback table of the base jpeg_source_mgr to the static
+// hooks above and remembers the caller's buffer (borrowed, not copied).
+libjpeg_source_mgr::libjpeg_source_mgr(unsigned char *buffer_ptr, int len) : mBufferPtr(buffer_ptr), mFilledLen(len) {
+    init_source = libjpeg_init_source;
+    fill_input_buffer = libjpeg_fill_input_buffer;
+    skip_input_data = libjpeg_skip_input_data;
+    resync_to_restart = libjpeg_resync_to_restart;
+    term_source = libjpeg_term_source;
+    seek_input_data = libjpeg_seek_input_data;
+}
+
+libjpeg_source_mgr::~libjpeg_source_mgr() {}
+
+// Starts with no cached frame dimensions and no scratch buffers;
+// decode() allocates the plane buffers lazily once the size is known.
+Decoder_libjpeg::Decoder_libjpeg()
+{
+    mWidth = 0;
+    mHeight = 0;
+    Y_Plane = NULL;
+    U_Plane = NULL;
+    V_Plane = NULL;
+    UV_Plane = NULL;
+}
+
+// Frees any scratch buffers allocated by decode().
+Decoder_libjpeg::~Decoder_libjpeg()
+{
+    release();
+}
+
+// Frees the row-pointer tables and the planar UV scratch buffer.
+// free(NULL) is a defined no-op, so the per-pointer NULL checks were
+// redundant; the pointers are still reset so release() stays
+// idempotent and decode() knows to reallocate.
+void Decoder_libjpeg::release()
+{
+    free(Y_Plane);
+    Y_Plane = NULL;
+    free(U_Plane);
+    U_Plane = NULL;
+    free(V_Plane);
+    V_Plane = NULL;
+    free(UV_Plane);
+    UV_Plane = NULL;
+}
+
+// Size in bytes of the canonical SOI+DHT prefix (jpeg_odml_dht) that
+// appendDHT() splices in front of DHT-less MJPEG frames.
+int Decoder_libjpeg::readDHTSize()
+{
+    return sizeof(jpeg_odml_dht);
+}
+
+// 0xFF 0xC4 - DHT (Define Huffman Table) marker
+// 0xFF 0xD8 - SOI (Start Of Image) marker
+// 0xFF 0xD9 - EOI (End Of Image) marker
+// This function returns true if a DHT marker is found.
+bool Decoder_libjpeg::isDhtExist(unsigned char *jpeg_src, int filled_len) {
+    if (filled_len <= 0) {
+        return false;
+    }
+
+    // Slide a two-byte window over the frame looking for 0xFF 0xC4.
+    for (int pos = 0; pos + 1 < filled_len; pos++) {
+        if ((jpeg_src[pos] == 0xFF) && (jpeg_src[pos + 1] == 0xC4)) {
+            CAMHAL_LOGD("Found DHT (Define Huffman Table) marker");
+            return true;
+        }
+    }
+    return false;
+}
+
+// Splices the canonical SOI+DHT prefix in front of a DHT-less MJPEG
+// frame. Output layout: [jpeg_odml_dht][input minus its 2-byte SOI].
+// Returns the total output length, or 0 if the destination buffer is
+// too small.
+int Decoder_libjpeg::appendDHT(unsigned char *jpeg_src, int filled_len, unsigned char *jpeg_with_dht_buffer, int buff_size)
+{
+    /* Appending DHT to JPEG */
+
+    int len = filled_len + sizeof(jpeg_odml_dht) - 2; // final length of jpeg data
+    if (len > buff_size) {
+        // Fix: sizeof() is size_t; passing it to a %d conversion is
+        // undefined on LP64 - cast explicitly to match the specifier.
+        CAMHAL_LOGEA("\n\n\n Buffer size too small. filled_len=%d, buff_size=%d, sizeof(jpeg_odml_dht)=%d\n\n\n", filled_len, buff_size, (int)sizeof(jpeg_odml_dht));
+        return 0;
+    }
+
+    memcpy(jpeg_with_dht_buffer, jpeg_odml_dht, sizeof(jpeg_odml_dht));
+    memcpy((jpeg_with_dht_buffer + sizeof(jpeg_odml_dht)), jpeg_src + 2, (filled_len - 2));
+    return len;
+}
+
+
+/**
+ * Decodes one (M)JPEG frame into a caller-provided NV12 buffer.
+ *
+ * @param jpeg_src    compressed frame (must already contain a DHT)
+ * @param filled_len  number of valid bytes in jpeg_src
+ * @param nv12_buffer destination buffer in NV12 layout
+ * @param stride      row stride of nv12_buffer in bytes
+ * @return true on success, false for an empty or corrupt frame
+ *
+ * Luma rows are decoded directly into nv12_buffer; chroma is decoded
+ * planar into the UV_Plane scratch buffer and then interleaved into
+ * the NV12 UV plane.
+ */
+bool Decoder_libjpeg::decode(unsigned char *jpeg_src, int filled_len, unsigned char *nv12_buffer, int stride)
+{
+    struct jpeg_decompress_struct cinfo;
+    struct jpeg_error_mgr jerr;
+    struct libjpeg_source_mgr s_mgr(jpeg_src, filled_len);
+
+    if (filled_len == 0)
+        return false;
+
+    cinfo.err = jpeg_std_error(&jerr);
+    jpeg_create_decompress(&cinfo);
+
+    cinfo.src = &s_mgr;
+    int status = jpeg_read_header(&cinfo, true);
+    if (status != JPEG_HEADER_OK) {
+        CAMHAL_LOGEA("jpeg header corrupted");
+        // Fix: destroy the decompress object on this error path too;
+        // returning without it leaked libjpeg's internal allocations
+        // on every corrupt frame.
+        jpeg_destroy_decompress(&cinfo);
+        return false;
+    }
+
+    // Request planar YCbCr raw output so we can assemble NV12 ourselves.
+    cinfo.out_color_space = JCS_YCbCr;
+    cinfo.raw_data_out = true;
+    status = jpeg_start_decompress(&cinfo);
+    if (!status){
+        CAMHAL_LOGEA("jpeg_start_decompress failed");
+        jpeg_destroy_decompress(&cinfo); // fix: avoid leak on failure
+        return false;
+    }
+
+    if (mWidth == 0){
+        mWidth = cinfo.output_width;
+        mHeight = cinfo.output_height;
+        CAMHAL_LOGEA("w x h = %d x %d. stride=%d", cinfo.output_width, cinfo.output_height, stride);
+    }
+    else if ((cinfo.output_width > mWidth) || (cinfo.output_height > mHeight)) {
+        CAMHAL_LOGEA(" Free the existing buffers so that they are reallocated for new w x h. Old WxH = %dx%d. New WxH = %dx%d",
+                mWidth, mHeight, cinfo.output_width, cinfo.output_height);
+        release();
+        mWidth = cinfo.output_width;
+        mHeight = cinfo.output_height;
+    }
+
+    // Lazily (re)allocate the row-pointer tables and the planar UV
+    // scratch buffer (4:2:0 chroma is width*height/2 bytes total).
+    unsigned int decoded_uv_buffer_size = cinfo.output_width * cinfo.output_height / 2;
+    if (Y_Plane == NULL)Y_Plane = (unsigned char **)malloc(cinfo.output_height * sizeof(unsigned char *));
+    if (U_Plane == NULL)U_Plane = (unsigned char **)malloc(cinfo.output_height * sizeof(unsigned char *));
+    if (V_Plane == NULL)V_Plane = (unsigned char **)malloc(cinfo.output_height * sizeof(unsigned char *));
+    if (UV_Plane == NULL) UV_Plane = (unsigned char *)malloc(decoded_uv_buffer_size);
+
+    unsigned char **YUV_Planes[NUM_COMPONENTS_IN_YUV];
+    YUV_Planes[0] = Y_Plane;
+    YUV_Planes[1] = U_Plane;
+    YUV_Planes[2] = V_Plane;
+
+    unsigned char *row = &nv12_buffer[0];
+
+    // Y component: decode straight into the destination, one pointer
+    // per output row, honouring the caller's stride.
+    for (unsigned int j = 0; j < cinfo.output_height; j++, row += stride)
+        YUV_Planes[0][j] = row;
+
+    row = &UV_Plane[0];
+
+    // U component: chroma is half height in 4:2:0, so each scratch row
+    // is referenced by two consecutive luma rows.
+    for (unsigned int j = 0; j < cinfo.output_height; j+=2, row += cinfo.output_width / 2){
+        YUV_Planes[1][j+0] = row;
+        YUV_Planes[1][j+1] = row;
+    }
+
+    // V component: same layout, second half of the scratch buffer.
+    for (unsigned int j = 0; j < cinfo.output_height; j+=2, row += cinfo.output_width / 2){
+        YUV_Planes[2][j+0] = row;
+        YUV_Planes[2][j+1] = row;
+    }
+
+    // Decode in 8-row bands. (Comment fixed: the old "Interleaving U
+    // and V" label actually belonged to the loop further below.)
+    for (unsigned int i = 0; i < cinfo.output_height; i += 8) {
+        jpeg_read_raw_data(&cinfo, YUV_Planes, 8);
+        YUV_Planes[0] += 8;
+        YUV_Planes[1] += 8;
+        YUV_Planes[2] += 8;
+    }
+
+    // Interleave the planar U and V scratch data into the NV12 UV plane.
+    unsigned char *uv_ptr = nv12_buffer + (stride * cinfo.output_height);
+    unsigned char *u_ptr = UV_Plane;
+    unsigned char *v_ptr = UV_Plane + (decoded_uv_buffer_size / 2);
+    for(unsigned int i = 0; i < cinfo.output_height / 2; i++){
+        for(unsigned int j = 0; j < cinfo.output_width; j+=2){
+            *(uv_ptr + j) = *u_ptr; u_ptr++;
+            *(uv_ptr + j + 1) = *v_ptr; v_ptr++;
+        }
+        uv_ptr = uv_ptr + stride;
+    }
+
+    jpeg_finish_decompress(&cinfo);
+    jpeg_destroy_decompress(&cinfo);
+
+    return true;
+}
+
+} // namespace Camera
+} // namespace Ti
+
diff --git a/camera/DrmMemoryManager.cpp b/camera/DrmMemoryManager.cpp
new file mode 100644
index 0000000..1f7a99b
--- /dev/null
+++ b/camera/DrmMemoryManager.cpp
@@ -0,0 +1,246 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "CameraHal.h"
+#include "TICameraParameters.h"
+
+extern "C" {
+
+//#include <timm_osal_interfaces.h>
+//#include <timm_osal_trace.h>
+
+
+};
+
+namespace Ti {
+namespace Camera {
+
+///@todo Move these constants to a common header file, preferably in tiler.h
+#define STRIDE_8BIT (4 * 1024)
+#define STRIDE_16BIT (4 * 1024)
+
+#define ALLOCATION_2D 2
+
+///Utility Macro Declarations
+
+/*--------------------MemoryManager Class STARTS here-----------------------------*/
+MemoryManager::MemoryManager() {
+ mOmapDrm_Fd = -1;
+ mOmapDev = NULL;
+}
+
// Tear down in reverse order of initialize(): drop the omap device
// handle first, then close the DRM render-node fd.
// NOTE(review): omap_device_del() is invoked even when mOmapDev is NULL
// (manager never initialized) - confirm libdrm_omap tolerates NULL.
MemoryManager::~MemoryManager() {
    omap_device_del(mOmapDev);
    mOmapDev = NULL;
    if ( mOmapDrm_Fd >= 0 ) {
        close(mOmapDrm_Fd);
        mOmapDrm_Fd = -1;
    }
}
+
+status_t MemoryManager::initialize() {
+
+ if (mOmapDev)
+ return 0;
+
+ /* Open omapdrm device */
+ if ( mOmapDrm_Fd == -1 ) {
+ mOmapDrm_Fd = open("/dev/dri/renderD128", O_RDWR, 0);
+ if ( mOmapDrm_Fd < 0 ) {
+ CAMHAL_LOGE("drmOpen() failed, error: %d", mOmapDrm_Fd);
+ mOmapDrm_Fd = -1;
+ return NO_INIT;
+ }
+ }
+ mOmapDev = omap_device_new(mOmapDrm_Fd);
+ if(mOmapDev == NULL) {
+ close(mOmapDrm_Fd);
+ return -1;
+ }
+
+ return OK;
+}
+
+CameraBuffer* MemoryManager::allocateBufferList(int width, int height, const char* format, int &size, int numBufs)
+{
+ LOG_FUNCTION_NAME;
+
+ CAMHAL_ASSERT(mOmapDrm_Fd != -1);
+
+ ///We allocate numBufs+1 because the last entry will be marked NULL to indicate end of array, which is used when freeing
+ ///the buffers
+ const uint numArrayEntriesC = (uint)(numBufs+1);
+
+ ///Allocate a buffer array
+ CameraBuffer *buffers = new CameraBuffer [numArrayEntriesC];
+ if(!buffers) {
+ CAMHAL_LOGEB("Allocation failed when creating buffers array of %d CameraBuffer elements", numArrayEntriesC);
+ goto error;
+ }
+
+ ///Initialize the array with zeros - this will help us while freeing the array in case of error
+ ///If a value of an array element is NULL, it means we didnt allocate it
+ memset(buffers, 0, sizeof(CameraBuffer) * numArrayEntriesC);
+
+ //2D Allocations are not supported currently
+ if(size != 0) {
+ int dmaBufFd = -1;
+
+ ///1D buffers
+ for (int i = 0; i < numBufs; i++) {
+ void *data = NULL;
+
+ struct omap_bo *bo = omap_bo_new(mOmapDev, size + sizeof(CameraBuffer), OMAP_BO_WC);
+ if( !bo ) {
+ CAMHAL_LOGEB("FAILED to allocate DRM buffer of size=%d", size);
+ goto error;
+ }
+
+ data = omap_bo_map(bo);
+ if (data == NULL) {
+ CAMHAL_LOGEB("Userspace mapping of DRM buffers returned error %d", data);
+ goto error;
+ }
+
+ dmaBufFd = omap_bo_dmabuf(bo);
+ if ( dmaBufFd < 0) {
+ CAMHAL_LOGDB("DRM buffer share failed[%d]", dmaBufFd);
+ goto error;
+ }
+
+ buffers[i].type = CAMERA_BUFFER_DRM;
+ buffers[i].opaque = data;
+ buffers[i].mapped = data;
+ buffers[i].omapdrm_dev = mOmapDev;
+ buffers[i].omapdrm_bo = bo;
+ buffers[i].omapdrm_fd = mOmapDrm_Fd;
+ buffers[i].dma_buf_fd = dmaBufFd;
+ buffers[i].size = size;
+ buffers[i].format = CameraHal::getPixelFormatConstant(format);
+
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return buffers;
+
+error:
+
+ CAMHAL_LOGE("Freeing buffers already allocated after error occurred");
+ if(buffers)
+ freeBufferList(buffers);
+
+ if ( NULL != mErrorNotifier.get() )
+ mErrorNotifier->errorNotify(-ENOMEM);
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NULL;
+}
+
+CameraBuffer* MemoryManager::getBufferList(int *numBufs) {
+ LOG_FUNCTION_NAME;
+ if (numBufs) *numBufs = -1;
+
+ return NULL;
+}
+
+//TODO: Get needed data to map tiler buffers
+//Return dummy data for now
+uint32_t * MemoryManager::getOffsets()
+{
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NULL;
+}
+
+int MemoryManager::getFd()
+{
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return -1;
+}
+
+int MemoryManager::freeBufferList(CameraBuffer *buffers)
+{
+ status_t ret = NO_ERROR;
+ LOG_FUNCTION_NAME;
+
+ int i;
+
+ if(!buffers)
+ {
+ CAMHAL_LOGEA("NULL pointer passed to freebuffer");
+ LOG_FUNCTION_NAME_EXIT;
+ return BAD_VALUE;
+ }
+
+ i = 0;
+ while(buffers[i].type == CAMERA_BUFFER_DRM)
+ {
+ if(buffers[i].size)
+ {
+// munmap(buffers[i].opaque, buffers[i].size);
+ close(buffers[i].dma_buf_fd);
+ omap_bo_del(buffers[i].omapdrm_bo);
+ }
+ else
+ {
+ CAMHAL_LOGEA("Not a valid Memory Manager buffer");
+ }
+ i++;
+ }
+
+ if(i != 0)
+ {
+ delete [] buffers;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+status_t MemoryManager::setErrorHandler(ErrorNotifier *errorNotifier)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL == errorNotifier )
+ {
+ CAMHAL_LOGEA("Invalid Error Notifier reference");
+ ret = -EINVAL;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ mErrorNotifier = errorNotifier;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+} // namespace Camera
+} // namespace Ti
+
+
+/*--------------------MemoryManager Class ENDS here-----------------------------*/
diff --git a/camera/Encoder_libjpeg.cpp b/camera/Encoder_libjpeg.cpp
new file mode 100644
index 0000000..1cd09d3
--- /dev/null
+++ b/camera/Encoder_libjpeg.cpp
@@ -0,0 +1,549 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file Encoder_libjpeg.cpp
+*
+* This file encodes a YUV422I buffer to a jpeg
+* TODO(XXX): Need to support formats other than yuv422i
+* Change interface to pre/post-proc algo framework
+*
+*/
+
+#include "Encoder_libjpeg.h"
+#include "NV12_resize.h"
+#include "TICameraParameters.h"
+
+#include <stdlib.h>
+#include <unistd.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <stdio.h>
+#include <errno.h>
+#include <math.h>
+
+extern "C" {
+ #include "jpeglib.h"
+ #include "jerror.h"
+}
+
+#define ARRAY_SIZE(array) (sizeof((array)) / sizeof((array)[0]))
+#define MIN(x,y) ((x < y) ? x : y)
+
+namespace Ti {
+namespace Camera {
+
// Pairs a rotation angle (degrees) with the EXIF orientation tag value,
// stored as a string since EXIF values are written as text here.
struct integer_string_pair {
    unsigned int integer;
    const char* string;
};

// Lookup table for degreesToExifOrientation(). (The "degress" spelling
// is pre-existing and referenced below, so it is kept.)
static integer_string_pair degress_to_exif_lut [] = {
    // degrees, exif_orientation
    {0, "1"},
    {90, "6"},
    {180, "3"},
    {270, "8"},
};
// Custom libjpeg destination manager writing compressed output directly
// into a caller-supplied fixed-size buffer (no dynamic growth).
struct libjpeg_destination_mgr : jpeg_destination_mgr {
    libjpeg_destination_mgr(uint8_t* input, int size);

    uint8_t* buf;      // output buffer (owned by the caller)
    int bufsize;       // capacity of buf, in bytes
    size_t jpegsize;   // bytes produced; set by libjpeg_term_destination
};
+
+static void libjpeg_init_destination (j_compress_ptr cinfo) {
+ libjpeg_destination_mgr* dest = (libjpeg_destination_mgr*)cinfo->dest;
+
+ dest->next_output_byte = dest->buf;
+ dest->free_in_buffer = dest->bufsize;
+ dest->jpegsize = 0;
+}
+
// libjpeg destination callback invoked when the output buffer fills up.
// NOTE(review): this rewinds to the start of the buffer rather than
// growing it, so output exceeding bufsize silently overwrites earlier
// data - callers must size the destination buffer generously.
static boolean libjpeg_empty_output_buffer(j_compress_ptr cinfo) {
    libjpeg_destination_mgr* dest = (libjpeg_destination_mgr*)cinfo->dest;

    dest->next_output_byte = dest->buf;
    dest->free_in_buffer = dest->bufsize;
    return TRUE; // ?
}
+
+static void libjpeg_term_destination (j_compress_ptr cinfo) {
+ libjpeg_destination_mgr* dest = (libjpeg_destination_mgr*)cinfo->dest;
+ dest->jpegsize = dest->bufsize - dest->free_in_buffer;
+}
+
+libjpeg_destination_mgr::libjpeg_destination_mgr(uint8_t* input, int size) {
+ this->init_destination = libjpeg_init_destination;
+ this->empty_output_buffer = libjpeg_empty_output_buffer;
+ this->term_destination = libjpeg_term_destination;
+
+ this->buf = input;
+ this->bufsize = size;
+
+ jpegsize = 0;
+}
+
+/* private static functions */
// Expand one row of NV21 (Y plane + interleaved V/U pairs) into packed
// YUV444 (3 bytes per pixel). The chroma pair advances every other
// pixel, matching NV21's 2x horizontal chroma subsampling.
static void nv21_to_yuv(uint8_t* dst, uint8_t* y, uint8_t* uv, int width) {
    if (dst == NULL || y == NULL || uv == NULL) {
        return;
    }

    for (int remaining = width; remaining > 0; ) {
        remaining--;
        dst[0] = *y++;
        dst[1] = uv[1];   // U: second byte of the VU pair
        dst[2] = uv[0];   // V: first byte of the VU pair
        dst += 3;
        // Step the chroma pair on the same cadence as the original code
        // (after every even value of the remaining-pixel counter).
        if ((remaining % 2) == 0) {
            uv += 2;
        }
    }
}
+
// Expand one row of packed UYVY (each 32-bit word = U0 Y0 V0 Y1, i.e.
// two pixels sharing one U/V pair) into packed YUV444. Odd widths are
// silently rejected. A NEON fast path is used when compiled in and the
// width is a multiple of 16; otherwise a scalar loop runs.
static void uyvy_to_yuv(uint8_t* dst, uint32_t* src, int width) {
    if (!dst || !src) {
        return;
    }

    if (width % 2) {
        return; // not supporting odd widths
    }

#ifdef ARCH_ARM_HAVE_NEON
    // currently, neon routine only supports multiple of 16 width
    if ((width % 16) == 0) {
        int n = width;
        asm volatile (
        "   pld [%[src], %[src_stride], lsl #2]                         \n\t"
        "   cmp %[n], #16                                               \n\t"
        "   blt 5f                                                      \n\t"
        "0: @ 16 pixel swap                                             \n\t"
        "   vld2.8  {q0, q1} , [%[src]]! @ q0 = uv q1 = y               \n\t"
        "   vuzp.8 q0, q2                @ d0 = u d4 = v                \n\t"
        "   vmov d1, d0                  @ q0 = u0u1u2..u0u1u2...       \n\t"
        "   vmov d5, d4                  @ q2 = v0v1v2..v0v1v2...       \n\t"
        "   vzip.8 d0, d1                @ q0 = u0u0u1u1u2u2...         \n\t"
        "   vzip.8 d4, d5                @ q2 = v0v0v1v1v2v2...         \n\t"
        "   vswp q0, q1                  @ now q0 = y q1 = u q2 = v     \n\t"
        "   vst3.8  {d0,d2,d4},[%[dst]]!                                \n\t"
        "   vst3.8  {d1,d3,d5},[%[dst]]!                                \n\t"
        "   sub %[n], %[n], #16                                         \n\t"
        "   cmp %[n], #16                                               \n\t"
        "   bge 0b                                                      \n\t"
        "5: @ end                                                       \n\t"
#ifdef NEEDS_ARM_ERRATA_754319_754320
        "   vmov s0,s0  @ add noop for errata item                      \n\t"
#endif
        : [dst] "+r" (dst), [src] "+r" (src), [n] "+r" (n)
        : [src_stride] "r" (width)
        : "cc", "memory", "q0", "q1", "q2"
        );
    } else
#endif
    {
        // Scalar fallback: decode two pixels per 32-bit source word.
        while ((width-=2) >= 0) {
            uint8_t u0 = (src[0] >> 0) & 0xFF;
            uint8_t y0 = (src[0] >> 8) & 0xFF;
            uint8_t v0 = (src[0] >> 16) & 0xFF;
            uint8_t y1 = (src[0] >> 24) & 0xFF;
            dst[0] = y0;
            dst[1] = u0;
            dst[2] = v0;
            dst[3] = y1;
            dst[4] = u0;
            dst[5] = v0;
            dst += 6;
            src++;
        }
    }
}
+
// Expand one row of packed YUYV (each 32-bit word = Y0 U0 Y1 V0, i.e.
// two pixels sharing one U/V pair) into packed YUV444. Odd widths are
// silently rejected. A NEON fast path is used when compiled in and the
// width is a multiple of 16; otherwise a scalar loop runs.
static void yuyv_to_yuv(uint8_t* dst, uint32_t* src, int width) {
    if (!dst || !src) {
        return;
    }

    if (width % 2) {
        return; // not supporting odd widths
    }

#ifdef ARCH_ARM_HAVE_NEON
    // currently, neon routine only supports multiple of 16 width
    if ((width % 16) == 0) {
        int n = width;
        asm volatile (
        "   pld [%[src], %[src_stride], lsl #2]                         \n\t"
        "   cmp %[n], #16                                               \n\t"
        "   blt 5f                                                      \n\t"
        "0: @ 16 pixel swap                                             \n\t"
        "   vld2.8  {q0, q1} , [%[src]]! @ q0 = yyyy.. q1 = uvuv..      \n\t"
        "   vuzp.8 q1, q2                @ d2 = u d4 = v                \n\t"
        "   vmov d3, d2                  @ q1 = u0u1u2..u0u1u2...       \n\t"
        "   vmov d5, d4                  @ q2 = v0v1v2..v0v1v2...       \n\t"
        "   vzip.8 d2, d3                @ q1 = u0u0u1u1u2u2...         \n\t"
        "   vzip.8 d4, d5                @ q2 = v0v0v1v1v2v2...         \n\t"
        "   @ now q0 = y q1 = u q2 = v                                  \n\t"
        "   vst3.8  {d0,d2,d4},[%[dst]]!                                \n\t"
        "   vst3.8  {d1,d3,d5},[%[dst]]!                                \n\t"
        "   sub %[n], %[n], #16                                         \n\t"
        "   cmp %[n], #16                                               \n\t"
        "   bge 0b                                                      \n\t"
        "5: @ end                                                       \n\t"
#ifdef NEEDS_ARM_ERRATA_754319_754320
        "   vmov s0,s0  @ add noop for errata item                      \n\t"
#endif
        : [dst] "+r" (dst), [src] "+r" (src), [n] "+r" (n)
        : [src_stride] "r" (width)
        : "cc", "memory", "q0", "q1", "q2"
        );
    } else
#endif
    {
        // Scalar fallback: decode two pixels per 32-bit source word.
        while ((width-=2) >= 0) {
            uint8_t y0 = (src[0] >> 0) & 0xFF;
            uint8_t u0 = (src[0] >> 8) & 0xFF;
            uint8_t y1 = (src[0] >> 16) & 0xFF;
            uint8_t v0 = (src[0] >> 24) & 0xFF;
            dst[0] = y0;
            dst[1] = u0;
            dst[2] = v0;
            dst[3] = y1;
            dst[4] = u0;
            dst[5] = v0;
            dst += 6;
            src++;
        }
    }
}
+
+static void resize_nv12(Encoder_libjpeg::params* params, uint8_t* dst_buffer) {
+ structConvImage o_img_ptr, i_img_ptr;
+
+ if (!params || !dst_buffer) {
+ return;
+ }
+
+ //input
+ i_img_ptr.uWidth = params->in_width;
+ i_img_ptr.uStride = i_img_ptr.uWidth;
+ i_img_ptr.uHeight = params->in_height;
+ i_img_ptr.eFormat = IC_FORMAT_YCbCr420_lp;
+ i_img_ptr.imgPtr = (uint8_t*) params->src;
+ i_img_ptr.clrPtr = i_img_ptr.imgPtr + (i_img_ptr.uWidth * i_img_ptr.uHeight);
+ i_img_ptr.uOffset = 0;
+
+ //ouput
+ o_img_ptr.uWidth = params->out_width;
+ o_img_ptr.uStride = o_img_ptr.uWidth;
+ o_img_ptr.uHeight = params->out_height;
+ o_img_ptr.eFormat = IC_FORMAT_YCbCr420_lp;
+ o_img_ptr.imgPtr = dst_buffer;
+ o_img_ptr.clrPtr = o_img_ptr.imgPtr + (o_img_ptr.uWidth * o_img_ptr.uHeight);
+ o_img_ptr.uOffset = 0;
+
+ VT_resizeFrame_Video_opt2_lp(&i_img_ptr, &o_img_ptr, NULL, 0);
+}
+
+/* public static functions */
+const char* ExifElementsTable::degreesToExifOrientation(unsigned int degrees) {
+ for (unsigned int i = 0; i < ARRAY_SIZE(degress_to_exif_lut); i++) {
+ if (degrees == degress_to_exif_lut[i].integer) {
+ return degress_to_exif_lut[i].string;
+ }
+ }
+ return NULL;
+}
+
+void ExifElementsTable::stringToRational(const char* str, unsigned int* num, unsigned int* den) {
+ int len;
+ char * tempVal = NULL;
+
+ if (str != NULL) {
+ len = strlen(str);
+ tempVal = (char*) malloc( sizeof(char) * (len + 1));
+ }
+
+ if (tempVal != NULL) {
+ // convert the decimal string into a rational
+ size_t den_len;
+ char *ctx;
+ unsigned int numerator = 0;
+ unsigned int denominator = 0;
+ char* temp = NULL;
+
+ memset(tempVal, '\0', len + 1);
+ strncpy(tempVal, str, len);
+ temp = strtok_r(tempVal, ".", &ctx);
+
+ if (temp != NULL)
+ numerator = atoi(temp);
+
+ if (!numerator)
+ numerator = 1;
+
+ temp = strtok_r(NULL, ".", &ctx);
+ if (temp != NULL) {
+ den_len = strlen(temp);
+ if(HUGE_VAL == den_len ) {
+ den_len = 0;
+ }
+
+ denominator = static_cast<unsigned int>(pow(10, den_len));
+ numerator = numerator * denominator + atoi(temp);
+ } else {
+ denominator = 1;
+ }
+
+ free(tempVal);
+
+ *num = numerator;
+ *den = denominator;
+ }
+}
+
// True for EXIF tags whose value is stored as prefixed ASCII
// (currently only GPSProcessingMethod).
bool ExifElementsTable::isAsciiTag(const char* tag) {
    // TODO(XXX): Add tags as necessary
    return (strcmp(tag, TAG_GPS_PROCESSING_METHOD) == 0);
}
+
// Parse the freshly encoded JPEG with jhead and attach the collected
// EXIF/GPS tags. On success jpeg_opened is set; saveJpeg() must then be
// called to serialize the result and release jhead state.
void ExifElementsTable::insertExifToJpeg(unsigned char* jpeg, size_t jpeg_size) {
    ReadMode_t read_mode = (ReadMode_t)(READ_METADATA | READ_IMAGE);

    ResetJpgfile();
    if (ReadJpegSectionsFromBuffer(jpeg, jpeg_size, read_mode)) {
        jpeg_opened = true;
#ifdef ANDROID_API_JB_OR_LATER
        create_EXIF(table, exif_tag_count, gps_tag_count, has_datetime_tag);
#else
        create_EXIF(table, exif_tag_count, gps_tag_count);
#endif
    }
}
+
+status_t ExifElementsTable::insertExifThumbnailImage(const char* thumb, int len) {
+ status_t ret = NO_ERROR;
+
+ if ((len > 0) && jpeg_opened) {
+ ret = ReplaceThumbnailFromBuffer(thumb, len) ? NO_ERROR : UNKNOWN_ERROR;
+ CAMHAL_LOGDB("insertExifThumbnailImage. ReplaceThumbnail(). ret=%d", ret);
+ }
+
+ return ret;
+}
+
+void ExifElementsTable::saveJpeg(unsigned char* jpeg, size_t jpeg_size) {
+ if (jpeg_opened) {
+ WriteJpegToBuffer(jpeg, jpeg_size);
+ DiscardData();
+ jpeg_opened = false;
+ }
+}
+
+/* public functions */
+ExifElementsTable::~ExifElementsTable() {
+ int num_elements = gps_tag_count + exif_tag_count;
+
+ for (int i = 0; i < num_elements; i++) {
+ if (table[i].Value) {
+ free(table[i].Value);
+ }
+ }
+
+ if (jpeg_opened) {
+ DiscardData();
+ }
+}
+
/**
 * Append one EXIF or GPS tag to the table.
 * @param tag   jhead tag name (e.g. TAG_DATETIME)
 * @param value value as text; for ASCII tags it must already carry the
 *              ExifAsciiPrefix bytes (see length computation below)
 * @return NO_ERROR, -EINVAL on NULL args, NO_MEMORY when the table is full
 */
status_t ExifElementsTable::insertElement(const char* tag, const char* value) {
    unsigned int value_length = 0;
    status_t ret = NO_ERROR;

    if (!value || !tag) {
        return -EINVAL;
    }

    if (position >= MAX_EXIF_TAGS_SUPPORTED) {
        CAMHAL_LOGEA("Max number of EXIF elements already inserted");
        return NO_MEMORY;
    }

    if (isAsciiTag(tag)) {
        // NOTE(review): assumes 'value' already begins with the
        // ExifAsciiPrefix bytes: total = prefix size + length of the
        // payload past the prefix. Confirm all callers pass prefixed values.
        value_length = sizeof(ExifAsciiPrefix) + strlen(value + sizeof(ExifAsciiPrefix));
    } else {
        value_length = strlen(value);
    }

    if (IsGpsTag(tag)) {
        table[position].GpsTag = TRUE;
        table[position].Tag = GpsTagNameToValue(tag);
        gps_tag_count++;
    } else {
        table[position].GpsTag = FALSE;
        table[position].Tag = TagNameToValue(tag);
        exif_tag_count++;

        if (strcmp(tag, TAG_DATETIME) == 0) {
#ifdef ANDROID_API_JB_OR_LATER
            has_datetime_tag = true;
#else
            // jhead isn't taking datetime tag...this is a WA
            ImageInfo.numDateTimeTags = 1;
            memcpy(ImageInfo.DateTime, value,
                   MIN(ARRAY_SIZE(ImageInfo.DateTime), value_length + 1));
#endif
        }
    }

    // Value is duplicated so the caller's string need not outlive the table.
    table[position].DataLength = 0;
    table[position].Value = (char*) malloc(sizeof(char) * (value_length + 1));

    if (table[position].Value) {
        memcpy(table[position].Value, value, value_length + 1);
        table[position].DataLength = value_length + 1;
    }

    position++;
    return ret;
}
+
+/* private member functions */
/**
 * Compress one frame to JPEG into input->dst.
 * Supported input formats: YUV422I (YUYV), YUV422I_UYVY, and YUV420SP
 * (NV21-ordered chroma); only YUV420SP may additionally be resized.
 * Rows are converted to packed YUV444 one scanline at a time and fed to
 * libjpeg. Encoding stops early if mCancelEncoding is raised.
 * @return JPEG byte count produced (also stored in input->jpeg_size),
 *         or 0 on parameter error.
 */
size_t Encoder_libjpeg::encode(params* input) {
    jpeg_compress_struct cinfo;
    jpeg_error_mgr jerr;
    jpeg_destination_mgr jdest; // NOTE(review): unused local
    uint8_t* src = NULL, *resize_src = NULL;
    uint8_t* row_tmp = NULL;
    uint8_t* row_src = NULL;
    uint8_t* row_uv = NULL; // used only for NV12
    int out_width = 0, in_width = 0;
    int out_height = 0, in_height = 0;
    int bpp = 2; // for uyvy
    int right_crop = 0, start_offset = 0;

    if (!input) {
        return 0;
    }

    out_width = input->out_width;
    in_width = input->in_width;
    out_height = input->out_height;
    in_height = input->in_height;
    right_crop = input->right_crop;
    start_offset = input->start_offset;
    src = input->src;
    input->jpeg_size = 0;

    libjpeg_destination_mgr dest_mgr(input->dst, input->dst_size);

    // param check...
    if ((in_width < 2) || (out_width < 2) || (in_height < 2) || (out_height < 2) ||
        (src == NULL) || (input->dst == NULL) || (input->quality < 1) || (input->src_size < 1) ||
        (input->dst_size < 1) || (input->format == NULL)) {
        goto exit;
    }

    // NV12/NV21 (1 byte per luma sample); optionally rescale first.
    if (strcmp(input->format, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
        bpp = 1;
        if ((in_width != out_width) || (in_height != out_height)) {
            resize_src = (uint8_t*) malloc(input->dst_size);
            resize_nv12(input, resize_src);
            if (resize_src) src = resize_src;
        }
    } else if (strcmp(input->format, android::CameraParameters::PIXEL_FORMAT_YUV422I) &&
               strcmp(input->format, TICameraParameters::PIXEL_FORMAT_YUV422I_UYVY)) {
        // we currently only support yuv422i and yuv420sp
        CAMHAL_LOGEB("Encoder: format not supported: %s", input->format);
        goto exit;
    } else if ((in_width != out_width) || (in_height != out_height)) {
        CAMHAL_LOGEB("Encoder: resizing is not supported for this format: %s", input->format);
        goto exit;
    }

    cinfo.err = jpeg_std_error(&jerr);

    jpeg_create_compress(&cinfo);

    CAMHAL_LOGDB("encoding...  \n\t"
                 "width: %d    \n\t"
                 "height:%d    \n\t"
                 "dest %p      \n\t"
                 "dest size:%d \n\t"
                 "mSrc %p      \n\t"
                 "format: %s",
                 out_width, out_height, input->dst,
                 input->dst_size, src, input->format);

    cinfo.dest = &dest_mgr;
    cinfo.image_width = out_width - right_crop;
    cinfo.image_height = out_height;
    cinfo.input_components = 3;
    cinfo.in_color_space = JCS_YCbCr;
    cinfo.input_gamma = 1;

    jpeg_set_defaults(&cinfo);
    jpeg_set_quality(&cinfo, input->quality, TRUE);
    cinfo.dct_method = JDCT_IFAST;

    jpeg_start_compress(&cinfo, TRUE);

    // Scratch row for the per-scanline YUV444 conversion.
    row_tmp = (uint8_t*)malloc((out_width - right_crop) * 3);
    row_src = src + start_offset;
    row_uv = src + out_width * out_height * bpp;

    while ((cinfo.next_scanline < cinfo.image_height) && !mCancelEncoding) {
        JSAMPROW row[1];    /* pointer to JSAMPLE row[s] */

        // convert input yuv format to yuv444
        if (strcmp(input->format, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
            nv21_to_yuv(row_tmp, row_src, row_uv, out_width - right_crop);
        } else if (strcmp(input->format, TICameraParameters::PIXEL_FORMAT_YUV422I_UYVY) == 0) {
            uyvy_to_yuv(row_tmp, (uint32_t*)row_src, out_width - right_crop);
        } else if (strcmp(input->format, android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
            yuyv_to_yuv(row_tmp, (uint32_t*)row_src, out_width - right_crop);
        }

        row[0] = row_tmp;
        jpeg_write_scanlines(&cinfo, row, 1);
        row_src = row_src + out_width*bpp;

        // move uv row if input format needs it (chroma advances every
        // second scanline due to 4:2:0 vertical subsampling)
        if (strcmp(input->format, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
            if (!(cinfo.next_scanline % 2))
                row_uv = row_uv + out_width * bpp;
        }
    }

    // no need to finish encoding routine if we are prematurely stopping
    // we will end up crashing in dest_mgr since data is incomplete
    if (!mCancelEncoding)
        jpeg_finish_compress(&cinfo);
    jpeg_destroy_compress(&cinfo);

    if (resize_src) free(resize_src);
    if (row_tmp) free(row_tmp);

 exit:
    input->jpeg_size = dest_mgr.jpegsize;
    return dest_mgr.jpegsize;
}
+
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/FrameDecoder.cpp b/camera/FrameDecoder.cpp
new file mode 100644
index 0000000..80b4946
--- /dev/null
+++ b/camera/FrameDecoder.cpp
@@ -0,0 +1,204 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "Common.h"
+#include "FrameDecoder.h"
+
+
+namespace Ti {
+namespace Camera {
+
// Decoder starts detached from any CameraHal and in the Uninitialized
// state; configure() and start() must be called before use.
FrameDecoder::FrameDecoder()
: mCameraHal(NULL), mState(DecoderState_Uninitialized) {
}
+
// Nothing owned at this level; subclass cleanup happens via release()/doRelease().
FrameDecoder::~FrameDecoder() {
}
+
+status_t FrameDecoder::start() {
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mLock);
+ status_t ret;
+ if (mState == DecoderState_Running) {
+ return NO_INIT;
+ }
+ ret = doStart();
+ if (ret == NO_ERROR) {
+ mState = DecoderState_Running;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+void FrameDecoder::stop() {
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mLock);
+ if (mState >= DecoderState_Requested_Stop) {
+ return;
+ }
+ mState = DecoderState_Requested_Stop;
+ doStop();
+ mState = DecoderState_Stoppped;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void FrameDecoder::release() {
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mLock);
+ if (mState <= DecoderState_Requested_Stop) {
+ return;
+ }
+ doRelease();
+ mState = DecoderState_Uninitialized;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void FrameDecoder::flush() {
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mLock);
+ if (mState <= DecoderState_Requested_Stop) {
+ return;
+ }
+ doFlush();
+ mInQueue.clear();
+ mOutQueue.clear();
+
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void FrameDecoder::configure(const DecoderParameters& params) {
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mLock);
+ if (mState == DecoderState_Running) {
+ return;
+ }
+ mParams = params;
+ mInQueue.reserve(mParams.inputBufferCount);
+ mOutQueue.reserve(mParams.outputBufferCount);
+ doConfigure(params);
+ mState = DecoderState_Initialized;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+status_t FrameDecoder::dequeueInputBuffer(int &id) {
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mLock);
+
+ if (mState != DecoderState_Running) {
+ CAMHAL_LOGE("Try to use Decoder not in RUNNING state");
+ return INVALID_OPERATION;
+ }
+
+ for (size_t i = 0; i < mInQueue.size(); i++) {
+ int index = mInQueue[i];
+ android::sp<MediaBuffer>& in = mInBuffers->editItemAt(index);
+ android::AutoMutex bufferLock(in->getLock());
+ if (in->getStatus() == BufferStatus_InDecoded) {
+ id = index;
+ in->setStatus(BufferStatus_Unknown);
+ mInQueue.removeAt(i);
+ return NO_ERROR;
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+ return INVALID_OPERATION;
+}
+
+status_t FrameDecoder::dequeueOutputBuffer(int &id) {
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mLock);
+
+ if (mState != DecoderState_Running) {
+ CAMHAL_LOGE("Try to use Decoder not in RUNNING state");
+ return INVALID_OPERATION;
+ }
+
+ for (size_t i = 0; i < mOutQueue.size(); i++) {
+ int index = mOutQueue[i];
+ android::sp<MediaBuffer>& out = mOutBuffers->editItemAt(index);
+ android::AutoMutex bufferLock(out->getLock());
+ if (out->getStatus() == BufferStatus_OutFilled) {
+ id = index;
+ out->setStatus(BufferStatus_Unknown);
+ mOutQueue.removeAt(i);
+ return NO_ERROR;
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+ return INVALID_OPERATION;
+}
+
+status_t FrameDecoder::queueOutputBuffer(int index) {
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mLock);
+
+ //We queue all available buffers to Decoder not in recording mode - before start
+ if (mState > DecoderState_Running) {
+ CAMHAL_LOGE("Try to use Decoder not in RUNNING state");
+ return INVALID_OPERATION;
+ }
+
+ android::sp<MediaBuffer>& out = mOutBuffers->editItemAt(index);
+ android::AutoMutex bufferLock(out->getLock());
+ out->setStatus(BufferStatus_OutQueued);
+ mOutQueue.push_back(index);
+
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_ERROR;
+}
+
+status_t FrameDecoder::queueInputBuffer(int id) {
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mLock);
+
+ if (mState != DecoderState_Running) {
+ CAMHAL_LOGE("Try to use Decoder not in RUNNING state");
+ return INVALID_OPERATION;
+ }
+
+ {
+ android::sp<MediaBuffer>& in = mInBuffers->editItemAt(id);
+ android::AutoMutex bufferLock(in->getLock());
+ in->setStatus(BufferStatus_InQueued);
+ mInQueue.push_back(id);
+ }
+
+ // Since we got queued buffer - we can process it
+ doProcessInputBuffer();
+
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_ERROR;
+}
+
+
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/NV12_resize.c b/camera/NV12_resize.c
new file mode 100644
index 0000000..7f92fb2
--- /dev/null
+++ b/camera/NV12_resize.c
@@ -0,0 +1,307 @@
+#include "NV12_resize.h"
+
+//#define LOG_NDEBUG 0
+#define LOG_NIDEBUG 0
+#define LOG_NDDEBUG 0
+
+#define LOG_TAG "NV12_resize"
+#define STRIDE 4096
+#include <utils/Log.h>
+
+/*==========================================================================
+* Function Name : VT_resizeFrame_Video_opt2_lp
+*
+* Description : Resize a yuv frame.
+*
+* Input(s) : input_img_ptr -> Input Image Structure
+* : output_img_ptr -> Output Image Structure
+* : cropout -> crop structure
+*
+* Value Returned : mmBool -> FALSE on error TRUE on success
+* NOTE:
* Not tested for crop functionality.
+* faster version.
+============================================================================*/
+mmBool
+VT_resizeFrame_Video_opt2_lp
+(
+ structConvImage* i_img_ptr, /* Points to the input image */
+ structConvImage* o_img_ptr, /* Points to the output image */
+ IC_rect_type* cropout, /* how much to resize to in final image */
+ mmUint16 dummy /* Transparent pixel value */
+ )
+{
+ ALOGV("VT_resizeFrame_Video_opt2_lp+");
+
+ mmUint16 row,col;
+ mmUint32 resizeFactorX;
+ mmUint32 resizeFactorY;
+
+
+ mmUint16 x, y;
+
+ mmUchar* ptr8;
+ mmUchar *ptr8Cb, *ptr8Cr;
+
+
+ mmUint16 xf, yf;
+ mmUchar* inImgPtrY;
+ mmUchar* inImgPtrU;
+ mmUchar* inImgPtrV;
+ mmUint32 cox, coy, codx, cody;
+ mmUint16 idx,idy, idxC;
+
+ if(i_img_ptr->uWidth == o_img_ptr->uWidth)
+ {
+ if(i_img_ptr->uHeight == o_img_ptr->uHeight)
+ {
+ ALOGV("************************f(i_img_ptr->uHeight == o_img_ptr->uHeight) are same *********************\n");
+ ALOGV("************************(i_img_ptr->width == %d" , i_img_ptr->uWidth );
+ ALOGV("************************(i_img_ptr->uHeight == %d" , i_img_ptr->uHeight );
+ ALOGV("************************(o_img_ptr->width == %d" ,o_img_ptr->uWidth );
+ ALOGV("************************(o_img_ptr->uHeight == %d" , o_img_ptr->uHeight );
+ }
+ }
+
+ if (!i_img_ptr || !i_img_ptr->imgPtr ||
+ !o_img_ptr || !o_img_ptr->imgPtr)
+ {
+ ALOGE("Image Point NULL");
+ ALOGV("VT_resizeFrame_Video_opt2_lp-");
+ return FALSE;
+ }
+
+ inImgPtrY = (mmUchar *) i_img_ptr->imgPtr + i_img_ptr->uOffset;
+ inImgPtrU = (mmUchar *) i_img_ptr->clrPtr + i_img_ptr->uOffset/2;
+ inImgPtrV = (mmUchar*)inImgPtrU + 1;
+
+ if (cropout == NULL)
+ {
+ cox = 0;
+ coy = 0;
+ codx = o_img_ptr->uWidth;
+ cody = o_img_ptr->uHeight;
+ }
+ else
+ {
+ cox = cropout->x;
+ coy = cropout->y;
+ codx = cropout->uWidth;
+ cody = cropout->uHeight;
+ }
+ idx = i_img_ptr->uWidth;
+ idy = i_img_ptr->uHeight;
+
+ /* make sure valid input size */
+ if (idx < 1 || idy < 1 || i_img_ptr->uStride < 1)
+ {
+ ALOGE("idx or idy less then 1 idx = %d idy = %d stride = %d", idx, idy, i_img_ptr->uStride);
+ ALOGV("VT_resizeFrame_Video_opt2_lp-");
+ return FALSE;
+ }
+
+ resizeFactorX = ((idx-1)<<9) / codx;
+ resizeFactorY = ((idy-1)<<9) / cody;
+
+ if(i_img_ptr->eFormat == IC_FORMAT_YCbCr420_lp &&
+ o_img_ptr->eFormat == IC_FORMAT_YCbCr420_lp)
+ {
+ ptr8 = (mmUchar*)o_img_ptr->imgPtr + cox + coy*o_img_ptr->uWidth;
+
+
+ ////////////////////////////for Y//////////////////////////
+ for (row=0; row < cody; row++)
+ {
+ mmUchar *pu8Yrow1 = NULL;
+ mmUchar *pu8Yrow2 = NULL;
+ y = (mmUint16) ((mmUint32) (row*resizeFactorY) >> 9);
+ yf = (mmUchar) ((mmUint32)((row*resizeFactorY) >> 6) & 0x7);
+ pu8Yrow1 = inImgPtrY + (y) * i_img_ptr->uStride;
+ pu8Yrow2 = pu8Yrow1 + i_img_ptr->uStride;
+
+ for (col=0; col < codx; col++)
+ {
+ mmUchar in11, in12, in21, in22;
+ mmUchar *pu8ptr1 = NULL;
+ mmUchar *pu8ptr2 = NULL;
+ mmUchar w;
+ mmUint16 accum_1;
+ //mmUint32 accum_W;
+
+
+
+ x = (mmUint16) ((mmUint32) (col*resizeFactorX) >> 9);
+ xf = (mmUchar) ((mmUint32) ((col*resizeFactorX) >> 6) & 0x7);
+
+
+ //accum_W = 0;
+ accum_1 = 0;
+
+ pu8ptr1 = pu8Yrow1 + (x);
+ pu8ptr2 = pu8Yrow2 + (x);
+
+ /* A pixel */
+ //in = *(inImgPtrY + (y)*idx + (x));
+ in11 = *(pu8ptr1);
+
+ w = bWeights[xf][yf][0];
+ accum_1 = (w * in11);
+ //accum_W += (w);
+
+ /* B pixel */
+ //in = *(inImgPtrY + (y)*idx + (x+1));
+ in12 = *(pu8ptr1+1);
+ w = bWeights[xf][yf][1];
+ accum_1 += (w * in12);
+ //accum_W += (w);
+
+ /* C pixel */
+ //in = *(inImgPtrY + (y+1)*idx + (x));
+ in21 = *(pu8ptr2);
+ w = bWeights[xf][yf][3];
+ accum_1 += (w * in21);
+ //accum_W += (w);
+
+ /* D pixel */
+ //in = *(inImgPtrY + (y+1)*idx + (x+1));
+ in22 = *(pu8ptr2+1);
+ w = bWeights[xf][yf][2];
+ accum_1 += (w * in22);
+ //accum_W += (w);
+
+ /* divide by sum of the weights */
+ //accum_1 /= (accum_W);
+ //accum_1 = (accum_1/64);
+ accum_1 = (accum_1>>6);
+ *ptr8 = (mmUchar)accum_1 ;
+
+
+ ptr8++;
+ }
+ ptr8 = ptr8 + (o_img_ptr->uStride - codx);
+ }
+ ////////////////////////////for Y//////////////////////////
+
+ ///////////////////////////////for Cb-Cr//////////////////////
+
+ ptr8Cb = (mmUchar*)o_img_ptr->clrPtr + cox + coy*o_img_ptr->uWidth;
+
+ ptr8Cr = (mmUchar*)(ptr8Cb+1);
+
+ idxC = (idx>>1);
+ for (row=0; row < (((cody)>>1)); row++)
+ {
+ mmUchar *pu8Cbr1 = NULL;
+ mmUchar *pu8Cbr2 = NULL;
+ mmUchar *pu8Crr1 = NULL;
+ mmUchar *pu8Crr2 = NULL;
+
+ y = (mmUint16) ((mmUint32) (row*resizeFactorY) >> 9);
+ yf = (mmUchar) ((mmUint32)((row*resizeFactorY) >> 6) & 0x7);
+
+ pu8Cbr1 = inImgPtrU + (y) * i_img_ptr->uStride;
+ pu8Cbr2 = pu8Cbr1 + i_img_ptr->uStride;
+ pu8Crr1 = inImgPtrV + (y) * i_img_ptr->uStride;
+ pu8Crr2 = pu8Crr1 + i_img_ptr->uStride;
+
+ for (col=0; col < (((codx)>>1)); col++)
+ {
+ mmUchar in11, in12, in21, in22;
+ mmUchar *pu8Cbc1 = NULL;
+ mmUchar *pu8Cbc2 = NULL;
+ mmUchar *pu8Crc1 = NULL;
+ mmUchar *pu8Crc2 = NULL;
+
+ mmUchar w;
+ mmUint16 accum_1Cb, accum_1Cr;
+ //mmUint32 accum_WCb, accum_WCr;
+
+
+ x = (mmUint16) ((mmUint32) (col*resizeFactorX) >> 9);
+ xf = (mmUchar) ((mmUint32) ((col*resizeFactorX) >> 6) & 0x7);
+
+
+ //accum_WCb = accum_WCr = 0;
+ accum_1Cb = accum_1Cr = 0;
+
+ pu8Cbc1 = pu8Cbr1 + (x*2);
+ pu8Cbc2 = pu8Cbr2 + (x*2);
+ pu8Crc1 = pu8Crr1 + (x*2);
+ pu8Crc2 = pu8Crr2 + (x*2);
+
+
+
+ /* A pixel */
+ w = bWeights[xf][yf][0];
+
+ in11 = *(pu8Cbc1);
+ accum_1Cb = (w * in11);
+ // accum_WCb += (w);
+
+ in11 = *(pu8Crc1);
+ accum_1Cr = (w * in11);
+ //accum_WCr += (w);
+
+ /* B pixel */
+ w = bWeights[xf][yf][1];
+
+ in12 = *(pu8Cbc1+2);
+ accum_1Cb += (w * in12);
+ //accum_WCb += (w);
+
+ in12 = *(pu8Crc1+2);
+ accum_1Cr += (w * in12);
+ //accum_WCr += (w);
+
+ /* C pixel */
+ w = bWeights[xf][yf][3];
+
+ in21 = *(pu8Cbc2);
+ accum_1Cb += (w * in21);
+ //accum_WCb += (w);
+
+ in21 = *(pu8Crc2);
+ accum_1Cr += (w * in21);
+ //accum_WCr += (w);
+
+ /* D pixel */
+ w = bWeights[xf][yf][2];
+
+ in22 = *(pu8Cbc2+2);
+ accum_1Cb += (w * in22);
+ //accum_WCb += (w);
+
+ in22 = *(pu8Crc2+2);
+ accum_1Cr += (w * in22);
+ //accum_WCr += (w);
+
+ /* divide by sum of the weights */
+ //accum_1Cb /= (accum_WCb);
+ accum_1Cb = (accum_1Cb>>6);
+ *ptr8Cb = (mmUchar)accum_1Cb ;
+
+
+ accum_1Cr = (accum_1Cr >> 6);
+ *ptr8Cr = (mmUchar)accum_1Cr ;
+
+ ptr8Cb++;
+ ptr8Cr++;
+
+ ptr8Cb++;
+ ptr8Cr++;
+ }
+ ptr8Cb = ptr8Cb + (o_img_ptr->uStride-codx);
+ ptr8Cr = ptr8Cr + (o_img_ptr->uStride-codx);
+ }
+ ///////////////////For Cb- Cr////////////////////////////////////////
+ }
+ else
+ {
+ ALOGE("eFormat not supported");
+ ALOGV("VT_resizeFrame_Video_opt2_lp-");
+ return FALSE;
+ }
+ ALOGV("success");
+ ALOGV("VT_resizeFrame_Video_opt2_lp-");
+ return TRUE;
+}
diff --git a/camera/NV12_resize.cpp b/camera/NV12_resize.cpp
new file mode 100644
index 0000000..971ee38
--- /dev/null
+++ b/camera/NV12_resize.cpp
@@ -0,0 +1,290 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "NV12_resize.h"
+
+#ifdef LOG_TAG
+#undef LOG_TAG
+#endif
+#define LOG_TAG "NV12_resize"
+
+#define STRIDE 4096
+
+/*==========================================================================
+* Function Name : VT_resizeFrame_Video_opt2_lp
+*
+* Description : Resize a yuv frame.
+*
+* Input(s) : input_img_ptr -> Input Image Structure
+* : output_img_ptr -> Output Image Structure
+* : cropout -> crop structure
+*
+* Value Returned : mmBool -> FALSE on error TRUE on success
+* NOTE:
+* Not tested for crop funtionallity.
+* faster version.
+============================================================================*/
+mmBool
+VT_resizeFrame_Video_opt2_lp(
+ structConvImage* i_img_ptr, /* Points to the input image */
+ structConvImage* o_img_ptr, /* Points to the output image */
+ IC_rect_type* cropout, /* how much to resize to in final image */
+ mmUint16 dummy /* Transparent pixel value */
+ ) {
+ LOG_FUNCTION_NAME;
+
+ mmUint16 row,col;
+ mmUint32 resizeFactorX;
+ mmUint32 resizeFactorY;
+
+ mmUint16 x, y;
+
+ mmUchar* ptr8;
+ mmUchar *ptr8Cb, *ptr8Cr;
+
+ mmUint16 xf, yf;
+ mmUchar* inImgPtrY;
+ mmUchar* inImgPtrU;
+ mmUchar* inImgPtrV;
+ mmUint32 cox, coy, codx, cody;
+ mmUint16 idx,idy, idxC;
+
+ if ( i_img_ptr->uWidth == o_img_ptr->uWidth ) {
+ if ( i_img_ptr->uHeight == o_img_ptr->uHeight ) {
+ CAMHAL_LOGV("************************f(i_img_ptr->uHeight == o_img_ptr->uHeight) are same *********************\n");
+ CAMHAL_LOGV("************************(i_img_ptr->width == %d" , i_img_ptr->uWidth );
+ CAMHAL_LOGV("************************(i_img_ptr->uHeight == %d" , i_img_ptr->uHeight );
+ CAMHAL_LOGV("************************(o_img_ptr->width == %d" ,o_img_ptr->uWidth );
+ CAMHAL_LOGV("************************(o_img_ptr->uHeight == %d" , o_img_ptr->uHeight );
+ }
+ }
+
+ if ( !i_img_ptr || !i_img_ptr->imgPtr || !o_img_ptr || !o_img_ptr->imgPtr ) {
+ CAMHAL_LOGE("Image Point NULL");
+ return false;
+ }
+
+ inImgPtrY = (mmUchar *) i_img_ptr->imgPtr + i_img_ptr->uOffset;
+ inImgPtrU = (mmUchar *) i_img_ptr->clrPtr + i_img_ptr->uOffset/2;
+ inImgPtrV = (mmUchar*)inImgPtrU + 1;
+
+ if ( !cropout ) {
+ cox = 0;
+ coy = 0;
+ codx = o_img_ptr->uWidth;
+ cody = o_img_ptr->uHeight;
+ } else {
+ cox = cropout->x;
+ coy = cropout->y;
+ codx = cropout->uWidth;
+ cody = cropout->uHeight;
+ }
+ idx = i_img_ptr->uWidth;
+ idy = i_img_ptr->uHeight;
+
+ /* make sure valid input size */
+ if ( idx < 1 || idy < 1 || i_img_ptr->uStride < 1 ) {
+ CAMHAL_LOGE("idx or idy less then 1 idx = %d idy = %d stride = %d", idx, idy, i_img_ptr->uStride);
+ return false;
+ }
+
+ resizeFactorX = ((idx-1)<<9) / codx;
+ resizeFactorY = ((idy-1)<<9) / cody;
+
+ if( i_img_ptr->eFormat != IC_FORMAT_YCbCr420_lp ||
+ o_img_ptr->eFormat != IC_FORMAT_YCbCr420_lp ) {
+ CAMHAL_LOGE("eFormat not supported");
+ return false;
+ }
+
+ ptr8 = (mmUchar*)o_img_ptr->imgPtr + cox + coy*o_img_ptr->uWidth;
+
+ ////////////////////////////for Y//////////////////////////
+ for ( row = 0; row < cody; row++ ) {
+ mmUchar *pu8Yrow1 = NULL;
+ mmUchar *pu8Yrow2 = NULL;
+ y = (mmUint16) ((mmUint32) (row*resizeFactorY) >> 9);
+ yf = (mmUchar) ((mmUint32)((row*resizeFactorY) >> 6) & 0x7);
+ pu8Yrow1 = inImgPtrY + (y) * i_img_ptr->uStride;
+ pu8Yrow2 = pu8Yrow1 + i_img_ptr->uStride;
+
+ for ( col = 0; col < codx; col++ ) {
+ mmUchar in11, in12, in21, in22;
+ mmUchar *pu8ptr1 = NULL;
+ mmUchar *pu8ptr2 = NULL;
+ mmUchar w;
+ mmUint16 accum_1;
+ //mmUint32 accum_W;
+
+ x = (mmUint16) ((mmUint32) (col*resizeFactorX) >> 9);
+ xf = (mmUchar) ((mmUint32) ((col*resizeFactorX) >> 6) & 0x7);
+
+ //accum_W = 0;
+ accum_1 = 0;
+
+ pu8ptr1 = pu8Yrow1 + (x);
+ pu8ptr2 = pu8Yrow2 + (x);
+
+ /* A pixel */
+ //in = *(inImgPtrY + (y)*idx + (x));
+ in11 = *(pu8ptr1);
+
+ w = bWeights[xf][yf][0];
+ accum_1 = (w * in11);
+ //accum_W += (w);
+
+ /* B pixel */
+ //in = *(inImgPtrY + (y)*idx + (x+1));
+ in12 = *(pu8ptr1+1);
+ w = bWeights[xf][yf][1];
+ accum_1 += (w * in12);
+ //accum_W += (w);
+
+ /* C pixel */
+ //in = *(inImgPtrY + (y+1)*idx + (x));
+ in21 = *(pu8ptr2);
+ w = bWeights[xf][yf][3];
+ accum_1 += (w * in21);
+ //accum_W += (w);
+
+ /* D pixel */
+ //in = *(inImgPtrY + (y+1)*idx + (x+1));
+ in22 = *(pu8ptr2+1);
+ w = bWeights[xf][yf][2];
+ accum_1 += (w * in22);
+ //accum_W += (w);
+
+ /* divide by sum of the weights */
+ //accum_1 /= (accum_W);
+ //accum_1 = (accum_1/64);
+ accum_1 = (accum_1>>6);
+ *ptr8 = (mmUchar)accum_1 ;
+
+ ptr8++;
+ }
+ ptr8 = ptr8 + (o_img_ptr->uStride - codx);
+ }
+ ////////////////////////////for Y//////////////////////////
+
+ ///////////////////////////////for Cb-Cr//////////////////////
+
+ ptr8Cb = (mmUchar*)o_img_ptr->clrPtr + cox + coy*o_img_ptr->uWidth;
+
+ ptr8Cr = (mmUchar*)(ptr8Cb+1);
+
+ idxC = (idx>>1);
+ for ( row = 0; row < (((cody)>>1)); row++ ) {
+ mmUchar *pu8Cbr1 = NULL;
+ mmUchar *pu8Cbr2 = NULL;
+ mmUchar *pu8Crr1 = NULL;
+ mmUchar *pu8Crr2 = NULL;
+
+ y = (mmUint16) ((mmUint32) (row*resizeFactorY) >> 9);
+ yf = (mmUchar) ((mmUint32)((row*resizeFactorY) >> 6) & 0x7);
+
+ pu8Cbr1 = inImgPtrU + (y) * i_img_ptr->uStride;
+ pu8Cbr2 = pu8Cbr1 + i_img_ptr->uStride;
+ pu8Crr1 = inImgPtrV + (y) * i_img_ptr->uStride;
+ pu8Crr2 = pu8Crr1 + i_img_ptr->uStride;
+
+ for ( col = 0; col < (((codx)>>1)); col++ ) {
+ mmUchar in11, in12, in21, in22;
+ mmUchar *pu8Cbc1 = NULL;
+ mmUchar *pu8Cbc2 = NULL;
+ mmUchar *pu8Crc1 = NULL;
+ mmUchar *pu8Crc2 = NULL;
+
+ mmUchar w;
+ mmUint16 accum_1Cb, accum_1Cr;
+ //mmUint32 accum_WCb, accum_WCr;
+
+ x = (mmUint16) ((mmUint32) (col*resizeFactorX) >> 9);
+ xf = (mmUchar) ((mmUint32) ((col*resizeFactorX) >> 6) & 0x7);
+
+ //accum_WCb = accum_WCr = 0;
+ accum_1Cb = accum_1Cr = 0;
+
+ pu8Cbc1 = pu8Cbr1 + (x*2);
+ pu8Cbc2 = pu8Cbr2 + (x*2);
+ pu8Crc1 = pu8Crr1 + (x*2);
+ pu8Crc2 = pu8Crr2 + (x*2);
+
+ /* A pixel */
+ w = bWeights[xf][yf][0];
+
+ in11 = *(pu8Cbc1);
+ accum_1Cb = (w * in11);
+ // accum_WCb += (w);
+
+ in11 = *(pu8Crc1);
+ accum_1Cr = (w * in11);
+ //accum_WCr += (w);
+
+ /* B pixel */
+ w = bWeights[xf][yf][1];
+
+ in12 = *(pu8Cbc1+2);
+ accum_1Cb += (w * in12);
+ //accum_WCb += (w);
+
+ in12 = *(pu8Crc1+2);
+ accum_1Cr += (w * in12);
+ //accum_WCr += (w);
+
+ /* C pixel */
+ w = bWeights[xf][yf][3];
+
+ in21 = *(pu8Cbc2);
+ accum_1Cb += (w * in21);
+ //accum_WCb += (w);
+
+ in21 = *(pu8Crc2);
+ accum_1Cr += (w * in21);
+ //accum_WCr += (w);
+
+ /* D pixel */
+ w = bWeights[xf][yf][2];
+
+ in22 = *(pu8Cbc2+2);
+ accum_1Cb += (w * in22);
+ //accum_WCb += (w);
+
+ in22 = *(pu8Crc2+2);
+ accum_1Cr += (w * in22);
+ //accum_WCr += (w);
+
+ /* divide by sum of the weights */
+ //accum_1Cb /= (accum_WCb);
+ accum_1Cb = (accum_1Cb>>6);
+ *ptr8Cb = (mmUchar)accum_1Cb ;
+
+ accum_1Cr = (accum_1Cr >> 6);
+ *ptr8Cr = (mmUchar)accum_1Cr ;
+
+ ptr8Cb++;
+ ptr8Cr++;
+
+ ptr8Cb++;
+ ptr8Cr++;
+ }
+ ptr8Cb = ptr8Cb + (o_img_ptr->uStride-codx);
+ ptr8Cr = ptr8Cr + (o_img_ptr->uStride-codx);
+ }
+ ///////////////////For Cb- Cr////////////////////////////////////////
+
+ CAMHAL_LOGV("success");
+ return true;
+}
diff --git a/camera/OmxFrameDecoder.cpp b/camera/OmxFrameDecoder.cpp
new file mode 100644
index 0000000..be794e5
--- /dev/null
+++ b/camera/OmxFrameDecoder.cpp
@@ -0,0 +1,1077 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "ErrorUtils.h"
+#include "OmxFrameDecoder.h"
+#include "OMX_TI_IVCommon.h"
+#include "OMX_TI_Index.h"
+#include "Decoder_libjpeg.h"
+
+
+namespace Ti {
+namespace Camera {
+
+const static uint32_t kMaxColorFormatSupported = 1000;
+const static int kMaxStateSwitchTimeOut = 1 * 1000 * 1000 * 1000; // 1 sec
+
+static const char* gDecoderRole[2] = {"video_decoder.mjpeg", "video_decoder.avc"};
+static const OMX_VIDEO_CODINGTYPE gCompressionFormat[2] = {OMX_VIDEO_CodingMJPEG, OMX_VIDEO_CodingAVC};
+
+
+template<class T>
+static void InitOMXParams(T *params) {
+ params->nSize = sizeof(T);
+ params->nVersion.s.nVersionMajor = 1;
+ params->nVersion.s.nVersionMinor = 0;
+ params->nVersion.s.nRevision = 0;
+ params->nVersion.s.nStep = 0;
+}
+
+
+
+CallbackDispatcher::CallbackDispatcher()
+: mDone(false) {
+ mThread = new CallbackDispatcherThread(this);
+ mThread->run("OMXCallbackDisp", ANDROID_PRIORITY_FOREGROUND);
+}
+
+CallbackDispatcher::~CallbackDispatcher() {
+ {
+ android::Mutex::Autolock autoLock(mLock);
+
+ mDone = true;
+ mQueueChanged.signal();
+ }
+
+ status_t status = mThread->join();
+ if (status != WOULD_BLOCK) {
+ //CAMHAL_ASSERT(status, (status_t)NO_ERROR);
+ }
+}
+
+void CallbackDispatcher::post(const OmxMessage &msg) {
+ android::Mutex::Autolock autoLock(mLock);
+
+ mQueue.push_back(msg);
+ mQueueChanged.signal();
+}
+
+void CallbackDispatcher::dispatch(const OmxMessage &msg) {
+
+ switch(msg.type)
+ {
+ case OmxMessage::EVENT :
+ {
+ static_cast<OmxFrameDecoder*>(msg.u.eventData.appData)->eventHandler(msg.u.eventData.event, msg.u.eventData.data1, msg.u.eventData.data2, msg.u.eventData.pEventData);
+ break;
+ }
+
+ case OmxMessage::EMPTY_BUFFER_DONE:
+ {
+ static_cast<OmxFrameDecoder*>(msg.u.bufferData.appData)->emptyBufferDoneHandler(msg.u.bufferData.pBuffHead);
+ break;
+ }
+
+ case OmxMessage::FILL_BUFFER_DONE:
+ {
+ static_cast<OmxFrameDecoder*>(msg.u.bufferData.appData)->fillBufferDoneHandler(msg.u.bufferData.pBuffHead);
+ break;
+ }
+ };
+}
+
+bool CallbackDispatcher::loop() {
+ for (;;) {
+ OmxMessage msg;
+
+ {
+ android::Mutex::Autolock autoLock(mLock);
+ while (!mDone && mQueue.empty()) {
+ mQueueChanged.wait(mLock);
+ }
+
+ if (mDone) {
+ break;
+ }
+
+ msg = *mQueue.begin();
+ mQueue.erase(mQueue.begin());
+ }
+
+ dispatch(msg);
+ }
+
+ return false;
+}
+
+bool CallbackDispatcherThread::threadLoop() {
+ return mDispatcher->loop();
+}
+
+//Static
+OMX_ERRORTYPE OmxFrameDecoder::eventCallback(const OMX_HANDLETYPE component,
+ const OMX_PTR appData, const OMX_EVENTTYPE event, const OMX_U32 data1, const OMX_U32 data2,
+ const OMX_PTR pEventData) {
+ OmxMessage msg;
+ msg.type = OmxMessage::EVENT;
+ msg.u.eventData.appData = appData;
+ msg.u.eventData.event = event;
+ msg.u.eventData.data1 = data1;
+ msg.u.eventData.data2 = data2;
+ ((OmxFrameDecoder *)appData)->mDispatcher.post(msg);
+ return OMX_ErrorNone;
+}
+
+//Static
+OMX_ERRORTYPE OmxFrameDecoder::emptyBufferDoneCallback(OMX_HANDLETYPE hComponent,
+ OMX_PTR appData, OMX_BUFFERHEADERTYPE* pBuffHead) {
+ OmxMessage msg;
+ msg.type = OmxMessage::EMPTY_BUFFER_DONE;
+ msg.u.bufferData.appData = appData;
+ msg.u.bufferData.pBuffHead = pBuffHead;
+ ((OmxFrameDecoder *)appData)->mDispatcher.post(msg);
+ return OMX_ErrorNone;
+}
+
+//Static
+OMX_ERRORTYPE OmxFrameDecoder::fillBufferDoneCallback(OMX_HANDLETYPE hComponent,
+ OMX_PTR appData, OMX_BUFFERHEADERTYPE* pBuffHead) {
+ OmxMessage msg;
+ msg.type = OmxMessage::FILL_BUFFER_DONE;
+ msg.u.bufferData.appData = appData;
+ msg.u.bufferData.pBuffHead = pBuffHead;
+ ((OmxFrameDecoder *)appData)->mDispatcher.post(msg);
+ return OMX_ErrorNone;
+}
+
+OmxFrameDecoder::OmxFrameDecoder(DecoderType type)
+ : mOmxInialized(false), mCurrentState(OmxDecoderState_Unloaded), mPreviousState(OmxDecoderState_Unloaded),
+ mStopping(false), mDecoderType(type), mIsNeedCheckDHT(true), mAlwaysAppendDHT(false) {
+}
+
+OmxFrameDecoder::~OmxFrameDecoder() {
+}
+
+OMX_ERRORTYPE OmxFrameDecoder::emptyBufferDoneHandler(OMX_BUFFERHEADERTYPE* pBuffHead) {
+ LOG_FUNCTION_NAME;
+ android::AutoMutex lock(mHwLock);
+
+ int bufferIndex = reinterpret_cast<int>(pBuffHead->pAppPrivate);
+ CAMHAL_LOGD("Got header %p id = %d", pBuffHead, bufferIndex);
+ android::sp<MediaBuffer>& in = mInBuffers->editItemAt(bufferIndex);
+
+ android::AutoMutex itemLock(in->getLock());
+ in->setStatus((getOmxState() == OmxDecoderState_Executing) ? BufferStatus_InDecoded : BufferStatus_InQueued);
+
+ return OMX_ErrorNone;
+}
+
+OMX_ERRORTYPE OmxFrameDecoder::fillBufferDoneHandler(OMX_BUFFERHEADERTYPE* pBuffHead) {
+ LOG_FUNCTION_NAME;
+ android::AutoMutex lock(mHwLock);
+
+ int index = (int)pBuffHead->pAppPrivate;
+ android::sp<MediaBuffer>& out = mOutBuffers->editItemAt(index);
+
+ android::AutoMutex itemLock(out->getLock());
+ CameraBuffer* frame = static_cast<CameraBuffer*>(out->buffer);
+ out->setOffset(pBuffHead->nOffset);
+ out->setTimestamp(pBuffHead->nTimeStamp);
+ out->setStatus((getOmxState() == OmxDecoderState_Executing) ? BufferStatus_OutFilled : BufferStatus_OutQueued);
+
+ return OMX_ErrorNone;
+}
+
+OMX_ERRORTYPE OmxFrameDecoder::eventHandler(const OMX_EVENTTYPE event, const OMX_U32 data1, const OMX_U32 data2,
+ const OMX_PTR pEventData) {
+
+ LOG_FUNCTION_NAME;
+
+ OMX_ERRORTYPE ret = OMX_ErrorNone;
+ android::AutoMutex lock(mHwLock);
+
+ switch(event) {
+
+ case OMX_EventCmdComplete:
+ {
+ if ((data1 == OMX_CommandStateSet) && (data2 == OMX_StateIdle)) {
+ CAMHAL_LOGD("Component State Changed To OMX_StateIdle\n");
+ commitState(OmxDecoderState_Idle);
+ mStateCondition.signal();
+ }
+ else if ((data1 == OMX_CommandStateSet) && (data2 == OMX_StateExecuting)) {
+ CAMHAL_LOGD("Component State Changed To OMX_StateExecuting\n");
+ commitState(OmxDecoderState_Executing);
+ mStateCondition.signal();
+ }
+ else if ((data1 == OMX_CommandStateSet) && (data2 == OMX_StateLoaded)) {
+ CAMHAL_LOGD("Component State Changed To OMX_StateLoaded\n");
+ if(getOmxState() == OmxDecoderState_Executing)
+ commitState(OmxDecoderState_Loaded);
+ mStateCondition.signal();
+ }
+ else if (data1 == OMX_CommandFlush) {
+ CAMHAL_LOGD("OMX_CommandFlush done on %d port\n", data2);
+ mStateCondition.signal();
+ }
+ else if (data1 == OMX_CommandPortDisable) {
+ CAMHAL_LOGD("OMX_CommandPortDisable done on %d port\n", data2);
+ mStateCondition.signal();
+ }
+ else if (data1 == OMX_CommandPortEnable) {
+ CAMHAL_LOGD("OMX_CommandPortEnable done on %d port\n", data2);
+ mStateCondition.signal();
+ } else {
+ CAMHAL_LOGD("Event %d done on %d port\n", data1, data2);
+ }
+ break;
+ }
+ case OMX_EventError:
+ {
+ CAMHAL_LOGD("\n\n\nOMX Component reported an Error!!!! 0x%x 0x%x\n\n\n", data1, data2);
+ commitState(OmxDecoderState_Error);
+ omxSendCommand(OMX_CommandStateSet, OMX_StateInvalid);
+ mStateCondition.signal();
+ break;
+ }
+ case OMX_EventPortSettingsChanged:
+ {
+ CAMHAL_LOGD("\n\n\nOMX_EventPortSettingsChanged(port=%ld, data2=0x%08lx)\n\n\n",
+ data1, data2);
+ if (data2 == 0) {
+ // This means that some serious change to port happens
+ commitState(OmxDecoderState_Reconfigure);
+ } else if (data2 == OMX_IndexConfigCommonOutputCrop) {
+#if 0
+ OMX_CONFIG_RECTTYPE rect;
+ InitOMXParams(&rect);
+ rect.nPortIndex = PortIndexOutput;
+ status_t ret = omxGetConfig(OMX_IndexConfigCommonOutputCrop, &rect);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("Can't get new crop parameters 0x%x", ret);
+ break;
+ }
+
+ CAMHAL_LOGV("Crop should change to %d %d %d %d", rect.nLeft, rect.nTop, rect.nLeft + rect.nWidth, rect.nTop + rect.nHeight);
+#endif
+ }
+ break;
+ }
+ default:
+ {
+ CAMHAL_LOGD("\n\n\nOMX Unhandelled event ID=0x%x!!!!\n\n\n", event);
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+ }
+
+void OmxFrameDecoder::doConfigure(const DecoderParameters& config) {
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+status_t OmxFrameDecoder::enableGrallockHandles() {
+ OMX_TI_PARAMUSENATIVEBUFFER domxUseGrallocHandles;
+ InitOMXParams(&domxUseGrallocHandles);
+
+ domxUseGrallocHandles.nPortIndex = PortIndexOutput;
+ domxUseGrallocHandles.bEnable = OMX_TRUE;
+
+ return omxSetParameter((OMX_INDEXTYPE)OMX_TI_IndexUseNativeBuffers, &domxUseGrallocHandles);
+}
+
+status_t OmxFrameDecoder::omxSwitchToExecutingSync() {
+ CAMHAL_LOGV("Try set OMX_StateExecuting");
+ android::AutoMutex lock(mHwLock);
+ omxSendCommand(OMX_CommandStateSet, OMX_StateExecuting);
+ status_t ret = mStateCondition.waitRelative(mHwLock, kMaxStateSwitchTimeOut);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("State transition to EXECUTING ERROR 0x%x", ret);
+ return UNKNOWN_ERROR;
+ }
+ return NO_ERROR;
+}
+
+void OmxFrameDecoder::dumpPortSettings(PortType port) {
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ InitOMXParams(&def);
+ def.nPortIndex = port;
+ omxGetParameter(OMX_IndexParamPortDefinition, &def);
+ omxDumpPortSettings(def);
+}
+
+status_t OmxFrameDecoder::disablePortSync(int port) {
+ OMX_ERRORTYPE eError;
+ android::AutoMutex lock(mHwLock);
+ eError = OMX_SendCommand(mHandleComp, OMX_CommandPortDisable, port, NULL);
+ if (eError != OMX_ErrorNone) {
+ CAMHAL_LOGE("OMX_CommandPortDisable OMX_ALL returned error 0x%x", eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+ }
+ status_t ret = mStateCondition.waitRelative(mHwLock, kMaxStateSwitchTimeOut);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("State transition to OMX_StateLoaded ERROR 0x%x", ret);
+ return UNKNOWN_ERROR;
+ }
+ return NO_ERROR;
+}
+
+status_t OmxFrameDecoder::enablePortSync(int port) {
+ android::AutoMutex lock(mHwLock);
+ OMX_ERRORTYPE eError = OMX_SendCommand(mHandleComp, OMX_CommandPortEnable, port, NULL);
+ status_t ret = mStateCondition.waitRelative(mHwLock, kMaxStateSwitchTimeOut);
+ if (eError != OMX_ErrorNone) {
+ CAMHAL_LOGE("OMX_SendCommand OMX_CommandPortEnable OUT returned error 0x%x", eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+ }
+ return NO_ERROR;
+}
+
+
+status_t OmxFrameDecoder::doPortReconfigure() {
+ OMX_ERRORTYPE eError;
+ status_t ret = NO_ERROR;
+
+ CAMHAL_LOGD("Starting port reconfiguration !");
+ dumpPortSettings(PortIndexInput);
+ dumpPortSettings(PortIndexOutput);
+
+ android::AutoMutex lock(mHwLock);
+
+ omxSendCommand(OMX_CommandFlush, PortIndexOutput);
+ ret = mStateCondition.waitRelative(mHwLock, kMaxStateSwitchTimeOut);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("State transition to OMX_CommandFlush ERROR 0x%x", ret);
+ return UNKNOWN_ERROR;
+ }
+
+ omxSendCommand(OMX_CommandFlush, PortIndexInput);
+ ret = mStateCondition.waitRelative(mHwLock, kMaxStateSwitchTimeOut);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("State transition to OMX_CommandFlush ERROR 0x%x", ret);
+ return UNKNOWN_ERROR;
+ }
+
+ ret = omxSendCommand(OMX_CommandPortDisable, PortIndexOutput);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("OMX_CommandPortDisable PortIndexOutput returned error 0x%x", ret);
+ return ret;
+ }
+
+ freeBuffersOnOutput();
+
+ ret = mStateCondition.waitRelative(mHwLock, kMaxStateSwitchTimeOut);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("State transition to OMX_StateLoaded ERROR 0x%x", ret);
+ return UNKNOWN_ERROR;
+ }
+
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ InitOMXParams(&def);
+ def.nPortIndex = PortIndexOutput;
+ omxGetParameter(OMX_IndexParamPortDefinition, &def);
+ def.nBufferCountActual = mParams.outputBufferCount;
+ CAMHAL_LOGD("Will set def.nBufferSize=%d stride=%d height=%d", def.nBufferSize , def.format.video.nStride, def.format.video.nFrameHeight);
+ omxSetParameter(OMX_IndexParamPortDefinition, &def);
+
+
+
+ ret = omxSendCommand(OMX_CommandPortEnable, PortIndexOutput);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("omxSendCommand OMX_CommandPortEnable returned error 0x%x", ret);
+ return ret;
+ }
+
+ allocateBuffersOutput();
+
+ ret = mStateCondition.waitRelative(mHwLock, kMaxStateSwitchTimeOut);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("omxSendCommand OMX_CommandPortEnable timeout 0x%x", ret);
+ return UNKNOWN_ERROR;
+ }
+
+ CAMHAL_LOGD("Port reconfiguration DONE!");
+ //dumpPortSettings(PortIndexOutput);
+
+ return NO_ERROR;
+}
+
+void OmxFrameDecoder::queueOutputBuffers() {
+
+ LOG_FUNCTION_NAME;
+
+ android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
+
+ for (size_t i = 0; i < mOutQueue.size(); i++) {
+ int index = mOutQueue[i];
+ android::sp<MediaBuffer> &outBuffer = mOutBuffers->editItemAt(index);
+ android::AutoMutex bufferLock(outBuffer->getLock());
+ if (outBuffer->getStatus() == BufferStatus_OutQueued) {
+ outBuffer->setStatus(BufferStatus_OutWaitForFill);
+ CameraBuffer* frame = static_cast<CameraBuffer*>(outBuffer->buffer);
+ OMX_BUFFERHEADERTYPE *pOutBufHdr = mOutBufferHeaders[outBuffer->bufferId];
+ CAMHAL_LOGV("Fill this buffer cf=%p bh=%p id=%d", frame, pOutBufHdr, outBuffer->bufferId);
+ status_t status = omxFillThisBuffer(pOutBufHdr);
+ CAMHAL_ASSERT(status == NO_ERROR);
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void OmxFrameDecoder::doProcessInputBuffer() {
+
+ LOG_FUNCTION_NAME;
+
+ if (getOmxState() == OmxDecoderState_Reconfigure) {
+ if (doPortReconfigure() == NO_ERROR) {
+ commitState(OmxDecoderState_Executing);
+ queueOutputBuffers();
+ } else {
+ commitState(OmxDecoderState_Error);
+ return;
+ }
+
+ }
+
+ if (getOmxState() == OmxDecoderState_Idle) {
+ CAMHAL_ASSERT(omxSwitchToExecutingSync() == NO_ERROR);
+ queueOutputBuffers();
+ }
+
+ if (getOmxState() == OmxDecoderState_Executing) {
+ for (size_t i = 0; i < mInQueue.size(); i++) {
+ int index = mInQueue[i];
+ CAMHAL_LOGD("Got in inqueue[%d] buffer id=%d", i, index);
+ android::sp<MediaBuffer> &inBuffer = mInBuffers->editItemAt(index);
+ android::AutoMutex bufferLock(inBuffer->getLock());
+ if (inBuffer->getStatus() == BufferStatus_InQueued) {
+ OMX_BUFFERHEADERTYPE *pInBufHdr = mInBufferHeaders[index];
+ inBuffer->setStatus(BufferStatus_InWaitForEmpty);
+ omxEmptyThisBuffer(inBuffer, pInBufHdr);
+ }
+ }
+ queueOutputBuffers();
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+status_t OmxFrameDecoder::omxInit() {
+
+ LOG_FUNCTION_NAME;
+
+ OMX_ERRORTYPE eError = OMX_Init();
+ if (eError != OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_Init() failed, error: 0x%x", eError);
+ }
+ else mOmxInialized = true;
+
+ LOG_FUNCTION_NAME_EXIT;
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+status_t OmxFrameDecoder::omxFillThisBuffer(OMX_BUFFERHEADERTYPE *pOutBufHdr) {
+ OMX_ERRORTYPE eError = OMX_ErrorUndefined;
+
+ pOutBufHdr->nFilledLen = 0;
+ pOutBufHdr->nOffset = 0;
+ pOutBufHdr->nFlags = 0;
+
+ eError = OMX_FillThisBuffer(mHandleComp, pOutBufHdr);
+ if (eError != OMX_ErrorNone) {
+ CAMHAL_LOGE("OMX_FillThisBuffer ERROR 0x%x", eError);
+ }
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+
+status_t OmxFrameDecoder::omxGetHandle(OMX_HANDLETYPE *handle, OMX_PTR pAppData,
+ OMX_CALLBACKTYPE & callbacks) {
+ LOG_FUNCTION_NAME;
+
+ OMX_ERRORTYPE eError = OMX_ErrorUndefined;
+
+ eError = OMX_GetHandle(handle, (OMX_STRING)"OMX.TI.DUCATI1.VIDEO.DECODER", pAppData, &callbacks);
+ if((eError != OMX_ErrorNone) || (handle == NULL)) {
+ handle = NULL;
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+ }
+ commitState(OmxDecoderState_Loaded);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+status_t OmxFrameDecoder::omxEmptyThisBuffer(android::sp<MediaBuffer>& inBuffer, OMX_BUFFERHEADERTYPE *pInBufHdr) {
+
+ LOG_FUNCTION_NAME;
+
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+ InitOMXParams(&def);
+ def.nPortIndex = PortIndexInput;
+ omxGetParameter(OMX_IndexParamPortDefinition, &def);
+ CAMHAL_LOGD("Founded id for empty is %d ", inBuffer->bufferId);
+ if (inBuffer->filledLen > def.nBufferSize) {
+ CAMHAL_LOGE("Can't copy IN buffer due to it too small %d than needed %d", def.nBufferSize, inBuffer->filledLen);
+ return UNKNOWN_ERROR;
+ }
+
+ int filledLen = inBuffer->filledLen;
+ unsigned char* dataBuffer = reinterpret_cast<unsigned char*>(inBuffer->buffer);
+
+ //If decoder type MJPEG we check if append DHT forced and if true append it
+ //in other case we check mIsNeedCheckDHT and if true search for DHT in buffer
+ //if we don't found it - will do append
+ //once we find that buffer not contain DHT we will append it each time
+ if ((mDecoderType == DecoderType_MJPEG) && ((mAlwaysAppendDHT) || ((mIsNeedCheckDHT) &&
+ (mIsNeedCheckDHT = !Decoder_libjpeg::isDhtExist(dataBuffer, filledLen))))) {
+ CAMHAL_LOGV("Will append DHT to buffer");
+ Decoder_libjpeg::appendDHT(dataBuffer, filledLen, pInBufHdr->pBuffer, filledLen + Decoder_libjpeg::readDHTSize());
+ filledLen += Decoder_libjpeg::readDHTSize();
+ mIsNeedCheckDHT = false;
+ mAlwaysAppendDHT = true;
+ } else {
+ memcpy(pInBufHdr->pBuffer, dataBuffer, filledLen);
+ }
+
+ CAMHAL_LOGV("Copied %d bytes into In buffer with bh=%p", filledLen, pInBufHdr);
+ CAMHAL_LOGV("Empty this buffer id=%d timestamp %lld offset=%d", inBuffer->bufferId, pInBufHdr->nTimeStamp, pInBufHdr->nOffset);
+ pInBufHdr->nFilledLen = filledLen;
+ pInBufHdr->nTimeStamp = inBuffer->getTimestamp();
+ pInBufHdr->nFlags = 16;
+ pInBufHdr->nOffset = 0;
+ eError = OMX_EmptyThisBuffer(mHandleComp, pInBufHdr);
+ if (eError != OMX_ErrorNone) {
+ CAMHAL_LOGE("OMX_EmptyThisBuffer ERROR 0x%x", eError);
+ Utils::ErrorUtils::omxToAndroidError(eError);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+}
+
+
+status_t OmxFrameDecoder::allocateBuffersOutput() {
+ LOG_FUNCTION_NAME;
+
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ InitOMXParams(&def);
+ def.nPortIndex = PortIndexOutput;
+ omxGetParameter(OMX_IndexParamPortDefinition, &def);
+ def.nBufferCountActual = mParams.outputBufferCount;
+
+ CAMHAL_LOGD("Will set def.nBufferSize=%d stride=%d height=%d", def.nBufferSize , def.format.video.nStride, def.format.video.nFrameHeight);
+
+ OMX_BUFFERHEADERTYPE *pOutBufHdr;
+ mOutBufferHeaders.clear();
+ for (size_t i = 0; i < mOutBuffers->size(); i++) {
+ android::sp<MediaBuffer>& outBuffer = mOutBuffers->editItemAt(i);
+ android::AutoMutex lock(outBuffer->getLock());
+ CameraBuffer* cb = static_cast<CameraBuffer*>(outBuffer->buffer);
+ OMX_U8 * outPtr = static_cast<OMX_U8*>(camera_buffer_get_omx_ptr(cb));
+ CAMHAL_LOGV("Try to set OMX_UseBuffer [0x%x] for output port with length %d ", outPtr, def.nBufferSize);
+ eError = OMX_UseBuffer(mHandleComp, &pOutBufHdr, PortIndexOutput, (void*)i, def.nBufferSize, outPtr);
+
+ if (eError != OMX_ErrorNone) {
+ ALOGE("OMX_UseBuffer failed with error %d (0x%08x)", eError, eError);
+ commitState(OmxDecoderState_Error);
+ return UNKNOWN_ERROR;
+ }
+
+ CAMHAL_LOGD("Got buffer header %p", pOutBufHdr);
+ mOutBufferHeaders.add(pOutBufHdr);
+ }
+
+ omxDumpPortSettings(def);
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_ERROR;
+
+}
+
+status_t OmxFrameDecoder::allocateBuffersInput() {
+ LOG_FUNCTION_NAME;
+
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ OMX_BUFFERHEADERTYPE *pInBufHdr;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+ InitOMXParams(&def);
+ def.nPortIndex = PortIndexInput;
+ omxGetParameter(OMX_IndexParamPortDefinition, &def);
+
+ // TODO: Will be changed since port reconfiguration will be handled
+ def.nBufferCountActual = mInBuffers->size();
+ def.bEnabled = OMX_TRUE;
+ omxSetParameter(OMX_IndexParamPortDefinition, &def);
+
+ mInBufferHeaders.clear();
+
+ for (size_t i = 0; i < mInBuffers->size(); i++) {
+ CAMHAL_LOGD("Will do OMX_AllocateBuffer for input port with size %d id=%d", def.nBufferSize, i);
+ eError = OMX_AllocateBuffer(mHandleComp, &pInBufHdr, PortIndexInput, (void*)i, def.nBufferSize);
+ if (eError != OMX_ErrorNone) {
+ ALOGE("OMX_AllocateBuffer failed with error %d (0x%08x)", eError, eError);
+ commitState(OmxDecoderState_Error);
+ return UNKNOWN_ERROR;
+ }
+ CAMHAL_LOGD("Got new buffer header [%p] for IN port", pInBufHdr);
+ mInBufferHeaders.push_back(pInBufHdr);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_ERROR;
+}
+
+status_t OmxFrameDecoder::getAndConfigureDecoder() {
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError;
+
+ ret = omxInit();
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("OMX_Init returned error 0x%x", ret);
+ return ret;
+ }
+ OMX_CALLBACKTYPE callbacks;
+ callbacks.EventHandler = OmxFrameDecoder::eventCallback;
+ callbacks.EmptyBufferDone = OmxFrameDecoder::emptyBufferDoneCallback;
+ callbacks.FillBufferDone = OmxFrameDecoder::fillBufferDoneCallback;
+ ret = omxGetHandle(&mHandleComp, this, callbacks);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("OMX_GetHandle returned error 0x%x", ret);
+ OMX_Deinit();
+ mOmxInialized = false;
+ return ret;
+ }
+ ret = setComponentRole();
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("setComponentRole returned error 0x%x", ret);
+ OMX_Deinit();
+ mOmxInialized = false;
+ return ret;
+ }
+ disablePortSync(PortIndexOutput);
+ ret = setVideoOutputFormat(mParams.width, mParams.height);
+ enablePortSync(PortIndexOutput);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("Can't set output format error 0x%x", ret);
+ OMX_Deinit();
+ mOmxInialized = false;
+ return ret;
+ }
+ enableGrallockHandles();
+ return NO_ERROR;
+}
+
+status_t OmxFrameDecoder::switchToIdle() {
+ CAMHAL_ASSERT(getOmxState() == OmxDecoderState_Loaded);
+ CAMHAL_LOGD("Try set OMX_StateIdle");
+ android::AutoMutex lock(mHwLock);
+ status_t ret = omxSendCommand(OMX_CommandStateSet, OMX_StateIdle);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("Can't omxSendCommandt error 0x%x", ret);
+ OMX_Deinit();
+ mOmxInialized = false;
+ return ret;
+ }
+
+ allocateBuffersInput();
+
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ InitOMXParams(&def);
+ def.nPortIndex = PortIndexOutput;
+ omxGetParameter(OMX_IndexParamPortDefinition, &def);
+ def.nBufferCountActual = mParams.outputBufferCount;
+ omxSetParameter(OMX_IndexParamPortDefinition, &def);
+
+ allocateBuffersOutput();
+
+ ret = mStateCondition.waitRelative(mHwLock, kMaxStateSwitchTimeOut);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("State transition to IDLE ERROR 0x%x", ret);
+ return ret;
+ }
+ commitState(OmxDecoderState_Idle);
+ return NO_ERROR;
+}
+
+status_t OmxFrameDecoder::doStart() {
+ LOG_FUNCTION_NAME;
+
+ status_t ret = NO_ERROR;
+ mStopping = false;
+ OMX_ERRORTYPE eError;
+
+ ret = getAndConfigureDecoder();
+
+#if 0
+ OMX_TI_PARAM_ENHANCEDPORTRECONFIG tParamStruct;
+ tParamStruct.nSize = sizeof(OMX_TI_PARAM_ENHANCEDPORTRECONFIG);
+ tParamStruct.nVersion.s.nVersionMajor = 0x1;
+ tParamStruct.nVersion.s.nVersionMinor = 0x1;
+ tParamStruct.nVersion.s.nRevision = 0x0;
+ tParamStruct.nVersion.s.nStep = 0x0;
+ tParamStruct.nPortIndex = PortIndexOutput;
+ tParamStruct.bUsePortReconfigForCrop = OMX_TRUE;
+ tParamStruct.bUsePortReconfigForPadding = OMX_FALSE;
+ omxSetParameter((OMX_INDEXTYPE)OMX_TI_IndexParamUseEnhancedPortReconfig, &tParamStruct);
+#endif
+
+ // Transition to IDLE
+ ret = switchToIdle();
+ dumpPortSettings(PortIndexInput);
+ dumpPortSettings(PortIndexOutput);
+
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+status_t OmxFrameDecoder::omxGetParameter(OMX_INDEXTYPE index, OMX_PTR ptr) {
+ OMX_ERRORTYPE eError = OMX_GetParameter(mHandleComp, index, ptr);
+ if(eError != OMX_ErrorNone) {
+ CAMHAL_LOGE("OMX_GetParameter - error 0x%x", eError);
+ }
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+status_t OmxFrameDecoder::omxGetConfig(OMX_INDEXTYPE index, OMX_PTR ptr) {
+ OMX_ERRORTYPE eError = OMX_GetConfig(mHandleComp, index, ptr);
+ if(eError != OMX_ErrorNone) {
+ CAMHAL_LOGE("OMX_GetConfig - error 0x%x", eError);
+ }
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+status_t OmxFrameDecoder::omxSetParameter(OMX_INDEXTYPE index, OMX_PTR ptr) {
+ OMX_ERRORTYPE eError = OMX_SetParameter(mHandleComp, index, ptr);
+ if(eError != OMX_ErrorNone) {
+ CAMHAL_LOGE("OMX_SetParameter - error 0x%x", eError);
+ }
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+status_t OmxFrameDecoder::omxSetConfig(OMX_INDEXTYPE index, OMX_PTR ptr) {
+ OMX_ERRORTYPE eError = OMX_SetConfig(mHandleComp, index, ptr);
+ if(eError != OMX_ErrorNone) {
+ CAMHAL_LOGE("OMX_SetConfig - error 0x%x", eError);
+ }
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+// Issues an OMX command (state change, port enable/disable, ...) with no
+// command data, logging failures and converting the result to status_t.
+status_t OmxFrameDecoder::omxSendCommand(OMX_COMMANDTYPE cmd, OMX_S32 param) {
+    const OMX_ERRORTYPE omxError = OMX_SendCommand(mHandleComp, cmd, param, NULL);
+    if (omxError != OMX_ErrorNone) {
+        CAMHAL_LOGE("OMX_SendCommand - error 0x%x", omxError);
+    }
+    return Utils::ErrorUtils::omxToAndroidError(omxError);
+}
+
+// Configures both ports of the video decoder for a width x height stream:
+// input port carries the compressed format, output port the raw frames.
+// Returns NO_ERROR on success or the first OMX error encountered.
+status_t OmxFrameDecoder::setVideoOutputFormat(OMX_U32 width, OMX_U32 height) {
+    LOG_FUNCTION_NAME;
+
+    // OMX_U32 is not necessarily long - cast for a portable format specifier.
+    CAMHAL_LOGV("setVideoOutputFormat width=%u, height=%u",
+            (unsigned int)width, (unsigned int)height);
+
+    OMX_VIDEO_CODINGTYPE compressionFormat = gCompressionFormat[mDecoderType];
+
+    // Input port only needs the coding type; color format is unused there.
+    status_t err = setVideoPortFormatType(
+            PortIndexInput, compressionFormat, OMX_COLOR_FormatUnused);
+    if (err != NO_ERROR) {
+        CAMHAL_LOGE("Error during setVideoPortFormatType 0x%x", err);
+        return err;
+    }
+
+    OMX_PARAM_PORTDEFINITIONTYPE def;
+    InitOMXParams(&def);
+    def.nPortIndex = PortIndexInput;
+
+    OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
+
+    // Read-modify-write the input port definition with the frame geometry.
+    err = omxGetParameter(OMX_IndexParamPortDefinition, &def);
+    if (err != NO_ERROR) {
+        return err;
+    }
+
+    video_def->nFrameWidth = width;
+    video_def->nFrameHeight = height;
+    video_def->eCompressionFormat = compressionFormat;
+    video_def->eColorFormat = OMX_COLOR_FormatUnused;
+
+    // Checked against NO_ERROR consistently (the old code compared to OK here
+    // and NO_ERROR elsewhere).
+    err = omxSetParameter(OMX_IndexParamPortDefinition, &def);
+    if (err != NO_ERROR) {
+        return err;
+    }
+
+    // Output (raw) port: same geometry, fixed stride expected by downstream.
+    OMX_PARAM_PORTDEFINITIONTYPE odef;
+    OMX_VIDEO_PORTDEFINITIONTYPE *out_video_def = &odef.format.video;
+
+    InitOMXParams(&odef);
+    odef.nPortIndex = PortIndexOutput;
+
+    err = omxGetParameter(OMX_IndexParamPortDefinition, &odef);
+    if (err != NO_ERROR) {
+        return err;
+    }
+
+    out_video_def->nFrameWidth = width;
+    out_video_def->nFrameHeight = height;
+    out_video_def->xFramerate = 30 << 16;   // Q16 fps; ((width >= 720) ? 60 : 30) << 16
+    out_video_def->nStride = 4096;
+
+    // Verify the set actually succeeded before claiming the port configured.
+    err = omxSetParameter(OMX_IndexParamPortDefinition, &odef);
+    if (err != NO_ERROR) {
+        CAMHAL_LOGE("Error configuring OUT port 0x%x", err);
+        return err;
+    }
+
+    CAMHAL_LOGD("OUT port is configured");
+    dumpPortSettings(PortIndexOutput);
+
+    LOG_FUNCTION_NAME_EXIT;
+    return err;
+}
+
+// Enumerates the component's supported (compression, color) format pairs on
+// the given port and selects the requested one. Returns UNKNOWN_ERROR if the
+// pair is not offered by the component.
+status_t OmxFrameDecoder::setVideoPortFormatType(
+        OMX_U32 portIndex,
+        OMX_VIDEO_CODINGTYPE compressionFormat,
+        OMX_COLOR_FORMATTYPE colorFormat) {
+
+    LOG_FUNCTION_NAME;
+
+    OMX_VIDEO_PARAM_PORTFORMATTYPE format;
+    InitOMXParams(&format);
+    format.nPortIndex = portIndex;
+
+    // Walk the component's format enumeration. The old loop ignored the
+    // OMX_GetParameter result, so a failed query compared stale data; it
+    // also carried a dead `found` flag.
+    for (OMX_U32 index = 0; index < kMaxColorFormatSupported; ++index) {
+        CAMHAL_LOGV("Will check index = %d", index);
+        format.nIndex = index;
+        OMX_ERRORTYPE eError = OMX_GetParameter(
+                mHandleComp, OMX_IndexParamVideoPortFormat,
+                &format);
+        if (eError != OMX_ErrorNone) {
+            // OMX_ErrorNoMore (or any failure) ends the enumeration.
+            break;
+        }
+
+        CAMHAL_LOGV("format.eCompressionFormat=0x%x format.eColorFormat=0x%x", format.eCompressionFormat, format.eColorFormat);
+
+        if (format.eCompressionFormat == compressionFormat
+                && format.eColorFormat == colorFormat) {
+            CAMHAL_LOGV("found a match.");
+            OMX_ERRORTYPE setError = OMX_SetParameter(
+                    mHandleComp, OMX_IndexParamVideoPortFormat,
+                    &format);
+
+            LOG_FUNCTION_NAME_EXIT;
+            return Utils::ErrorUtils::omxToAndroidError(setError);
+        }
+    }
+
+    CAMHAL_LOGE("color format %d or compression format %d is not supported",
+            colorFormat, compressionFormat);
+    return UNKNOWN_ERROR;
+}
+
+// Assigns the standard OMX decoder role that matches this instance's stream
+// type (e.g. a JPEG vs H.264 decoder role).
+status_t OmxFrameDecoder::setComponentRole() {
+    OMX_PARAM_COMPONENTROLETYPE roleParams;
+    InitOMXParams(&roleParams);
+
+    const char *role = gDecoderRole[mDecoderType];
+    // Bounded copy with guaranteed NUL termination.
+    strncpy(reinterpret_cast<char *>(roleParams.cRole), role,
+            OMX_MAX_STRINGNAME_SIZE - 1);
+    roleParams.cRole[OMX_MAX_STRINGNAME_SIZE - 1] = '\0';
+
+    return omxSetParameter(OMX_IndexParamStandardComponentRole, &roleParams);
+}
+
+// Returns every buffer header allocated on the output port back to the OMX
+// component and forgets them.
+void OmxFrameDecoder::freeBuffersOnOutput() {
+    LOG_FUNCTION_NAME;
+    const size_t count = mOutBufferHeaders.size();
+    for (size_t idx = 0; idx < count; ++idx) {
+        OMX_BUFFERHEADERTYPE* const header = mOutBufferHeaders[idx];
+        CAMHAL_LOGD("Freeing OUT buffer header %p", header);
+        OMX_FreeBuffer(mHandleComp, PortIndexOutput, header);
+    }
+    mOutBufferHeaders.clear();
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+// Returns every buffer header allocated on the input port back to the OMX
+// component and forgets them.
+void OmxFrameDecoder::freeBuffersOnInput() {
+    LOG_FUNCTION_NAME;
+    const size_t count = mInBufferHeaders.size();
+    for (size_t idx = 0; idx < count; ++idx) {
+        OMX_BUFFERHEADERTYPE* const header = mInBufferHeaders[idx];
+        CAMHAL_LOGD("Freeing IN buffer header %p", header);
+        OMX_FreeBuffer(mHandleComp, PortIndexInput, header);
+    }
+    mInBufferHeaders.clear();
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+// Tears the decoder down by walking the OMX state machine backwards:
+// Executing/Reconfigure -> Idle -> Loaded, freeing port buffers on the way,
+// then releases the component handle. Safe to call from any decoder state;
+// each transition is guarded by a check of the current state.
+void OmxFrameDecoder::doStop() {
+    LOG_FUNCTION_NAME;
+
+    mStopping = true;
+    android::AutoMutex lock(mHwLock);
+
+    CAMHAL_LOGD("HwFrameDecoder::doStop state id=%d", getOmxState());
+
+    if ((getOmxState() == OmxDecoderState_Executing) || (getOmxState() == OmxDecoderState_Reconfigure)) {
+
+        CAMHAL_LOGD("Try set OMX_StateIdle");
+        status_t ret = omxSendCommand(OMX_CommandStateSet, OMX_StateIdle);
+        if (ret != NO_ERROR) {
+            CAMHAL_LOGE("Can't omxSendCommandt error 0x%x", ret);
+        }
+
+        // Wait for the state-change event to be signalled. Note the state is
+        // committed even on timeout so teardown can continue regardless.
+        ret = mStateCondition.waitRelative(mHwLock, kMaxStateSwitchTimeOut);
+        if (ret != NO_ERROR) {
+            CAMHAL_LOGE("State transition to IDLE ERROR 0x%x", ret);
+        }
+        commitState(OmxDecoderState_Idle);
+    }
+
+    if (getOmxState() == OmxDecoderState_Idle) {
+
+        CAMHAL_LOGD("Try set OMX_StateLoaded");
+        status_t ret = omxSendCommand(OMX_CommandStateSet, OMX_StateLoaded);
+        if (ret != NO_ERROR) {
+            CAMHAL_LOGE("Can't omxSendCommandt error 0x%x", ret);
+            return;
+        }
+        // The Idle->Loaded transition completes only once all port buffers
+        // have been returned, so free them before waiting on the condition.
+        freeBuffersOnOutput();
+        freeBuffersOnInput();
+        ret = mStateCondition.waitRelative(mHwLock, kMaxStateSwitchTimeOut);
+        if (ret != NO_ERROR) {
+            CAMHAL_LOGE("State transition to OMX_StateLoaded ERROR 0x%x", ret);
+        }
+        commitState(OmxDecoderState_Loaded);
+
+    }
+
+    // In the error state no orderly transition is possible; just reclaim the
+    // buffers so they are not leaked.
+    if (getOmxState() == OmxDecoderState_Error) {
+        CAMHAL_LOGD("In state ERROR will try to free buffers!");
+        freeBuffersOnOutput();
+        freeBuffersOnInput();
+    }
+
+    CAMHAL_LOGD("Before OMX_FreeHandle ....");
+    OMX_FreeHandle(mHandleComp);
+    CAMHAL_LOGD("After OMX_FreeHandle ....");
+
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+void OmxFrameDecoder::doFlush() {
+    LOG_FUNCTION_NAME;
+    // Flag that the next input frame must be re-inspected for a DHT
+    // (Huffman table) segment - presumably MJPEG streams may omit it and it
+    // must be re-detected after a flush; confirm against the input path.
+    mIsNeedCheckDHT = true;
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+void OmxFrameDecoder::doRelease() {
+    LOG_FUNCTION_NAME;
+    // Intentionally empty: component handle and buffers are released in
+    // doStop().
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+// Diagnostic helper: logs every field of an OMX port definition, including
+// the domain-specific (video or image) sub-structure. Log-only; does not
+// modify the passed definition.
+void OmxFrameDecoder::omxDumpPortSettings(OMX_PARAM_PORTDEFINITIONTYPE& def) {
+    CAMHAL_LOGD("----------Port settings start--------------------");
+    CAMHAL_LOGD("nSize=%d nPortIndex=%d eDir=%d nBufferCountActual=%d", def.nSize, def.nPortIndex, def.eDir, def.nBufferCountActual);
+    CAMHAL_LOGD("nBufferCountMin=%d nBufferSize=%d bEnabled=%d bPopulated=%d bBuffersContiguous=%d nBufferAlignment=%d", def.nBufferCountMin, def.nBufferSize, def.bEnabled, def.bPopulated, def.bBuffersContiguous, def.nBufferAlignment);
+
+    CAMHAL_LOGD("eDomain = %d",def.eDomain);
+
+    if (def.eDomain == OMX_PortDomainVideo) {
+        CAMHAL_LOGD("===============Video Port===================");
+        CAMHAL_LOGD("cMIMEType=%s",def.format.video.cMIMEType);
+        CAMHAL_LOGD("nFrameWidth=%d nFrameHeight=%d", def.format.video.nFrameWidth, def.format.video.nFrameHeight);
+        CAMHAL_LOGD("nStride=%d nSliceHeight=%d", def.format.video.nStride, def.format.video.nSliceHeight);
+        // xFramerate is Q16 fixed point, hence the >>16 to print whole fps.
+        CAMHAL_LOGD("nBitrate=%d xFramerate=%d", def.format.video.nBitrate, def.format.video.xFramerate>>16);
+        CAMHAL_LOGD("bFlagErrorConcealment=%d eCompressionFormat=%d", def.format.video.bFlagErrorConcealment, def.format.video.eCompressionFormat);
+        CAMHAL_LOGD("eColorFormat=0x%x pNativeWindow=%p", def.format.video.eColorFormat, def.format.video.pNativeWindow);
+        CAMHAL_LOGD("===============END Video Part===================");
+    }
+    else if (def.eDomain == OMX_PortDomainImage) {
+        CAMHAL_LOGD("===============Image Port===================");
+        CAMHAL_LOGD("cMIMEType=%s",def.format.image.cMIMEType);
+        CAMHAL_LOGD("nFrameWidth=%d nFrameHeight=%d", def.format.image.nFrameWidth, def.format.image.nFrameHeight);
+        CAMHAL_LOGD("nStride=%d nSliceHeight=%d", def.format.image.nStride, def.format.image.nSliceHeight);
+        CAMHAL_LOGD("bFlagErrorConcealment=%d eCompressionFormat=%d", def.format.image.bFlagErrorConcealment, def.format.image.eCompressionFormat);
+        CAMHAL_LOGD("eColorFormat=0x%x pNativeWindow=%p", def.format.image.eColorFormat, def.format.image.pNativeWindow);
+        CAMHAL_LOGD("===============END Image Part===================");
+    }
+    CAMHAL_LOGD("----------Port settings end--------------------");
+}
+
+// Diagnostic helper: logs the key fields of a single OMX buffer header.
+void OmxFrameDecoder::omxDumpBufferHeader(OMX_BUFFERHEADERTYPE* bh) {
+    CAMHAL_LOGD("==============OMX_BUFFERHEADERTYPE start==============");
+    CAMHAL_LOGD("nAllocLen=%d nFilledLen=%d nOffset=%d nFlags=0x%x", bh->nAllocLen, bh->nFilledLen, bh->nOffset, bh->nFlags);
+    CAMHAL_LOGD("pBuffer=%p nOutputPortIndex=%d nInputPortIndex=%d nSize=0x%x", bh->pBuffer, bh->nOutputPortIndex, bh->nInputPortIndex, bh->nSize);
+    // nVersion is a union (OMX_VERSIONTYPE); pass its packed 32-bit member
+    // instead of the union object itself through varargs, which the old code
+    // did and which is undefined behavior for %x.
+    CAMHAL_LOGD("nVersion=0x%x", bh->nVersion.nVersion);
+    CAMHAL_LOGD("==============OMX_BUFFERHEADERTYPE end==============");
+}
+
+// Maps a nominal decode resolution onto the padded buffer geometry for known
+// standard sizes (480p/720p/1080p); any other size passes through unchanged.
+// width/height are updated in place. Always returns true.
+bool OmxFrameDecoder::getPaddedDimensions(size_t &width, size_t &height) {
+
+    switch (height) {
+
+    case 480: {
+        height = 576;
+        if (width == 640) {
+            width = 768;
+        }
+        break;
+    }
+    case 720: {
+        height = 832;
+        if (width == 1280) {
+            width = 1408;
+        }
+        break;
+    }
+    case 1080: {
+        height = 1184;
+        if (width == 1920) {
+            width = 2048;
+        }
+        break;
+    }
+    default:
+        // Not a size we know padding for - leave dimensions untouched.
+        break;
+    }
+
+    // %zu: the arguments are size_t; the previous %d is wrong on LP64.
+    CAMHAL_LOGD("WxH updated to padded values : %zu x %zu", width, height);
+    return true;
+}
+
+} // namespace Camera
+} // namespace Ti
+
diff --git a/camera/SensorListener.cpp b/camera/SensorListener.cpp
new file mode 100644
index 0000000..e53fa83
--- /dev/null
+++ b/camera/SensorListener.cpp
@@ -0,0 +1,236 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file SensorListener.cpp
+*
+* This file listens and propagates sensor events to CameraHal.
+*
+*/
+
+#include "SensorListener.h"
+
+#include <stdint.h>
+#include <math.h>
+#include <sys/types.h>
+
+namespace Ti {
+namespace Camera {
+
+/*** static declarations ***/
+static const float RADIANS_2_DEG = (float) (180 / M_PI);
+// measured values on device...might need tuning
+static const int DEGREES_90_THRESH = 50;
+static const int DEGREES_180_THRESH = 170;
+static const int DEGREES_270_THRESH = 250;
+
+// Looper callback: drains the sensor event queue and converts accelerometer
+// vectors into one of four coarse device orientations (0/90/180/270 degrees)
+// plus a tilt angle, forwarded to the SensorListener client. Returns 1 so
+// the Looper keeps the fd registered.
+static int sensor_events_listener(int fd, int events, void* data)
+{
+    SensorListener* listener = (SensorListener*) data;
+    ssize_t num_sensors;
+    ASensorEvent sen_events[8];
+    while ((num_sensors = listener->mSensorEventQueue->read(sen_events, 8)) > 0) {
+        for (int i = 0; i < num_sensors; i++) {
+            if (sen_events[i].type == android::Sensor::TYPE_ACCELEROMETER) {
+                float x = sen_events[i].vector.azimuth;
+                float y = sen_events[i].vector.pitch;
+                float z = sen_events[i].vector.roll;
+                float radius = 0;
+                int tilt = 0, orient = 0;
+
+                CAMHAL_LOGVA("ACCELEROMETER EVENT");
+                CAMHAL_LOGVB(" azimuth = %f pitch = %f roll = %f",
+                             sen_events[i].vector.azimuth,
+                             sen_events[i].vector.pitch,
+                             sen_events[i].vector.roll);
+                // see http://en.wikipedia.org/wiki/Spherical_coordinate_system#Cartesian_coordinates
+                // about conversion from cartesian to spherical for orientation calculations
+                radius = (float) sqrt(x * x + y * y + z * z);
+                // Convert radians to degrees BEFORE truncating to int. The
+                // previous code cast the radian result first, which collapsed
+                // tilt to -1..1 and orient to -3..3 before scaling.
+                tilt = (int) (asinf(z / radius) * RADIANS_2_DEG);
+                orient = (int) (atan2f(-x, y) * RADIANS_2_DEG);
+
+                if (orient < 0) {
+                    orient += 360;
+                }
+
+                // Snap the continuous angle to the nearest quadrant using the
+                // device-tuned thresholds above.
+                if (orient >= DEGREES_270_THRESH) {
+                    orient = 270;
+                } else if (orient >= DEGREES_180_THRESH) {
+                    orient = 180;
+                } else if (orient >= DEGREES_90_THRESH) {
+                    orient = 90;
+                } else {
+                    orient = 0;
+                }
+                listener->handleOrientation(orient, tilt);
+                CAMHAL_LOGVB(" tilt = %d orientation = %d", tilt, orient);
+            } else if (sen_events[i].type == android::Sensor::TYPE_GYROSCOPE) {
+                CAMHAL_LOGVA("GYROSCOPE EVENT");
+            }
+        }
+    }
+
+    // read() returning a negative value other than -EAGAIN is a real error.
+    if (num_sensors < 0 && num_sensors != -EAGAIN) {
+        CAMHAL_LOGEB("reading events failed: %s", strerror(-num_sensors));
+    }
+
+    return 1;
+}
+
+/****** public - member functions ******/
+// Constructor: zero-initialize all members. The callback cookie is now
+// initialized too - it was previously left indeterminate until
+// setCallbacks() ran.
+SensorListener::SensorListener() {
+    LOG_FUNCTION_NAME;
+
+    sensorsEnabled = 0;
+    mOrientationCb = NULL;
+    mCbCookie = NULL;
+    mSensorEventQueue = NULL;
+    mSensorLooperThread = NULL;
+
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+// Destructor: stops the looper thread, detaches the event queue fd from the
+// looper, and drops the references.
+SensorListener::~SensorListener() {
+    LOG_FUNCTION_NAME;
+
+    CAMHAL_LOGDA("Kill looper thread");
+    if (mSensorLooperThread.get()) {
+        // 1. Request exit
+        // 2. Wake up looper which should be polling for an event
+        // 3. Wait for exit
+        mSensorLooperThread->requestExit();
+        mSensorLooperThread->wake();
+        mSensorLooperThread->join();
+        mSensorLooperThread.clear();
+        mSensorLooperThread = NULL;
+    }
+
+    CAMHAL_LOGDA("Kill looper");
+    // NOTE(review): this assumes mSensorEventQueue is non-NULL whenever the
+    // member mLooper is set (true when initialize() created both) - verify.
+    if (mLooper.get()) {
+        mLooper->removeFd(mSensorEventQueue->getFd());
+        mLooper.clear();
+        mLooper = NULL;
+    }
+    CAMHAL_LOGDA("SensorListener destroyed");
+
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+// Creates the sensor event queue, attaches it to a Looper and spins up the
+// thread that polls it. Returns NO_ERROR on success.
+status_t SensorListener::initialize() {
+    status_t ret = NO_ERROR;
+    android::SensorManager& mgr(android::SensorManager::getInstance());
+
+    LOG_FUNCTION_NAME;
+
+    mSensorEventQueue = mgr.createEventQueue();
+    if (mSensorEventQueue == NULL) {
+        CAMHAL_LOGEA("createEventQueue returned NULL");
+        ret = NO_INIT;
+        goto out;
+    }
+
+    // Store the looper in the MEMBER mLooper. The previous code declared a
+    // local `android::sp<android::Looper> mLooper;` here, shadowing the
+    // member - so the destructor's mLooper cleanup never ran.
+    mLooper = new android::Looper(false);
+    mLooper->addFd(mSensorEventQueue->getFd(), 0, ALOOPER_EVENT_INPUT, sensor_events_listener, this);
+
+    if (mSensorLooperThread.get() == NULL)
+        mSensorLooperThread = new SensorLooperThread(mLooper.get());
+
+    if (mSensorLooperThread.get() == NULL) {
+        CAMHAL_LOGEA("Couldn't create sensor looper thread");
+        ret = NO_MEMORY;
+        goto out;
+    }
+
+    ret = mSensorLooperThread->run("sensor looper thread", android::PRIORITY_URGENT_DISPLAY);
+    if (ret == INVALID_OPERATION){
+        // Already running is not fatal - keep using the existing thread.
+        CAMHAL_LOGDA("thread already running ?!?");
+    } else if (ret != NO_ERROR) {
+        CAMHAL_LOGEA("couldn't run thread");
+        goto out;
+    }
+
+ out:
+    LOG_FUNCTION_NAME_EXIT;
+    return ret;
+}
+
+// Registers the client orientation callback and its opaque context pointer.
+// A NULL orientation_cb leaves the existing callback in place, but the
+// cookie is refreshed unconditionally.
+void SensorListener::setCallbacks(orientation_callback_t orientation_cb, void *cookie) {
+    LOG_FUNCTION_NAME;
+
+    if (orientation_cb) {
+        mOrientationCb = orientation_cb;
+    }
+    mCbCookie = cookie;
+
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+// Called from the sensor event thread with a snapped orientation and tilt.
+// Forwards them to the registered client callback, but only while
+// orientation events are enabled.
+void SensorListener::handleOrientation(uint32_t orientation, uint32_t tilt) {
+    LOG_FUNCTION_NAME;
+
+    // Lock serializes against enableSensor()/disableSensor() toggling
+    // sensorsEnabled from other threads.
+    android::AutoMutex lock(&mLock);
+
+    if (mOrientationCb && (sensorsEnabled & SENSOR_ORIENTATION)) {
+        mOrientationCb(orientation, tilt, mCbCookie);
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+// Enables delivery of the requested sensor type. Currently only
+// SENSOR_ORIENTATION (backed by the default accelerometer) is handled; the
+// request is a no-op if that sensor is already enabled or absent.
+void SensorListener::enableSensor(sensor_type_t type) {
+    android::Sensor const* sensor;
+    android::SensorManager& mgr(android::SensorManager::getInstance());
+
+    LOG_FUNCTION_NAME;
+
+    android::AutoMutex lock(&mLock);
+
+    if ((type & SENSOR_ORIENTATION) && !(sensorsEnabled & SENSOR_ORIENTATION)) {
+        sensor = mgr.getDefaultSensor(android::Sensor::TYPE_ACCELEROMETER);
+        if(sensor) {
+            CAMHAL_LOGDB("orientation = %p (%s)", sensor, sensor->getName().string());
+            mSensorEventQueue->enableSensor(sensor);
+            // 100 ms (~10 Hz) is sufficient for coarse orientation detection.
+            mSensorEventQueue->setEventRate(sensor, ms2ns(100));
+            sensorsEnabled |= SENSOR_ORIENTATION;
+        } else {
+            CAMHAL_LOGDB("not enabling absent orientation sensor");
+        }
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+// Disables delivery of the requested sensor type (currently only
+// SENSOR_ORIENTATION). No-op if that sensor was not enabled.
+void SensorListener::disableSensor(sensor_type_t type) {
+    android::Sensor const* sensor;
+    android::SensorManager& mgr(android::SensorManager::getInstance());
+
+    LOG_FUNCTION_NAME;
+
+    android::AutoMutex lock(&mLock);
+
+    if ((type & SENSOR_ORIENTATION) && (sensorsEnabled & SENSOR_ORIENTATION)) {
+        sensor = mgr.getDefaultSensor(android::Sensor::TYPE_ACCELEROMETER);
+        // getDefaultSensor() can return NULL - guard before dereferencing,
+        // mirroring the check enableSensor() already does.
+        if (sensor) {
+            CAMHAL_LOGDB("orientation = %p (%s)", sensor, sensor->getName().string());
+            mSensorEventQueue->disableSensor(sensor);
+        }
+        sensorsEnabled &= ~SENSOR_ORIENTATION;
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/SwFrameDecoder.cpp b/camera/SwFrameDecoder.cpp
new file mode 100644
index 0000000..2ce2c0f
--- /dev/null
+++ b/camera/SwFrameDecoder.cpp
@@ -0,0 +1,85 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "Common.h"
+#include "SwFrameDecoder.h"
+
+namespace Ti {
+namespace Camera {
+
+// Constructor: starts with no DHT-prefixed scratch buffer; doConfigure()
+// sizes and allocates it once the stream dimensions are known.
+SwFrameDecoder::SwFrameDecoder()
+: mjpegWithHdrSize(0), mJpegWithHeaderBuffer(NULL) {
+}
+
+SwFrameDecoder::~SwFrameDecoder() {
+    // delete[] on NULL is a no-op, so this is safe even if doConfigure()
+    // never allocated the buffer.
+    delete [] mJpegWithHeaderBuffer;
+    mJpegWithHeaderBuffer = NULL;
+}
+
+
+// Allocates the scratch buffer that will hold an input MJPEG frame with the
+// DHT segment prepended (sized for a worst-case frame plus the DHT bytes
+// reported by the decoder).
+// NOTE(review): `params` is unused; this reads mParams instead - presumably
+// the base class assigns mParams before invoking doConfigure(); verify.
+void SwFrameDecoder::doConfigure(const DecoderParameters& params) {
+    LOG_FUNCTION_NAME;
+
+    mjpegWithHdrSize = (mParams.width * mParams.height / 2) +
+            mJpgdecoder.readDHTSize();
+    // Reconfiguration may change dimensions - drop any previous buffer first.
+    if (mJpegWithHeaderBuffer != NULL) {
+        delete [] mJpegWithHeaderBuffer;
+        mJpegWithHeaderBuffer = NULL;
+    }
+    mJpegWithHeaderBuffer = new unsigned char[mjpegWithHdrSize];
+
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+
+// Processes one frame: takes the first queued input (MJPEG) buffer, prepends
+// the standard DHT segment into the scratch buffer, software-decodes it into
+// the first queued output buffer, and propagates the input timestamp.
+void SwFrameDecoder::doProcessInputBuffer() {
+    LOG_FUNCTION_NAME;
+    nsecs_t timestamp = 0;
+
+    CAMHAL_LOGV("Will add header to MJPEG");
+    int final_jpg_sz = 0;
+    {
+        // Scope limits how long the input buffer's lock is held.
+        int inIndex = mInQueue.itemAt(0);
+        android::sp<MediaBuffer>& inBuffer = mInBuffers->editItemAt(inIndex);
+        android::AutoMutex lock(inBuffer->getLock());
+        timestamp = inBuffer->getTimestamp();
+        final_jpg_sz = mJpgdecoder.appendDHT(
+                reinterpret_cast<unsigned char*>(inBuffer->buffer),
+                inBuffer->filledLen, mJpegWithHeaderBuffer, mjpegWithHdrSize);
+        inBuffer->setStatus(BufferStatus_InDecoded);
+    }
+    CAMHAL_LOGV("Added header to MJPEG");
+    {
+        int outIndex = mOutQueue.itemAt(0);
+        android::sp<MediaBuffer>& outBuffer = mOutBuffers->editItemAt(outIndex);
+        android::AutoMutex lock(outBuffer->getLock());
+        CameraBuffer* buffer = reinterpret_cast<CameraBuffer*>(outBuffer->buffer);
+        // NOTE(review): 4096 is the output row stride in bytes - presumably
+        // matching the stride the output buffers were allocated with; verify.
+        if (!mJpgdecoder.decode(mJpegWithHeaderBuffer, final_jpg_sz,
+                reinterpret_cast<unsigned char*>(buffer->mapped), 4096)) {
+            // NOTE(review): on failure the output buffer's status is left
+            // unchanged (not marked filled) - confirm callers recycle it.
+            CAMHAL_LOGEA("Error while decoding JPEG");
+            return;
+        }
+        outBuffer->setTimestamp(timestamp);
+        outBuffer->setStatus(BufferStatus_OutFilled);
+    }
+    CAMHAL_LOGV("JPEG decoded!");
+
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/TICameraParameters.cpp b/camera/TICameraParameters.cpp
new file mode 100644
index 0000000..fb511f2
--- /dev/null
+++ b/camera/TICameraParameters.cpp
@@ -0,0 +1,236 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <utils/Log.h>
+
+#include <string.h>
+#include <stdlib.h>
+#include <TICameraParameters.h>
+
+#define TI_KEY_ALGO_PREFIX "ti-algo-"
+
+namespace Ti {
+namespace Camera {
+
+//TI extensions to camera mode
+const char TICameraParameters::HIGH_PERFORMANCE_MODE[] = "high-performance";
+const char TICameraParameters::HIGH_QUALITY_MODE[] = "high-quality";
+const char TICameraParameters::HIGH_QUALITY_ZSL_MODE[] = "high-quality-zsl";
+const char TICameraParameters::CP_CAM_MODE[] = "cp-cam";
+const char TICameraParameters::VIDEO_MODE[] = "video-mode";
+const char TICameraParameters::VIDEO_MODE_HQ[] = "video-mode-hq";
+const char TICameraParameters::EXPOSURE_BRACKETING[] = "exposure-bracketing";
+const char TICameraParameters::ZOOM_BRACKETING[] = "zoom-bracketing";
+const char TICameraParameters::TEMP_BRACKETING[] = "temporal-bracketing";
+
+// TI extensions to standard android Parameters
+const char TICameraParameters::KEY_SUPPORTED_CAMERAS[] = "camera-indexes";
+const char TICameraParameters::KEY_CAMERA[] = "camera-index";
+const char TICameraParameters::KEY_SHUTTER_ENABLE[] = "shutter-enable";
+const char TICameraParameters::KEY_CAMERA_NAME[] = "camera-name";
+const char TICameraParameters::KEY_BURST[] = "burst-capture";
+const char TICameraParameters::KEY_CAP_MODE[] = "mode";
+const char TICameraParameters::KEY_CAP_MODE_VALUES[] = "mode-values";
+const char TICameraParameters::KEY_VNF[] = "vnf";
+const char TICameraParameters::KEY_VNF_SUPPORTED[] = "vnf-supported";
+const char TICameraParameters::KEY_SATURATION[] = "saturation";
+const char TICameraParameters::KEY_BRIGHTNESS[] = "brightness";
+const char TICameraParameters::KEY_SUPPORTED_EXPOSURE[] = "exposure-mode-values";
+const char TICameraParameters::KEY_EXPOSURE_MODE[] = "exposure";
+const char TICameraParameters::KEY_SUPPORTED_MANUAL_EXPOSURE_MIN[] = "supported-manual-exposure-min";
+const char TICameraParameters::KEY_SUPPORTED_MANUAL_EXPOSURE_MAX[] = "supported-manual-exposure-max";
+const char TICameraParameters::KEY_SUPPORTED_MANUAL_EXPOSURE_STEP[] = "supported-manual-exposure-step";
+const char TICameraParameters::KEY_SUPPORTED_MANUAL_GAIN_ISO_MIN[] = "supported-manual-gain-iso-min";
+const char TICameraParameters::KEY_SUPPORTED_MANUAL_GAIN_ISO_MAX[] = "supported-manual-gain-iso-max";
+const char TICameraParameters::KEY_SUPPORTED_MANUAL_GAIN_ISO_STEP[] = "supported-manual-gain-iso-step";
+const char TICameraParameters::KEY_MANUAL_EXPOSURE[] = "manual-exposure";
+const char TICameraParameters::KEY_MANUAL_EXPOSURE_RIGHT[] = "manual-exposure-right";
+const char TICameraParameters::KEY_MANUAL_GAIN_ISO[] = "manual-gain-iso";
+const char TICameraParameters::KEY_MANUAL_GAIN_ISO_RIGHT[] = "manual-gain-iso-right";
+const char TICameraParameters::KEY_CONTRAST[] = "contrast";
+const char TICameraParameters::KEY_SHARPNESS[] = "sharpness";
+const char TICameraParameters::KEY_ISO[] = "iso";
+const char TICameraParameters::KEY_SUPPORTED_ISO_VALUES[] = "iso-mode-values";
+const char TICameraParameters::KEY_SUPPORTED_IPP[] = "ipp-values";
+const char TICameraParameters::KEY_IPP[] = "ipp";
+const char TICameraParameters::KEY_METERING_MODE[] = "meter-mode";
+const char TICameraParameters::KEY_EXP_BRACKETING_RANGE[] = "exp-bracketing-range";
+const char TICameraParameters::KEY_EXP_GAIN_BRACKETING_RANGE[] = "exp-gain-bracketing-range";
+const char TICameraParameters::KEY_ZOOM_BRACKETING_RANGE[] = "zoom-bracketing-range";
+const char TICameraParameters::KEY_TEMP_BRACKETING[] = "temporal-bracketing";
+const char TICameraParameters::KEY_TEMP_BRACKETING_RANGE_POS[] = "temporal-bracketing-range-positive";
+const char TICameraParameters::KEY_TEMP_BRACKETING_RANGE_NEG[] = "temporal-bracketing-range-negative";
+const char TICameraParameters::KEY_FLUSH_SHOT_CONFIG_QUEUE[] = "flush-shot-config-queue";
+const char TICameraParameters::KEY_MEASUREMENT_ENABLE[] = "measurement";
+const char TICameraParameters::KEY_GBCE[] = "gbce";
+const char TICameraParameters::KEY_GBCE_SUPPORTED[] = "gbce-supported";
+const char TICameraParameters::KEY_GLBCE[] = "glbce";
+const char TICameraParameters::KEY_GLBCE_SUPPORTED[] = "glbce-supported";
+const char TICameraParameters::KEY_CURRENT_ISO[] = "current-iso";
+const char TICameraParameters::KEY_SENSOR_ORIENTATION[] = "sensor-orientation";
+const char TICameraParameters::KEY_RECORDING_HINT[] = "internal-recording-hint";
+const char TICameraParameters::KEY_AUTO_FOCUS_LOCK[] = "auto-focus-lock";
+const char TICameraParameters::KEY_FRAMERATE_RANGES_EXT_SUPPORTED[] = "preview-fps-range-ext-values";
+const char TICameraParameters::KEY_FRAMERATES_EXT_SUPPORTED[] = "preview-fps-ext-values";
+
+const char TICameraParameters::RAW_WIDTH[] = "raw-width";
+const char TICameraParameters::RAW_HEIGHT[] = "raw-height";
+
+// TI extensions for Stereo Mode
+const char TICameraParameters::KEY_S3D_PRV_FRAME_LAYOUT[] = "s3d-prv-frame-layout";
+const char TICameraParameters::KEY_S3D_PRV_FRAME_LAYOUT_VALUES[] = "s3d-prv-frame-layout-values";
+const char TICameraParameters::KEY_S3D_CAP_FRAME_LAYOUT[] = "s3d-cap-frame-layout";
+const char TICameraParameters::KEY_S3D_CAP_FRAME_LAYOUT_VALUES[] = "s3d-cap-frame-layout-values";
+
+//TI extensions for 3D resolutions
+const char TICameraParameters::KEY_SUPPORTED_PICTURE_SUBSAMPLED_SIZES[] = "supported-picture-subsampled-size-values";
+const char TICameraParameters::KEY_SUPPORTED_PICTURE_TOPBOTTOM_SIZES[] = "supported-picture-topbottom-size-values";
+const char TICameraParameters::KEY_SUPPORTED_PICTURE_SIDEBYSIDE_SIZES[] = "supported-picture-sidebyside-size-values";
+const char TICameraParameters::KEY_SUPPORTED_PREVIEW_SUBSAMPLED_SIZES[] = "supported-preview-subsampled-size-values";
+const char TICameraParameters::KEY_SUPPORTED_PREVIEW_TOPBOTTOM_SIZES[] = "supported-preview-topbottom-size-values";
+const char TICameraParameters::KEY_SUPPORTED_PREVIEW_SIDEBYSIDE_SIZES[] = "supported-preview-sidebyside-size-values";
+
+//TI extensions for SAC/SMC
+const char TICameraParameters::KEY_AUTOCONVERGENCE_MODE[] = "auto-convergence-mode";
+const char TICameraParameters::KEY_AUTOCONVERGENCE_MODE_VALUES[] = "auto-convergence-mode-values";
+const char TICameraParameters::KEY_MANUAL_CONVERGENCE[] = "manual-convergence";
+const char TICameraParameters::KEY_SUPPORTED_MANUAL_CONVERGENCE_MIN[] = "supported-manual-convergence-min";
+const char TICameraParameters::KEY_SUPPORTED_MANUAL_CONVERGENCE_MAX[] = "supported-manual-convergence-max";
+const char TICameraParameters::KEY_SUPPORTED_MANUAL_CONVERGENCE_STEP[] = "supported-manual-convergence-step";
+
+//TI extensions for setting EXIF tags
+const char TICameraParameters::KEY_EXIF_MODEL[] = "exif-model";
+const char TICameraParameters::KEY_EXIF_MAKE[] = "exif-make";
+
+//TI extensions for additional GPS data
+const char TICameraParameters::KEY_GPS_MAPDATUM[] = "gps-mapdatum";
+const char TICameraParameters::KEY_GPS_VERSION[] = "gps-version";
+const char TICameraParameters::KEY_GPS_DATESTAMP[] = "gps-datestamp";
+
+// TI extensions for slice mode implementation for VTC
+const char TICameraParameters::KEY_VTC_HINT[] = "internal-vtc-hint";
+const char TICameraParameters::KEY_VIDEO_ENCODER_HANDLE[] = "encoder_handle";
+const char TICameraParameters::KEY_VIDEO_ENCODER_SLICE_HEIGHT[] = "encoder_slice_height";
+
+//TI extensions to Image post-processing
+const char TICameraParameters::IPP_LDCNSF[] = "ldc-nsf";
+const char TICameraParameters::IPP_LDC[] = "ldc";
+const char TICameraParameters::IPP_NSF[] = "nsf";
+const char TICameraParameters::IPP_NONE[] = "off";
+
+// TI extensions to standard android pixel formats
+const char TICameraParameters::PIXEL_FORMAT_UNUSED[] = "unused";
+const char TICameraParameters::PIXEL_FORMAT_JPS[] = "jps";
+const char TICameraParameters::PIXEL_FORMAT_MPO[] = "mpo";
+const char TICameraParameters::PIXEL_FORMAT_YUV422I_UYVY[] = "yuv422i-uyvy";
+
+// TI extensions to standard android scene mode settings
+const char TICameraParameters::SCENE_MODE_CLOSEUP[] = "closeup";
+const char TICameraParameters::SCENE_MODE_AQUA[] = "aqua";
+const char TICameraParameters::SCENE_MODE_SNOWBEACH[] = "snow-beach";
+const char TICameraParameters::SCENE_MODE_MOOD[] = "mood";
+const char TICameraParameters::SCENE_MODE_NIGHT_INDOOR[] = "night-indoor";
+const char TICameraParameters::SCENE_MODE_DOCUMENT[] = "document";
+const char TICameraParameters::SCENE_MODE_BARCODE[] = "barcode";
+const char TICameraParameters::SCENE_MODE_VIDEO_SUPER_NIGHT[] = "super-night";
+const char TICameraParameters::SCENE_MODE_VIDEO_CINE[] = "cine";
+const char TICameraParameters::SCENE_MODE_VIDEO_OLD_FILM[] = "old-film";
+
+// TI extensions to standard android white balance values.
+const char TICameraParameters::WHITE_BALANCE_TUNGSTEN[] = "tungsten";
+const char TICameraParameters::WHITE_BALANCE_HORIZON[] = "horizon";
+const char TICameraParameters::WHITE_BALANCE_SUNSET[] = "sunset";
+const char TICameraParameters::WHITE_BALANCE_FACE[] = "face-priority";
+
+// TI extensions to standard android focus modes.
+const char TICameraParameters::FOCUS_MODE_PORTRAIT[] = "portrait";
+const char TICameraParameters::FOCUS_MODE_EXTENDED[] = "extended";
+const char TICameraParameters::FOCUS_MODE_FACE[] = "face-priority";
+const char TICameraParameters::FOCUS_MODE_OFF[] = "off";
+
+// TI extensions to add values for effect settings.
+const char TICameraParameters::EFFECT_NATURAL[] = "natural";
+const char TICameraParameters::EFFECT_VIVID[] = "vivid";
+const char TICameraParameters::EFFECT_COLOR_SWAP[] = "color-swap";
+const char TICameraParameters::EFFECT_BLACKWHITE[] = "blackwhite";
+
+// TI extensions to add exposure preset modes
+const char TICameraParameters::EXPOSURE_MODE_MANUAL[] = "manual";
+const char TICameraParameters::EXPOSURE_MODE_AUTO[] = "auto";
+const char TICameraParameters::EXPOSURE_MODE_NIGHT[] = "night";
+const char TICameraParameters::EXPOSURE_MODE_BACKLIGHT[] = "backlighting";
+const char TICameraParameters::EXPOSURE_MODE_SPOTLIGHT[] = "spotlight";
+const char TICameraParameters::EXPOSURE_MODE_SPORTS[] = "sports";
+const char TICameraParameters::EXPOSURE_MODE_SNOW[] = "snow";
+const char TICameraParameters::EXPOSURE_MODE_BEACH[] = "beach";
+const char TICameraParameters::EXPOSURE_MODE_APERTURE[] = "aperture";
+const char TICameraParameters::EXPOSURE_MODE_SMALL_APERTURE[] = "small-aperture";
+const char TICameraParameters::EXPOSURE_MODE_FACE[] = "face-priority";
+
+// TI extensions to add iso values
+const char TICameraParameters::ISO_MODE_AUTO[] = "auto";
+const char TICameraParameters::ISO_MODE_100[] = "100";
+const char TICameraParameters::ISO_MODE_200[] = "200";
+const char TICameraParameters::ISO_MODE_400[] = "400";
+const char TICameraParameters::ISO_MODE_800[] = "800";
+const char TICameraParameters::ISO_MODE_1000[] = "1000";
+const char TICameraParameters::ISO_MODE_1200[] = "1200";
+const char TICameraParameters::ISO_MODE_1600[] = "1600";
+
+//TI extensions for stereo frame layouts
+const char TICameraParameters::S3D_NONE[] = "none";
+const char TICameraParameters::S3D_TB_FULL[] = "tb-full";
+const char TICameraParameters::S3D_SS_FULL[] = "ss-full";
+const char TICameraParameters::S3D_TB_SUBSAMPLED[] = "tb-subsampled";
+const char TICameraParameters::S3D_SS_SUBSAMPLED[] = "ss-subsampled";
+
+// TI extensions to add auto convergence values
+const char TICameraParameters::AUTOCONVERGENCE_MODE_DISABLE[] = "disable";
+const char TICameraParameters::AUTOCONVERGENCE_MODE_FRAME[] = "frame";
+const char TICameraParameters::AUTOCONVERGENCE_MODE_CENTER[] = "center";
+const char TICameraParameters::AUTOCONVERGENCE_MODE_TOUCH[] = "touch";
+const char TICameraParameters::AUTOCONVERGENCE_MODE_MANUAL[] = "manual";
+
+//TI values for camera direction
+const char TICameraParameters::FACING_FRONT[]="front";
+const char TICameraParameters::FACING_BACK[]="back";
+
+//TI extensions to flash settings
+const char TICameraParameters::FLASH_MODE_FILL_IN[] = "fill-in";
+
+//TI extensions to add sensor orientation parameters
+const char TICameraParameters::ORIENTATION_SENSOR_NONE[] = "0";
+const char TICameraParameters::ORIENTATION_SENSOR_90[] = "90";
+const char TICameraParameters::ORIENTATION_SENSOR_180[] = "180";
+const char TICameraParameters::ORIENTATION_SENSOR_270[] = "270";
+
+const char TICameraParameters::KEY_MECHANICAL_MISALIGNMENT_CORRECTION_SUPPORTED[] = "mechanical-misalignment-correction-supported";
+const char TICameraParameters::KEY_MECHANICAL_MISALIGNMENT_CORRECTION[] = "mechanical-misalignment-correction";
+
+//TI extensions for enable/disable algos
+const char TICameraParameters::KEY_ALGO_EXTERNAL_GAMMA[] = TI_KEY_ALGO_PREFIX "external-gamma";
+const char TICameraParameters::KEY_ALGO_NSF1[] = TI_KEY_ALGO_PREFIX "nsf1";
+const char TICameraParameters::KEY_ALGO_NSF2[] = TI_KEY_ALGO_PREFIX "nsf2";
+const char TICameraParameters::KEY_ALGO_SHARPENING[] = TI_KEY_ALGO_PREFIX "sharpening";
+const char TICameraParameters::KEY_ALGO_THREELINCOLORMAP[] = TI_KEY_ALGO_PREFIX "threelinecolormap";
+const char TICameraParameters::KEY_ALGO_GIC[] = TI_KEY_ALGO_PREFIX "gic";
+
+const char TICameraParameters::KEY_GAMMA_TABLE[] = "gamma-table";
+
+const char TICameraParameters::KEY_PREVIEW_FRAME_RATE_RANGE[] = "preview-frame-rate-range";
+
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/V4LCameraAdapter/V4LCameraAdapter.cpp b/camera/V4LCameraAdapter/V4LCameraAdapter.cpp
new file mode 100644
index 0000000..bf8e5f1
--- /dev/null
+++ b/camera/V4LCameraAdapter/V4LCameraAdapter.cpp
@@ -0,0 +1,1802 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file V4LCameraAdapter.cpp
+*
+* This file maps the Camera Hardware Interface to V4L2.
+*
+*/
+
+
+#include "V4LCameraAdapter.h"
+#include "CameraHal.h"
+#include "TICameraParameters.h"
+#include "DebugUtils.h"
+#include <signal.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <fcntl.h>
+#include <unistd.h>
+#include <errno.h>
+#include <sys/ioctl.h>
+#include <sys/mman.h>
+#include <sys/select.h>
+//#include <linux/videodev.h>
+#include <cutils/properties.h>
+#include "DecoderFactory.h"
+
+#define UNLIKELY( exp ) (__builtin_expect( (exp) != 0, false ))
+static int mDebugFps = 0;
+
+#define Q16_OFFSET 16
+
+#define HERE(Msg) {CAMHAL_LOGEB("--=== %s===--\n", Msg);}
+
+extern int setupM2MDevice();
+extern int closeM2MDevice();
+extern int startM2MDevice();
+extern int stopM2MDevice();
+extern int processFrame(void *srcHandle, int srcWidth, int srcHeight, int srcStride, const char *srcFmt, void *dstHandle,
+ int dstWidth, int dstHeight, int dstStride, const char *dstFmt, bool dei, int translen, int index,
+ int *in_index, int *out_index);
+
+namespace Ti {
+namespace Camera {
+
+//frames skipped before recalculating the framerate
+#define FPS_PERIOD 30
+
+//Proto Types
+static void convertYUV422i_yuyvTouyvy(uint8_t *src, uint8_t *dest, size_t size );
+static void convertYUV422ToNV12Tiler(unsigned char *src, android_ycbcr *dest, int width, int height );
+static void convertYUV422ToNV12(unsigned char *src, unsigned char *dest, int width, int height );
+
+// NOTE(review): presumably serializes adapter creation/capability probing;
+// not referenced in this translation-unit chunk — confirm against users.
+android::Mutex gV4LAdapterLock;
+// Device node paths per sensor index (opened in initialize()); populated
+// outside this view, presumably by the V4L capabilities probe.
+char device[MAX_VIDEO_DEVICES][MAX_PATH_LENGTH];
+
+/**
+ * Log a running frames-per-second figure once every FPS_PERIOD frames.
+ * Only active when the debug.camera.showfps property enabled mDebugFps.
+ * Uses function-local statics; assumed to be called from a single
+ * (preview) thread.
+ */
+static void debugShowFPS()
+{
+    static int mFrameCount = 0;
+    static int mLastFrameCount = 0;
+    static nsecs_t mLastFpsTime = 0;
+    static float mFps = 0;
+    if(mDebugFps) {
+        mFrameCount++;
+        // Use the shared FPS_PERIOD constant instead of a magic 30 so the
+        // reporting window stays consistent with recalculateFPS().
+        if ((mFrameCount % FPS_PERIOD == 0)) {
+            nsecs_t now = systemTime();
+            nsecs_t diff = now - mLastFpsTime;
+            mFps = ((mFrameCount - mLastFrameCount) * float(s2ns(1))) / diff;
+            mLastFpsTime = now;
+            mLastFrameCount = mFrameCount;
+            CAMHAL_LOGE("Camera %d Frames, %f FPS", mFrameCount, mFps);
+        }
+    }
+}
+
+
+/*--------------------Camera Adapter Class STARTS here-----------------------------*/
+
+/*--------------------V4L wrapper functions -------------------------------*/
+
+// A software/hardware decoder is only required when the sensor does not
+// already deliver raw YUYV frames (i.e. for MJPEG or H.264 streams).
+bool V4LCameraAdapter::isNeedToUseDecoder() const {
+    const bool rawPassThrough = (mPixelFormat == V4L2_PIX_FMT_YUYV);
+    return !rawPassThrough;
+}
+
+/**
+ * Thin wrapper around ioctl() for the V4L2 device.
+ * Serializes all ioctl traffic through mV4LLock and retries the call while
+ * it is interrupted by a signal (EINTR), which is required for robust V4L2
+ * usage on busy systems.
+ *
+ * @param fd   device file descriptor
+ * @param req  V4L2 ioctl request code
+ * @param argp request-specific argument structure
+ * @return the raw ioctl result: 0 on success, -1 on failure (errno set)
+ */
+status_t V4LCameraAdapter::v4lIoctl (int fd, int req, void* argp) {
+    status_t ret = NO_ERROR;
+    errno = 0;
+
+    android::AutoMutex lock(mV4LLock);
+
+    do {
+        ret = ioctl (fd, req, argp);
+    }while (-1 == ret && EINTR == errno);
+
+    return ret;
+}
+
+/**
+ * Request and memory-map `count` MMAP capture buffers from the driver.
+ * On return `count` holds the number of buffers the driver actually
+ * granted. The mapped buffers are wrapped in MediaBuffers (mInBuffers) and,
+ * when a decoder is in use, registered with it as its input pool.
+ *
+ * NOTE(review): on an mmap() failure mid-loop the previously mapped
+ * buffers are not munmap'ed here — they are only released later by
+ * v4lStopStreaming(); confirm no leak on this early-return path.
+ */
+status_t V4LCameraAdapter::v4lInitMmap(int& count, int width, int height) {
+    status_t ret = NO_ERROR;
+
+    LOG_FUNCTION_NAME;
+
+    //First allocate adapter internal buffers at V4L level for USB Cam
+    //These are the buffers from which we will copy the data into overlay buffers
+    /* Check if camera can handle NB_BUFFER buffers */
+    mVideoInfo->rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    mVideoInfo->rb.memory = V4L2_MEMORY_MMAP;
+    mVideoInfo->rb.count = count;
+
+    ret = v4lIoctl(mCameraHandle, VIDIOC_REQBUFS, &mVideoInfo->rb);
+    if (ret < 0) {
+        CAMHAL_LOGEB("VIDIOC_REQBUFS failed: %s", strerror(errno));
+        return ret;
+    }
+
+    // The driver may grant fewer (or more) buffers than requested.
+    count = mVideoInfo->rb.count;
+
+    //Since we will do mapping of new In buffers - clear input MediaBuffer storage
+    mInBuffers.clear();
+
+    for (int i = 0; i < count; i++) {
+
+        memset (&mVideoInfo->buf, 0, sizeof (struct v4l2_buffer));
+
+        mVideoInfo->buf.index = i;
+        mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+        mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
+
+        // QUERYBUF yields the per-buffer length/offset needed for mmap below.
+        ret = v4lIoctl (mCameraHandle, VIDIOC_QUERYBUF, &mVideoInfo->buf);
+        if (ret < 0) {
+            CAMHAL_LOGEB("Unable to query buffer (%s)", strerror(errno));
+            return ret;
+        }
+
+        mVideoInfo->mem[i] = mmap (NULL,
+               mVideoInfo->buf.length,
+               PROT_READ | PROT_WRITE,
+               MAP_SHARED,
+               mCameraHandle,
+               mVideoInfo->buf.m.offset);
+
+        CAMHAL_LOGVB(" mVideoInfo->mem[%d]=%p ; mVideoInfo->buf.length = %d", i, mVideoInfo->mem[i], mVideoInfo->buf.length);
+        if (mVideoInfo->mem[i] == MAP_FAILED) {
+            CAMHAL_LOGEB("Unable to map buffer [%d]. (%s)", i, strerror(errno));
+            return -1;
+        }
+
+        MediaBuffer* buffer = new MediaBuffer(i, mVideoInfo->mem[i], mVideoInfo->buf.length);
+        mInBuffers.push_back(buffer);
+    }
+
+    if (isNeedToUseDecoder()) {
+        // Hand the freshly mapped V4L buffers to the decoder as its input
+        // pool and (re)configure it for the new resolution.
+        mDecoder->registerInputBuffers(&mInBuffers);
+        DecoderParameters params;
+        params.width = width;
+        params.height = height;
+        params.inputBufferCount = count;
+        params.outputBufferCount = count;
+        mDecoder->configure(params);
+    }
+
+
+
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+/**
+ * Switch the capture queue to user-pointer I/O via VIDIOC_REQBUFS.
+ * On success `count` is updated with the buffer count actually granted
+ * by the driver (which may differ from the number requested).
+ */
+status_t V4LCameraAdapter::v4lInitUsrPtr(int& count) {
+    struct v4l2_requestbuffers &reqbufs = mVideoInfo->rb;
+
+    reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    reqbufs.memory = V4L2_MEMORY_USERPTR;
+    reqbufs.count = count;
+
+    const status_t ret = v4lIoctl(mCameraHandle, VIDIOC_REQBUFS, &reqbufs);
+    if (ret < 0) {
+        CAMHAL_LOGEB("VIDIOC_REQBUFS failed for USERPTR: %s", strerror(errno));
+        return ret;
+    }
+
+    count = reqbufs.count;
+    return ret;
+}
+
+/**
+ * Switch the capture queue to DMABUF I/O and allocate the backing camera
+ * buffers through the memory manager.
+ * @param count  in: requested buffer count; out: count granted by the driver
+ * @param width/height  frame geometry; sized for YUV422I (2 bytes/pixel)
+ * @return NO_ERROR on success, a negative status_t on failure
+ */
+status_t V4LCameraAdapter::v4lInitDmaBuf(int& count, int width, int height) {
+    status_t ret = NO_ERROR;
+    int bytes = width * height * 2;   // YUV422 interleaved: 2 bytes per pixel
+
+    mVideoInfo->rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    mVideoInfo->rb.memory = V4L2_MEMORY_DMABUF;
+    mVideoInfo->rb.count = count;
+
+    ret = v4lIoctl(mCameraHandle, VIDIOC_REQBUFS, &mVideoInfo->rb);
+    if (ret < 0) {
+        CAMHAL_LOGEB("VIDIOC_REQBUFS failed for DMABUF: %s", strerror(errno));
+        return ret;
+    }
+    count = mVideoInfo->rb.count;
+
+    CAMHAL_LOGEB("Allocate CameraBufferlist [%d][%d][%d]\n", count, width, height);
+
+    mCameraBuffers = mMemoryManager->allocateBufferList(0, 0, (const char *) android::CameraParameters::PIXEL_FORMAT_YUV422I, bytes, count);
+    if( NULL == mCameraBuffers ) {
+        CAMHAL_LOGEA("Couldn't allocate camera buffers using memory manager");
+        // NO_MEMORY is already a negative status_t (-ENOMEM); the previous
+        // "-NO_MEMORY" produced a POSITIVE value that callers checking
+        // (ret < 0) silently ignored.
+        ret = NO_MEMORY;
+    }
+
+    return ret;
+}
+
+/**
+ * Apply the configured frame rate and start capture streaming
+ * (VIDIOC_STREAMON) if it is not already running, then optionally discard
+ * the first mSkipFramesCount frames.
+ * @return NO_ERROR on success, the failing ioctl/status code otherwise
+ */
+status_t V4LCameraAdapter::v4lStartStreaming () {
+    status_t ret = NO_ERROR;
+    enum v4l2_buf_type bufType;
+
+    LOG_FUNCTION_NAME;
+
+    if (!mVideoInfo->isStreaming) {
+        bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+        // Frame rate must be programmed before STREAMON takes effect.
+        ret = applyFpsValue();
+        if (ret != NO_ERROR) {
+            return ret;
+        }
+        ret = v4lIoctl (mCameraHandle, VIDIOC_STREAMON, &bufType);
+        if (ret < 0) {
+            CAMHAL_LOGEB("StartStreaming: Unable to start capture: %s", strerror(errno));
+            return ret;
+        }
+        mVideoInfo->isStreaming = true;
+    }
+
+    // Workaround for cameras with incorrect driver behavior: the first
+    // frame(s) after VIDIOC_STREAMON may be left over from the previous
+    // resolution or be distorted. For such cameras the number of frames to
+    // skip after stream-on can be set dynamically (camera.v4l.skipframes).
+    for (int i = 0; i < mSkipFramesCount; i++) {
+        int id = 0, length = 0;
+        if (GetFrame(id, length) != NULL) {
+            returnBufferToV4L(id);
+        }
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+    return ret;
+}
+
+/**
+ * Stop capture streaming (VIDIOC_STREAMOFF), unmap the MMAP buffers and
+ * release the driver-side queue by issuing REQBUFS with count=0.
+ * @param nBufferCount number of buffers previously mapped/queued
+ *
+ * NOTE(review): all buffers are unmapped with mVideoInfo->buf.length,
+ * i.e. the length of the LAST buffer queried — assumes every MMAP buffer
+ * has the same size; confirm.
+ * NOTE(review): the memory type is forced to V4L2_MEMORY_MMAP here even
+ * when streaming used DMABUF (preview path), in which case mem[i] is NULL
+ * and munmap() fails harmlessly but logs an error — verify intent.
+ */
+status_t V4LCameraAdapter::v4lStopStreaming (int nBufferCount) {
+    status_t ret = NO_ERROR;
+    enum v4l2_buf_type bufType;
+
+    LOG_FUNCTION_NAME;
+
+    if (mVideoInfo->isStreaming) {
+        bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+
+        ret = v4lIoctl (mCameraHandle, VIDIOC_STREAMOFF, &bufType);
+        if (ret != 0) {
+            CAMHAL_LOGEB("StopStreaming: Unable to stop capture: %s", strerror(errno));
+            return ret;
+        }
+        mVideoInfo->isStreaming = false;
+
+        /* Unmap buffers */
+        mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+        mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
+        for (int i = 0; i < nBufferCount; i++) {
+            if (munmap(mVideoInfo->mem[i], mVideoInfo->buf.length) < 0) {
+                CAMHAL_LOGEA("munmap() failed");
+            }
+            mVideoInfo->mem[i] = 0;
+        }
+
+        //free the memory allocated during REQBUFS, by setting the count=0
+        mVideoInfo->rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+        mVideoInfo->rb.memory = V4L2_MEMORY_MMAP;
+        mVideoInfo->rb.count = 0;
+
+        ret = v4lIoctl(mCameraHandle, VIDIOC_REQBUFS, &mVideoInfo->rb);
+        if (ret < 0) {
+            CAMHAL_LOGEB("VIDIOC_REQBUFS failed: %s", strerror(errno));
+            goto EXIT;
+        }
+    }
+EXIT:
+    LOG_FUNCTION_NAME_EXIT;
+    return ret;
+}
+
+/**
+ * Program the capture format (size + fourcc) into the driver via
+ * VIDIOC_S_FMT and cache geometry in mVideoInfo.
+ * @param width/height desired frame size
+ * @param pix_format   V4L2 fourcc (e.g. V4L2_PIX_FMT_YUYV)
+ *
+ * NOTE(review): mDeinterlaceEnabled is derived from the `field` value that
+ * VIDIOC_G_FMT returned (the driver's current state) BEFORE S_FMT is
+ * issued — confirm this ordering is intentional.
+ */
+status_t V4LCameraAdapter::v4lSetFormat (int width, int height, uint32_t pix_format) {
+    status_t ret = NO_ERROR;
+
+    LOG_FUNCTION_NAME;
+
+    CAMHAL_LOGDB("Width * Height %d x %d format 0x%x", width, height, mPixelFormat);
+
+    // Read the driver's current format first; S_FMT below only overrides
+    // the fields we fill in, the rest keep the values G_FMT returned.
+    mVideoInfo->format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    ret = v4lIoctl(mCameraHandle, VIDIOC_G_FMT, &mVideoInfo->format);
+    if (ret < 0) {
+        CAMHAL_LOGEB("VIDIOC_G_FMT Failed: %s", strerror(errno));
+    }
+
+    mVideoInfo->width = width;
+    mVideoInfo->height = height;
+    mVideoInfo->framesizeIn = (width * height << 1);  // 2 bytes/pixel (YUV422)
+    mVideoInfo->formatIn = pix_format;
+
+    mVideoInfo->format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    mVideoInfo->format.fmt.pix.width = width;
+    mVideoInfo->format.fmt.pix.height = height;
+    mVideoInfo->format.fmt.pix.pixelformat = pix_format;
+
+    // Interlaced input (anything but V4L2_FIELD_NONE) requires deinterlacing.
+    mDeinterlaceEnabled = (mVideoInfo->format.fmt.pix.field == V4L2_FIELD_NONE) ? 0 : 1;
+
+    ret = v4lIoctl(mCameraHandle, VIDIOC_S_FMT, &mVideoInfo->format);
+    if (ret < 0) {
+        CAMHAL_LOGEB("VIDIOC_S_FMT Failed: %s", strerror(errno));
+        return ret;
+    }
+// v4lIoctl(mCameraHandle, VIDIOC_G_FMT, &mVideoInfo->format);
+    CAMHAL_LOGDB("VIDIOC_G_FMT : WxH = %dx%d", mVideoInfo->format.fmt.pix.width, mVideoInfo->format.fmt.pix.height);
+    CAMHAL_LOGD("### Using: WxH = %dx%d pixelformat=0x%x ", mVideoInfo->format.fmt.pix.width, mVideoInfo->format.fmt.pix.height, mVideoInfo->format.fmt.pix.pixelformat);
+    CAMHAL_LOGD("### Using: bytesperline=%d sizeimage=%d colorspace=0x%x", mVideoInfo->format.fmt.pix.bytesperline, mVideoInfo->format.fmt.pix.sizeimage, mVideoInfo->format.fmt.pix.colorspace);
+    LOG_FUNCTION_NAME_EXIT;
+    return ret;
+}
+
+/**
+ * Re-arm the preview pipeline after an image capture: restore the preview
+ * format, re-request the DMABUF preview buffers, queue them (and the
+ * decoder's output slots when one is in use) and restart streaming.
+ * Called from takePicture() with mLock held.
+ */
+status_t V4LCameraAdapter::restartPreview ()
+{
+    status_t ret = NO_ERROR;
+    int width = 0;
+    int height = 0;
+
+    LOG_FUNCTION_NAME;
+
+    //configure for preview size and pixel format.
+    mParams.getPreviewSize(&width, &height);
+
+    ret = v4lSetFormat (width, height, mPixelFormat);
+    if (ret < 0) {
+        CAMHAL_LOGEB("v4lSetFormat Failed: %s", strerror(errno));
+        goto EXIT;
+    }
+
+    ret = v4lInitDmaBuf(mPreviewBufferCount, width, height);
+    if (ret < 0) {
+        // Was mislabeled "v4lInitMmap Failed" — this is the DMABUF path.
+        CAMHAL_LOGEB("v4lInitDmaBuf Failed: %s", strerror(errno));
+        goto EXIT;
+    }
+
+    for (int i = 0; i < mPreviewBufferCountQueueable; i++) {
+
+        v4l2_buffer buf;
+        // Zero the struct first: VIDIOC_QBUF may reject (or misread)
+        // uninitialized fields such as flags/reserved on some drivers.
+        memset(&buf, 0, sizeof(v4l2_buffer));
+        buf.index = i;
+        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+        buf.memory = V4L2_MEMORY_DMABUF;
+        buf.m.fd = mCameraBuffers[i].dma_buf_fd;
+        buf.length = mCameraBuffers[i].size;
+
+        ret = v4lIoctl(mCameraHandle, VIDIOC_QBUF, &buf);
+        if (ret < 0) {
+            CAMHAL_LOGEA("VIDIOC_QBUF Failed");
+            goto EXIT;
+        }
+        nQueued++;
+    }
+
+    if (isNeedToUseDecoder()) {
+        for (int i = 0; i < mPreviewBufferCountQueueable; i++) {
+            mDecoder->queueOutputBuffer(i);
+            CAMHAL_LOGV("Queued output buffer with id=%d ", i);
+        }
+        mDecoder->start();
+    }
+
+    ret = v4lStartStreaming();
+    CAMHAL_LOGDA("Ready for preview....");
+EXIT:
+    LOG_FUNCTION_NAME_EXIT;
+    return ret;
+}
+
+/*--------------------Camera Adapter Functions-----------------------------*/
+/**
+ * One-time adapter initialization: open the V4L2 device node for this
+ * sensor, verify capture + streaming capabilities, create the memory
+ * manager and reset the adapter state flags.
+ * @param caps camera properties (unused here)
+ * @return NO_ERROR on success; NO_MEMORY / BAD_VALUE on failure
+ */
+status_t V4LCameraAdapter::initialize(CameraProperties::Properties* caps)
+{
+    char value[PROPERTY_VALUE_MAX];
+
+    LOG_FUNCTION_NAME;
+
+    android::AutoMutex lock(mLock);
+
+    property_get("debug.camera.showfps", value, "0");
+    mDebugFps = atoi(value);
+
+    int ret = NO_ERROR;
+
+    // Allocate memory for video info structure
+    mVideoInfo = (struct VideoInfo *) calloc (1, sizeof (struct VideoInfo));
+    if(!mVideoInfo) {
+        ret = NO_MEMORY;
+        goto EXIT;
+    }
+
+    // Non-blocking open: GetFrame() polls DQBUF with EAGAIN instead of
+    // sleeping inside the driver.
+    if ((mCameraHandle = open(device[mSensorIndex], O_RDWR | O_NONBLOCK) ) == -1) {
+        CAMHAL_LOGEB("Error while opening handle to V4L2 Camera: %s", strerror(errno));
+        ret = BAD_VALUE;
+        goto EXIT;
+    }
+
+    ret = v4lIoctl (mCameraHandle, VIDIOC_QUERYCAP, &mVideoInfo->cap);
+    if (ret < 0) {
+        CAMHAL_LOGEA("Error when querying the capabilities of the V4L Camera");
+        ret = BAD_VALUE;
+        goto EXIT;
+    }
+
+    if ((mVideoInfo->cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0) {
+        CAMHAL_LOGEA("Error while adapter initialization: video capture not supported.");
+        ret = BAD_VALUE;
+        goto EXIT;
+    }
+
+    if (!(mVideoInfo->cap.capabilities & V4L2_CAP_STREAMING)) {
+        CAMHAL_LOGEA("Error while adapter initialization: Capture device does not support streaming i/o");
+        ret = BAD_VALUE;
+        goto EXIT;
+    }
+
+    if(!mMemoryManager.get()) {
+        /// Create Memory Manager
+        mMemoryManager = new MemoryManager();
+        if( ( NULL == mMemoryManager.get() ) || ( mMemoryManager->initialize() != NO_ERROR)) {
+            CAMHAL_LOGEA("Unable to create or initialize MemoryManager");
+            // Previously this path fell through with ret == NO_ERROR and the
+            // failure was reported as success to the caller.
+            ret = NO_MEMORY;
+            goto EXIT;
+        }
+    }
+
+    // Initialize flags
+    mPreviewing = false;
+    mVideoInfo->isStreaming = false;
+    mRecording = false;
+    mCapturing = false;
+EXIT:
+    LOG_FUNCTION_NAME_EXIT;
+    return ret;
+}
+
+/**
+ * Return a consumed frame buffer to the capture pipeline.
+ * For IMAGE frames this just signals end-of-capture. For preview frames the
+ * buffer is located in mPreviewBufs and either handed back to the decoder's
+ * output queue (decoded formats) or re-queued to V4L2 directly (DMABUF).
+ */
+status_t V4LCameraAdapter::fillThisBuffer(CameraBuffer *frameBuf, CameraFrame::FrameType frameType)
+{
+    status_t ret = NO_ERROR;
+    int idx = -1;   // sentinel; resolved to the matching preview slot below
+
+    LOG_FUNCTION_NAME;
+
+    android::AutoMutex lock(mLock);
+
+    if ( frameType == CameraFrame::IMAGE_FRAME) { //(1 > mCapturedFrames)
+        // Signal end of image capture
+        if ( NULL != mEndImageCaptureCallback) {
+            CAMHAL_LOGDB("===========Signal End Image Capture==========");
+            // Drop the adapter lock around the callback: the callee may
+            // re-enter the adapter and would otherwise deadlock.
+            mLock.unlock();
+            mEndImageCaptureCallback(mEndCaptureData);
+            mLock.lock();
+        }
+        return ret;
+    }
+
+    if ( !mVideoInfo->isStreaming ) {
+        return ret;
+    }
+
+    for (int xx = 0; xx < NB_BUFFER; xx++){
+        if (mPreviewBufs[xx] == frameBuf){
+            idx = xx;
+            break;
+        }
+    }
+    // idx previously started at 0, so an unknown buffer could never trip the
+    // (idx == NB_BUFFER)/(idx < 0) guards and slot 0 was silently re-queued.
+    if (idx < 0) {
+        CAMHAL_LOGEB("Wrong index = %d. What do i do? What do i do?",idx);
+        return ret;
+    }
+    if (isNeedToUseDecoder()) {
+        // Hand the display buffer back to the decoder as a free output slot.
+        for (size_t i = 0; i < mOutBuffers.size(); i++) {
+            android::sp<MediaBuffer>& outBuffer = mOutBuffers.editItemAt(i);
+            CameraBuffer* buffer = static_cast<CameraBuffer*>(outBuffer->buffer);
+            if (buffer == frameBuf) {
+                mDecoder->queueOutputBuffer(outBuffer->bufferId);
+                break;
+            }
+        }
+
+        // If the decoder has finished with an input (V4L) buffer, re-queue
+        // that one to the driver.
+        int inIndex = -1;
+        ret = mDecoder->dequeueInputBuffer(inIndex);
+
+        if (ret == NO_ERROR) {
+            ret = returnBufferToV4L(inIndex);
+        }
+
+    } else {
+        v4l2_buffer buf;
+        // Zero-init so no stack garbage reaches the driver in reserved/flags.
+        memset(&buf, 0, sizeof(v4l2_buffer));
+        buf.index = idx;
+        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+        buf.memory = V4L2_MEMORY_DMABUF;
+        buf.m.fd = mCameraBuffers[idx].dma_buf_fd;
+        buf.length = mCameraBuffers[idx].size;
+
+        CAMHAL_LOGD("Will return buffer to V4L with id=%d", idx);
+        ret = v4lIoctl(mCameraHandle, VIDIOC_QBUF, &buf);
+        if (ret < 0) {
+            CAMHAL_LOGEA("VIDIOC_QBUF Failed");
+            goto EXIT;
+        }
+
+        nQueued++;
+    }
+
+EXIT:
+    LOG_FUNCTION_NAME_EXIT;
+    return ret;
+
+}
+
+/**
+ * Program the current frame rate (mFrameRate, scaled by VFR_SCALE) into the
+ * driver via VIDIOC_S_PARM and log the rate the driver actually accepted.
+ * @return NO_ERROR on success, BAD_VALUE for an unset/invalid rate,
+ *         or the failing ioctl result
+ */
+status_t V4LCameraAdapter::applyFpsValue() {
+    struct v4l2_streamparm streamParams;
+    status_t ret = NO_ERROR;
+    const int fps = mFrameRate / CameraHal::VFR_SCALE;
+
+    // Guard against an unset frame rate (mFrameRate starts at 0): a zero
+    // denominator is an invalid v4l2_fract.
+    if (fps <= 0) {
+        CAMHAL_LOGEB("Invalid frame rate %d - not applying VIDIOC_S_PARM", mFrameRate);
+        return BAD_VALUE;
+    }
+
+    // Zero-init: reserved fields must not carry stack garbage into S_PARM.
+    memset(&streamParams, 0, sizeof(streamParams));
+    streamParams.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    streamParams.parm.capture.capability = V4L2_CAP_TIMEPERFRAME;
+    streamParams.parm.capture.capturemode = V4L2_MODE_HIGHQUALITY;
+    streamParams.parm.capture.timeperframe.denominator = fps;
+    streamParams.parm.capture.timeperframe.numerator= 1;
+    ret = v4lIoctl(mCameraHandle, VIDIOC_S_PARM, &streamParams);
+    if (ret < 0) {
+        CAMHAL_LOGEB(" VIDIOC_S_PARM Failed: %s", strerror(errno));
+        return ret;
+    }
+    // The driver may round the rate; report what it actually applied.
+    int actualFps = streamParams.parm.capture.timeperframe.denominator / streamParams.parm.capture.timeperframe.numerator;
+    CAMHAL_LOGDB("Actual FPS set is : %d.", actualFps);
+    return NO_ERROR;
+}
+
+/**
+ * Apply a new parameter set. While neither previewing nor capturing, the
+ * preview size is programmed into the driver immediately and the preview
+ * fps range is parsed; the max of the range becomes the target frame rate
+ * (applied later by v4lStartStreaming()/applyFpsValue()).
+ */
+status_t V4LCameraAdapter::setParameters(const android::CameraParameters &params)
+{
+    status_t ret = NO_ERROR;
+    int width, height;
+    int minFps = 0, maxFps = 0;
+
+    LOG_FUNCTION_NAME;
+
+    android::AutoMutex lock(mLock);
+
+    if(!mPreviewing && !mCapturing) {
+        params.getPreviewSize(&width, &height);
+        CAMHAL_LOGDB("Width * Height %d x %d format 0x%x", width, height, mPixelFormat);
+        ret = v4lSetFormat( width, height, mPixelFormat);
+        if (ret < 0) {
+            CAMHAL_LOGEB(" VIDIOC_S_FMT Failed: %s", strerror(errno));
+            goto EXIT;
+        }
+        // Range format is "min,max" (see KEY_PREVIEW_FRAME_RATE_RANGE).
+        // NOTE(review): asserts on a malformed/missing range — confirm the
+        // framework always supplies this key before preview starts.
+        const char *frameRateRange = params.get(TICameraParameters::KEY_PREVIEW_FRAME_RATE_RANGE);
+        bool fpsRangeParsed = CameraHal::parsePair(frameRateRange, &minFps, &maxFps, ',');
+        CAMHAL_ASSERT(fpsRangeParsed);
+        CAMHAL_LOGD("Current fps is %d new fps is (%d,%d)", mFrameRate, minFps, maxFps);
+        if (maxFps != mFrameRate) {
+            mFrameRate = maxFps;
+        }
+
+    }
+
+    // Udpate the current parameter set
+    mParams = params;
+
+EXIT:
+    LOG_FUNCTION_NAME_EXIT;
+    return ret;
+}
+
+
+/** Copy the most recently applied parameter set out to the caller. */
+void V4LCameraAdapter::getParameters(android::CameraParameters& params)
+{
+    LOG_FUNCTION_NAME;
+
+    android::AutoMutex guard(mLock);
+
+    // Hand back a snapshot of the cached parameters under the adapter lock.
+    params = mParams;
+
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+
+///API to give the buffers to Adapter
+/**
+ * Register externally allocated buffers with the adapter for the given
+ * camera mode. Preview and video share the preview buffer path; capture
+ * has its own; measurement mode needs no buffers here.
+ * @param queueable number of buffers the provider allows us to keep queued
+ */
+status_t V4LCameraAdapter::useBuffers(CameraMode mode, CameraBuffer *bufArr, int num, size_t length, unsigned int queueable)
+{
+    status_t ret = NO_ERROR;
+
+    LOG_FUNCTION_NAME;
+
+    android::AutoMutex guard(mLock);
+
+    if (mode == CAMERA_PREVIEW || mode == CAMERA_VIDEO) {
+        //@warn Video capture is not fully supported yet; it re-uses preview
+        mPreviewBufferCountQueueable = queueable;
+        ret = UseBuffersPreview(bufArr, num);
+    } else if (mode == CAMERA_IMAGE_CAPTURE) {
+        mCaptureBufferCountQueueable = queueable;
+        ret = UseBuffersCapture(bufArr, num);
+    }
+    // CAMERA_MEASUREMENT and any other mode: nothing to register.
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+/**
+ * Register the still-capture buffers supplied by the provider and seed the
+ * availability ref-counts (queueable buffers start at 0, the rest at 1
+ * because the provider still holds them).
+ *
+ * NOTE(review): the keys added to mCaptureBufs come from bufArr, but the
+ * availability map is built from the mCaptureBuffers member — confirm the
+ * two refer to the same allocation (populated elsewhere) or this is a bug.
+ */
+status_t V4LCameraAdapter::UseBuffersCapture(CameraBuffer *bufArr, int num) {
+    int ret = NO_ERROR;
+
+    LOG_FUNCTION_NAME;
+    if(NULL == bufArr) {
+        ret = BAD_VALUE;
+        goto EXIT;
+    }
+
+    for (int i = 0; i < num; i++) {
+        //Associate each Camera internal buffer with the one from Overlay
+        mCaptureBufs.add(&bufArr[i], i);
+        CAMHAL_LOGDB("capture- buff [%d] = 0x%x ",i, mCaptureBufs.keyAt(i));
+    }
+
+    mCaptureBuffersAvailable.clear();
+    for (int i = 0; i < mCaptureBufferCountQueueable; i++ ) {
+        mCaptureBuffersAvailable.add(&mCaptureBuffers[i], 0);
+    }
+
+    // initial ref count for undeqeueued buffers is 1 since buffer provider
+    // is still holding on to it
+    for (int i = mCaptureBufferCountQueueable; i < num; i++ ) {
+        mCaptureBuffersAvailable.add(&mCaptureBuffers[i], 1);
+    }
+
+    // Update the preview buffer count
+    mCaptureBufferCount = num;
+EXIT:
+    LOG_FUNCTION_NAME_EXIT;
+    return ret;
+
+}
+
+/**
+ * Register the preview buffers: request matching DMABUF buffers from the
+ * driver, remember each provider buffer in mPreviewBufs and wrap them as
+ * MediaBuffers for the decoder's output pool (when a decoder is used).
+ */
+status_t V4LCameraAdapter::UseBuffersPreview(CameraBuffer *bufArr, int num)
+{
+    int ret = NO_ERROR;
+    int width = 0, height = 0;
+
+    LOG_FUNCTION_NAME;
+
+    if(NULL == bufArr) {
+        ret = BAD_VALUE;
+        goto EXIT;
+    }
+
+    mParams.getPreviewSize(&width, &height);
+    ret = v4lInitDmaBuf(num, width, height);
+
+    mOutBuffers.clear();
+
+    if (ret == NO_ERROR) {
+        for (int i = 0; i < num; i++) {
+            //Associate each Camera internal buffer with the one from Overlay
+            mPreviewBufs[i] = &bufArr[i];
+            MediaBuffer* buffer = new MediaBuffer(i, mPreviewBufs[i]);
+            mOutBuffers.push_back(buffer);
+            // NOTE(review): assumes every bufArr entry has a matching
+            // mFrameQueue entry at this point — confirm against the caller.
+            CAMHAL_LOGDB("Preview- buff [%d] = 0x%x length=%d",i, mPreviewBufs[i], mFrameQueue.valueFor(mPreviewBufs[i])->mLength);
+        }
+        if (isNeedToUseDecoder()) {
+            mDecoder->registerOutputBuffers(&mOutBuffers);
+        }
+        // Update the preview buffer count
+        mPreviewBufferCount = num;
+    }
+EXIT:
+    LOG_FUNCTION_NAME_EXIT;
+    return ret;
+}
+
+/**
+ * Perform a still capture: stop preview streaming, reconfigure the driver
+ * for the picture size (MMAP I/O), capture one YUV422I frame, forward it to
+ * the frame subscribers (JPEG encoding happens downstream via the
+ * ENCODE_RAW_YUV422I_TO_JPEG quirk) and finally restart preview.
+ * The mLock unlock/lock dance below deliberately releases the adapter lock
+ * while waiting for the preview thread to drain.
+ */
+status_t V4LCameraAdapter::takePicture() {
+    status_t ret = NO_ERROR;
+    int width = 0;
+    int height = 0;
+    size_t yuv422i_buff_size = 0;
+    int index = 0;
+    char *fp = NULL;
+    CameraBuffer *buffer = NULL;
+    CameraFrame frame;
+
+    LOG_FUNCTION_NAME;
+
+    android::AutoMutex lock(mLock);
+
+    if (mCapturing) {
+        CAMHAL_LOGEA("Already Capture in Progress...");
+        return BAD_VALUE;
+    }
+
+    // Ask the preview loop to stop, then wait (unlocked) for it to signal.
+    mPreviewing = false;
+    mLock.unlock();
+
+    {
+        android::AutoMutex stopLock(mStopLock);
+        CAMHAL_LOGW("Wait till preview stops");
+        ret = mStopCondition.waitRelative(mStopLock, 100000000);
+        if (ret != NO_ERROR) {
+            CAMHAL_LOGW("Timeout waiting for preview stop");
+        }
+    }
+
+    if (isNeedToUseDecoder()) {
+        mDecoder->stop();
+        mDecoder->flush();
+    }
+    mLock.lock();
+    mCapturing = true;
+    mPreviewing = false;
+
+    // Stop preview streaming
+    ret = v4lStopStreaming(mPreviewBufferCount);
+    if (ret < 0 ) {
+        CAMHAL_LOGEB("v4lStopStreaming Failed: %s", strerror(errno));
+        goto EXIT;
+    }
+
+    //configure for capture image size and pixel format.
+    mParams.getPictureSize(&width, &height);
+    CAMHAL_LOGDB("Image Capture Size WxH = %dx%d",width,height);
+    yuv422i_buff_size = width * height * 2;
+
+    ret = v4lSetFormat (width, height, DEFAULT_CAPTURE_FORMAT);
+    if (ret < 0) {
+        CAMHAL_LOGEB("v4lSetFormat Failed: %s", strerror(errno));
+        goto EXIT;
+    }
+
+    ret = v4lInitMmap(mCaptureBufferCount, width, height);
+    if (ret < 0) {
+        CAMHAL_LOGEB("v4lInitMmap Failed: %s", strerror(errno));
+        goto EXIT;
+    }
+
+    for (int i = 0; i < mCaptureBufferCountQueueable; i++) {
+
+        v4l2_buffer buf;
+        buf.index = i;
+        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+        buf.memory = V4L2_MEMORY_MMAP;
+
+        ret = v4lIoctl(mCameraHandle, VIDIOC_QBUF, &buf);
+        if (ret < 0) {
+            CAMHAL_LOGEA("VIDIOC_QBUF Failed");
+            return BAD_VALUE;
+        }
+        nQueued++;
+    }
+
+    ret = v4lStartStreaming();
+    if (ret < 0) {
+        CAMHAL_LOGEB("v4lStartStreaming Failed: %s", strerror(errno));
+        goto EXIT;
+    }
+
+    CAMHAL_LOGDA("Streaming started for Image Capture");
+
+    //get the frame and send to encode as JPG
+    int filledLen;
+    CAMHAL_LOGD("*********Will dequeue frame for Image Capture***********");
+
+    fp = this->GetFrame(index, filledLen);
+    if (!fp) {
+        CAMHAL_LOGEA("!!! Captured frame is NULL !!!!");
+        return BAD_VALUE;
+    }
+
+
+
+    CAMHAL_LOGDA("::Capture Frame received from V4L::");
+    buffer = mCaptureBufs.keyAt(index);
+    CAMHAL_LOGVB("## captureBuf[%d] = 0x%x, yuv422i_buff_size=%d fill_length=%d", index, buffer->opaque, yuv422i_buff_size, filledLen);
+
+    //copy the yuv422i data to the image buffer.
+    memcpy(buffer->opaque, fp, filledLen);
+
+#ifdef DUMP_CAPTURE_FRAME
+    //dump the YUV422 buffer in to a file
+    //a folder should have been created at /data/misc/camera/raw/
+    {
+        int fd =-1;
+        fd = open("/data/misc/camera/raw/captured_yuv422i_dump.yuv", O_CREAT | O_WRONLY | O_SYNC | O_TRUNC, 0777);
+        if(fd < 0) {
+            // strerror() takes errno, not the failed descriptor (-1).
+            CAMHAL_LOGEB("Unable to open file: %s", strerror(errno));
+        }
+        else {
+            write(fd, fp, yuv422i_buff_size );
+            close(fd);
+            CAMHAL_LOGDB("::Captured Frame dumped at /data/misc/camera/raw/captured_yuv422i_dump.yuv::");
+        }
+    }
+#endif
+
+    CAMHAL_LOGDA("::sending capture frame to encoder::");
+    frame.mFrameType = CameraFrame::IMAGE_FRAME;
+    frame.mBuffer = buffer;
+    frame.mLength = yuv422i_buff_size;
+    frame.mWidth = width;
+    frame.mHeight = height;
+    frame.mAlignment = width*2;
+    frame.mOffset = 0;
+    frame.mTimestamp = systemTime(SYSTEM_TIME_MONOTONIC);
+    frame.mFrameMask = (unsigned int)CameraFrame::IMAGE_FRAME;
+    frame.mQuirks |= CameraFrame::ENCODE_RAW_YUV422I_TO_JPEG;
+    frame.mQuirks |= CameraFrame::FORMAT_YUV422I_YUYV;
+
+    ret = setInitFrameRefCount(frame.mBuffer, frame.mFrameMask);
+    if (ret != NO_ERROR) {
+        CAMHAL_LOGDB("Error in setInitFrameRefCount %d", ret);
+    } else {
+        ret = sendFrameToSubscribers(&frame);
+    }
+
+    // Stop streaming after image capture
+    ret = v4lStopStreaming(mCaptureBufferCount);
+    if (ret < 0 ) {
+        CAMHAL_LOGEB("v4lStopStreaming Failed: %s", strerror(errno));
+        goto EXIT;
+    }
+    ret = restartPreview();
+EXIT:
+    LOG_FUNCTION_NAME_EXIT;
+    return ret;
+}
+
+/**
+ * Finish an image-capture sequence: hand the still buffers back to their
+ * provider, drop the capture bookkeeping and mark the adapter as previewing.
+ */
+status_t V4LCameraAdapter::stopImageCapture()
+{
+    LOG_FUNCTION_NAME;
+
+    android::AutoMutex guard(mLock);
+
+    // Give the image buffers back to the provider, if one is registered.
+    if (mReleaseImageBuffersCallback != NULL) {
+        mReleaseImageBuffersCallback(mReleaseData);
+    }
+    mCaptureBufs.clear();
+
+    // Capture done; the preview path owns the pipeline again.
+    mCapturing = false;
+    mPreviewing = true;
+
+    LOG_FUNCTION_NAME_EXIT;
+    return NO_ERROR;
+}
+
+/** Autofocus is not implemented for V4L cameras; succeed as a no-op. */
+status_t V4LCameraAdapter::autoFocus()
+{
+    LOG_FUNCTION_NAME;
+    LOG_FUNCTION_NAME_EXIT;
+    return NO_ERROR;
+}
+
+/**
+ * Start the preview pipeline: bring up the M2M device, queue the DMABUF
+ * preview buffers (and the decoder's output slots when one is in use),
+ * start V4L streaming and spawn the preview thread that pumps frames.
+ * @return NO_ERROR on success; BAD_VALUE if preview is already running
+ */
+status_t V4LCameraAdapter::startPreview()
+{
+    status_t ret = NO_ERROR;
+
+    LOG_FUNCTION_NAME;
+
+    android::AutoMutex lock(mLock);
+
+    if(mPreviewing) {
+        ret = BAD_VALUE;
+        goto EXIT;
+    }
+
+    startM2MDevice();
+
+    for (int i = 0; i < mPreviewBufferCountQueueable; i++) {
+        v4l2_buffer buf;
+
+        memset (&buf, 0, sizeof (struct v4l2_buffer));
+        memset (&mVideoInfo->buf, 0, sizeof (struct v4l2_buffer));
+
+        mVideoInfo->buf.index = i;
+        mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+        mVideoInfo->buf.memory = V4L2_MEMORY_DMABUF;
+
+        // QUERYBUF result is only cached in mVideoInfo->buf; the QBUF below
+        // uses the locally built descriptor.
+        ret = v4lIoctl (mCameraHandle, VIDIOC_QUERYBUF, &mVideoInfo->buf);
+        if (ret < 0) {
+            CAMHAL_LOGEB("Unable to query buffer (%s)", strerror(errno));
+            return ret;
+        }
+
+        buf.index = i;
+        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+        buf.memory = V4L2_MEMORY_DMABUF;
+        buf.m.fd = mCameraBuffers[i].dma_buf_fd;
+        // NOTE(review): buf.length deliberately left unset (0) here, unlike
+        // restartPreview() which sets it — confirm which the driver expects.
+// buf.length = mCameraBuffers[i].size;
+
+        ret = v4lIoctl(mCameraHandle, VIDIOC_QBUF, &buf);
+        if (ret < 0) {
+            CAMHAL_LOGEA("VIDIOC_QBUF Failed");
+            goto EXIT;
+        }
+        nQueued++;
+    }
+
+    if (isNeedToUseDecoder()) {
+        // Pre-queue every displayable buffer as decoder output, then start it.
+        for (int i = 0; i < mPreviewBufferCountQueueable; i++) {
+            mDecoder->queueOutputBuffer(i);
+            CAMHAL_LOGV("Queued output buffer with id=%d ", i);
+        }
+        mDecoder->start();
+    }
+    ret = v4lStartStreaming();
+
+    // Create and start preview thread for receiving buffers from V4L Camera
+    if(!mCapturing) {
+        mPreviewThread = new PreviewThread(this);
+        CAMHAL_LOGDA("Created preview thread");
+    }
+
+    //Update the flag to indicate we are previewing
+    mPreviewing = true;
+    mCapturing = false;
+
+EXIT:
+    LOG_FUNCTION_NAME_EXIT;
+    return ret;
+}
+
+/**
+ * Stop the preview pipeline: join the preview thread (before taking mLock,
+ * since the thread acquires it while pumping frames), stop the decoder and
+ * V4L streaming, tear down the M2M device and free the camera buffers.
+ * @return NO_ERROR, or NO_INIT if preview was not running
+ */
+status_t V4LCameraAdapter::stopPreview()
+{
+    int ret = NO_ERROR;
+
+    LOG_FUNCTION_NAME;
+
+    // Guard against stopPreview() without a successful startPreview().
+    if (mPreviewThread != 0) {
+        mPreviewThread->requestExitAndWait();
+        mPreviewThread.clear();
+    }
+
+    android::AutoMutex lock(mLock);
+
+    if(!mPreviewing) {
+        return NO_INIT;
+    }
+    mPreviewing = false;
+    if (isNeedToUseDecoder()) {
+        android::AutoMutex lock(mStopLock);
+        mStopCondition.waitRelative(mStopLock, 100000000);
+        mDecoder->stop();
+        mDecoder->flush();
+    }
+
+    ret = v4lStopStreaming(mPreviewBufferCount);
+    if (ret < 0) {
+        CAMHAL_LOGEB("StopStreaming: FAILED: %s", strerror(errno));
+    }
+
+    stopM2MDevice();
+
+    mMemoryManager->freeBufferList(mCameraBuffers);
+
+    nQueued = 0;
+    nDequeued = 0;
+    mFramesWithEncoder = 0;
+
+    // NOTE: the explicit mLock.unlock() that used to sit here caused a
+    // double unlock once the AutoMutex destructor ran; the scoped lock
+    // releases mLock on return.
+
+    LOG_FUNCTION_NAME_EXIT;
+    return ret;
+}
+
+
+/**
+ * Debug helper: dump the first captured H.264 frames (up to 30 files) under
+ * /data/tmp as dump_NNN.h264.
+ * @param buff       frame payload to write
+ * @param buff_size  number of bytes to write
+ */
+void saveFile(unsigned char* buff, int buff_size) {
+    static int counter = 1;
+    int fd = -1;
+    char fn[256];
+
+    LOG_FUNCTION_NAME;
+    if (counter > 30) {
+        return;
+    }
+    //dump h264 buffer
+    counter++;
+    sprintf(fn, "/data/tmp/dump_%03d.h264", counter);
+    CAMHAL_LOGEB("Dumping h264 frame to a file : %s.", fn);
+
+    fd = open(fn, O_CREAT | O_WRONLY | O_SYNC | O_TRUNC, 0777);
+    if(fd < 0) {
+        // strerror() takes errno; the old code passed the failed fd (-1).
+        CAMHAL_LOGE("Unable to open file %s: %s", fn, strerror(errno));
+        return;
+    }
+
+    write(fd, buff, buff_size );
+    close(fd);
+
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ * Dequeue one captured frame from the driver (non-blocking poll on
+ * VIDIOC_DQBUF) and map the returned dma-buf fd back to a CameraBuffer.
+ * @param index      out: slot in mCameraBuffers that was dequeued
+ * @param filledLen  out: payload size in bytes (v4l2_buffer.bytesused)
+ * @return pointer to the CameraBuffer entry, or NULL on stop/failure
+ */
+char * V4LCameraAdapter::GetFrame(int &index, int &filledLen)
+{
+    int ret = NO_ERROR;
+    LOG_FUNCTION_NAME;
+
+    v4l2_buffer buf;
+    // Zero-init: DQBUF must not see stack garbage in reserved/flags fields.
+    memset(&buf, 0, sizeof(v4l2_buffer));
+    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    buf.memory = V4L2_MEMORY_DMABUF;
+
+    /* DQ */
+    // Some V4L drivers, notably uvc, protect each incoming call with
+    // a driver-wide mutex. If we use poll() or blocking VIDIOC_DQBUF ioctl
+    // here then we sometimes would run into a deadlock on VIDIO_QBUF ioctl.
+    while(true) {
+        if(!mVideoInfo->isStreaming) {
+            return NULL;
+        }
+
+        ret = v4lIoctl(mCameraHandle, VIDIOC_DQBUF, &buf);
+        if((ret == 0) || (errno != EAGAIN)) {
+            break;
+        }
+    }
+
+    if (ret < 0) {
+        CAMHAL_LOGEA("GetFrame: VIDIOC_DQBUF Failed");
+        return NULL;
+    }
+
+    // Map the dequeued dma-buf fd back to our buffer slot.
+    index = -1;
+    for (int i = 0; i < NB_BUFFER; i ++) {
+        if (buf.m.fd == mCameraBuffers[i].dma_buf_fd) {
+            index = i;
+            break;
+        }
+    }
+    if (index == -1) {
+        CAMHAL_LOGEB("un-identified fd\n");
+        return NULL;
+    }
+
+    mCameraBuffers[index].actual_size = buf.bytesused;
+    // Report the payload size to the caller; previously filledLen was left
+    // untouched and takePicture() memcpy'd an uninitialized byte count.
+    filledLen = buf.bytesused;
+
+    debugShowFPS();
+    LOG_FUNCTION_NAME_EXIT;
+    return (char *)(&mCameraBuffers[index]);
+}
+
+
+
+
+
+//API to get the frame size required to be allocated. This size is used to override the size passed
+//by camera service when VSTAB/VNF is turned ON for example
+//API to get the frame size required to be allocated. This size is used to override the size passed
+//by camera service when VSTAB/VNF is turned ON for example
+status_t V4LCameraAdapter::getFrameSize(size_t &width, size_t &height)
+{
+    status_t ret = NO_ERROR;
+    LOG_FUNCTION_NAME;
+
+    android::AutoMutex lock(mLock);
+
+    // Just return the current preview size, nothing more to do here.
+    // NOTE(review): casting size_t* to int* only writes the low 4 bytes —
+    // fine on 32-bit builds, incorrect on LP64; confirm target ABI.
+    mParams.getPreviewSize(( int * ) &width,( int * ) &height);
+
+    // TODO: This will reside until correct port reconfiguration handling will done.
+    // The decoder may require padded (aligned) dimensions for its output.
+    if (isNeedToUseDecoder()) {
+        mDecoder->getPaddedDimensions(width, height);
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+/**
+ * Meta-data frames are not produced by this adapter.
+ * NOTE(review): dataFrameSize is intentionally left untouched — confirm
+ * callers initialize it (otherwise they read an indeterminate value).
+ */
+status_t V4LCameraAdapter::getFrameDataSize(size_t &dataFrameSize, size_t bufferCount)
+{
+    android::AutoMutex lock(mLock);
+    // We don't support meta data, so simply return
+    return NO_ERROR;
+}
+
+/**
+ * Fill in the geometry and byte size of a still-capture buffer for the
+ * current picture size, assuming the default YUV422I capture format
+ * (2 bytes per pixel, rows aligned to the row byte width).
+ */
+status_t V4LCameraAdapter::getPictureBufferSize(CameraFrame &frame, size_t bufferCount)
+{
+    LOG_FUNCTION_NAME;
+
+    android::AutoMutex guard(mLock);
+
+    int w = 0;
+    int h = 0;
+    const int kBytesPerPixel = 2; // YUV422i; default capture pixel format
+
+    mParams.getPictureSize(&w, &h);
+
+    frame.mWidth = w;
+    frame.mHeight = h;
+    frame.mLength = w * h * kBytesPerPixel;
+    frame.mAlignment = w * kBytesPerPixel;
+
+    CAMHAL_LOGDB("Picture size: W x H = %u x %u (size=%u bytes, alignment=%u bytes)",
+                 frame.mWidth, frame.mHeight, frame.mLength, frame.mAlignment);
+    LOG_FUNCTION_NAME_EXIT;
+    return NO_ERROR;
+}
+
+/**
+ * Update the measured frame rate once every FPS_PERIOD frames and fold the
+ * result into a cumulative moving average (mFPS) across all periods.
+ */
+status_t V4LCameraAdapter::recalculateFPS()
+{
+    mFrameCount++;
+
+    // Only recompute at the end of each measurement window.
+    if ( ( mFrameCount % FPS_PERIOD ) != 0 ) {
+        return NO_ERROR;
+    }
+
+    const nsecs_t now = systemTime();
+    const nsecs_t diff = now - mLastFPSTime;
+    const float currentFPS = ((mFrameCount - mLastFrameCount) * float(s2ns(1))) / diff;
+    mLastFPSTime = now;
+    mLastFrameCount = mFrameCount;
+
+    if ( 1 == mIter ) {
+        mFPS = currentFPS;
+    } else {
+        // cumulative moving average over all measurement periods so far
+        mFPS = mLastFPS + (currentFPS - mLastFPS)/mIter;
+    }
+
+    mLastFPS = mFPS;
+    mIter++;
+
+    return NO_ERROR;
+}
+
+/** Orientation/tilt events are ignored by the V4L adapter (no-op). */
+void V4LCameraAdapter::onOrientationEvent(uint32_t orientation, uint32_t tilt)
+{
+    LOG_FUNCTION_NAME;
+
+    // Keep the lock acquisition so call ordering matches the other
+    // adapter entry points, even though there is nothing to do.
+    android::AutoMutex guard(mLock);
+
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ * Select the V4L capture pixel format and matching decoder from the
+ * camera.v4l.mode system property:
+ *   0 - MJPEG with HW decoding, 1 - MJPEG with SW decoding,
+ *   2 - H.264 with HW decoding, 3 (default) - raw YUYV, no decoder.
+ */
+void V4LCameraAdapter::setupWorkingMode() {
+    char value[PROPERTY_VALUE_MAX];
+
+    property_get("camera.v4l.mode", value, "3");
+    const int v4lMode = atoi(value);
+
+    // Drop any decoder left over from a previous mode selection.
+    if (mDecoder) {
+        delete mDecoder;
+        mDecoder = NULL;
+    }
+
+    if (v4lMode == 0) {
+        mPixelFormat = V4L2_PIX_FMT_MJPEG;
+        mCameraHal->setExternalLocking(true);
+        mDecoder = DecoderFactory::createDecoderByType(DecoderType_MJPEG, false);
+        CAMHAL_LOGI("Using V4L preview format: V4L2_PIX_FMT_MJPEG with HW decoding");
+    } else if (v4lMode == 1) {
+        mPixelFormat = V4L2_PIX_FMT_MJPEG;
+        mCameraHal->setExternalLocking(false);
+        mDecoder = DecoderFactory::createDecoderByType(DecoderType_MJPEG, true);
+        CAMHAL_LOGI("Using V4L preview format: V4L2_PIX_FMT_MJPEG with SW decoding");
+    } else if (v4lMode == 2) {
+        mPixelFormat = V4L2_PIX_FMT_H264;
+        mCameraHal->setExternalLocking(true);
+        mDecoder = DecoderFactory::createDecoderByType(DecoderType_H264, false);
+        CAMHAL_LOGI("Using V4L preview format: V4L2_PIX_FMT_H264");
+    } else {
+        // Mode 3 and any unrecognized value: raw YUYV straight to display.
+        mCameraHal->setExternalLocking(false);
+        mPixelFormat = V4L2_PIX_FMT_YUYV;
+        CAMHAL_LOGI("Using V4L preview format: V4L2_PIX_FMT_YUYV");
+    }
+}
+
+/**
+ * Construct the adapter for a given sensor index: reset the bookkeeping
+ * counters, pick the working mode/decoder from system properties and bring
+ * up the M2M (mem-to-mem) conversion device. The actual V4L device is not
+ * opened until initialize().
+ */
+V4LCameraAdapter::V4LCameraAdapter(size_t sensor_index, CameraHal* hal)
+    :mPixelFormat(DEFAULT_PIXEL_FORMAT), mFrameRate(0), mCameraHal(hal),
+    mSkipFramesCount(0)
+{
+    LOG_FUNCTION_NAME;
+
+    char value[PROPERTY_VALUE_MAX];
+
+    // Reset frame/queue bookkeeping before any streaming starts.
+    mFramesWithEncoder = 0;
+    mDecoder = 0;
+    nQueued = 0;
+    nDequeued = 0;
+
+    setupWorkingMode();
+
+    setupM2MDevice();
+
+    // Number of frames to discard right after STREAMON (driver workaround).
+    property_get("camera.v4l.skipframes", value, "1");
+    mSkipFramesCount = atoi(value);
+
+    mSensorIndex = sensor_index;
+
+    mMemoryManager = NULL;
+
+    LOG_FUNCTION_NAME_EXIT;
+}
+
/**
 * Destructor: releases everything the adapter owns — the V4L capture fd,
 * the video info structure, the optional frame decoder, the in/out media
 * buffer lists, the M2M device, and the memory manager reference.
 */
V4LCameraAdapter::~V4LCameraAdapter()
{
    LOG_FUNCTION_NAME;

    // Close the camera handle and free the video info structure
    close(mCameraHandle);

    if (mVideoInfo)
    {
        free(mVideoInfo);
        mVideoInfo = NULL;
    }

    // delete on NULL is a no-op, so this is safe when no decoder was created.
    delete mDecoder;

    mInBuffers.clear();
    mOutBuffers.clear();

    closeM2MDevice();

    /* Free the memory manager (strong-pointer release) */
    mMemoryManager.clear();

    LOG_FUNCTION_NAME_EXIT;
}
+
+static void convertYUV422i_yuyvTouyvy(uint8_t *src, uint8_t *dest, size_t size ) {
+ //convert YUV422I yuyv to uyvy format.
+ uint32_t *bf = (uint32_t*)src;
+ uint32_t *dst = (uint32_t*)dest;
+
+ LOG_FUNCTION_NAME;
+
+ if (!src || !dest) {
+ return;
+ }
+
+ for(size_t i = 0; i < size; i = i+4)
+ {
+ dst[0] = ((bf[0] & 0x00FF00FF) << 8) | ((bf[0] & 0xFF00FF00) >> 8);
+ bf++;
+ dst++;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
/**
 * Converts one packed YUV422I (yuyv) frame to NV12 directly into a preview
 * buffer described by an android_ycbcr mapping (separate Y and UV planes
 * with their own strides, e.g. Tiler memory).
 *
 * Chroma is vertically subsampled by keeping the U/V bytes of even source
 * rows only. Widths that are not a multiple of 16 take the scalar path;
 * all other widths use the NEON path.
 *
 * NOTE(review): assumes source rows are tightly packed (stride == width*2)
 * and that ycbcr->cr is the start of the interleaved UV plane (see the TBD
 * below) — confirm against the gralloc mapping.
 */
static void convertYUV422ToNV12Tiler(unsigned char *src, android_ycbcr *ycbcr, int width, int height ) {
    //convert YUV422I to YUV420 NV12 format and copies directly to preview buffers (Tiler memory).
    unsigned char *bf = src;
    unsigned char *dst_y = (unsigned char*)ycbcr->y;
    unsigned char *dst_uv = (unsigned char*)ycbcr->cr; /* TBD: use bUVCbCrOrdering to decide cb vs cr */
    int ystride = ycbcr->ystride;
    int uvstride = ycbcr->cstride;

#ifdef PPM_PER_FRAME_CONVERSION
    static int frameCount = 0;
    static nsecs_t ppm_diff = 0;
    nsecs_t ppm_start = systemTime();
#endif

    LOG_FUNCTION_NAME;

    if (width % 16 ) {
        // Scalar fallback. First pass: copy every luma byte (Y is every
        // other byte of the yuyv stream), honoring the destination Y stride.
        for(int i = 0; i < height; i++) {
            for(int j = 0; j < width; j++) {
                *dst_y = *bf;
                dst_y++;
                bf = bf + 2;
            }
            dst_y += (ystride - width);
        }


        // Second pass: interleaved U/V bytes of every EVEN source row; odd
        // rows are skipped by the extra width*2 advance per iteration.
        bf = src;
        bf++; //UV sample
        for(int i = 0; i < height/2; i++) {
            for(int j=0; j<width; j++) {
                *dst_uv = *bf;
                dst_uv++;
                bf = bf + 2;
            }
            bf = bf + width*2;
            dst_uv = dst_uv + (uvstride - width);
        }

    } else {
        //neon conversion
        // Each loop iteration de-interleaves one row, 16 pixels at a time:
        // q0 collects the Y bytes, q1 the interleaved UV bytes. The UV store
        // is suppressed (branch to 1f) on odd rows to get 4:2:0 subsampling.
        for(int i = 0; i < height; i++) {
            int n = width;
            int skip = i & 0x1; // skip uv elements for the odd rows
            asm volatile (
                " pld [%[src], %[src_stride], lsl #2] \n\t"
                " cmp %[n], #16 \n\t"
                " blt 5f \n\t"
                "0: @ 16 pixel copy \n\t"
                " vld2.8 {q0, q1} , [%[src]]! @ q0 = yyyy.. q1 = uvuv.. \n\t"
                " @ now q0 = y q1 = uv \n\t"
                " vst1.32 {d0,d1}, [%[dst_y]]! \n\t"
                " cmp %[skip], #0 \n\t"
                " bne 1f \n\t"
                " vst1.32 {d2,d3},[%[dst_uv]]! \n\t"
                "1: @ skip odd rows for UV \n\t"
                " sub %[n], %[n], #16 \n\t"
                " cmp %[n], #16 \n\t"
                " bge 0b \n\t"
                "5: @ end \n\t"
#ifdef NEEDS_ARM_ERRATA_754319_754320
                " vmov s0,s0 @ add noop for errata item \n\t"
#endif
                : [dst_y] "+r" (dst_y), [dst_uv] "+r" (dst_uv), [src] "+r" (src), [n] "+r" (n)
                : [src_stride] "r" (width), [skip] "r" (skip)
                : "cc", "memory", "q0", "q1", "q2", "d0", "d1", "d2", "d3"
            );
            // Advance to the next destination row; UV only every other row.
            dst_y = dst_y + (ystride - width);
            if (skip == 0) {
                dst_uv = dst_uv + (uvstride - width);
            }
        } //end of for()
    }

#ifdef PPM_PER_FRAME_CONVERSION
    // Accumulate and log the average conversion time over 30-frame windows.
    ppm_diff += (systemTime() - ppm_start);
    frameCount++;

    if (frameCount >= 30) {
        ppm_diff = ppm_diff / frameCount;
        LOGD("PPM: YUV422i to NV12 Conversion(%d x %d): %llu us ( %llu ms )", width, height,
                ns2us(ppm_diff), ns2ms(ppm_diff) );
        ppm_diff = 0;
        frameCount = 0;
    }
#endif

    LOG_FUNCTION_NAME_EXIT;
}
+
/**
 * Converts one packed YUV422I (yuyv) frame to a tightly packed NV12 buffer:
 * a width*height Y plane followed immediately by the interleaved UV plane.
 * Chroma is vertically subsampled by keeping only the even rows' U/V bytes.
 * Same algorithm as convertYUV422ToNV12Tiler but with no destination
 * strides; in this file it is used for the SAVE_RAW_FRAMES debug dump.
 */
static void convertYUV422ToNV12(unsigned char *src, unsigned char *dest, int width, int height ) {
    //convert YUV422I to YUV420 NV12 format.
    unsigned char *bf = src;
    unsigned char *dst_y = dest;
    unsigned char *dst_uv = dest + (width * height);

    LOG_FUNCTION_NAME;

    if (width % 16 ) {
        // Scalar fallback: copy luma (every other byte of the yuyv stream).
        for(int i = 0; i < height; i++) {
            for(int j = 0; j < width; j++) {
                *dst_y = *bf;
                dst_y++;
                bf = bf + 2;
            }
        }

        // Chroma: U/V bytes of even rows only (odd rows skipped by the
        // extra width*2 advance).
        bf = src;
        bf++; //UV sample
        for(int i = 0; i < height/2; i++) {
            for(int j=0; j<width; j++) {
                *dst_uv = *bf;
                dst_uv++;
                bf = bf + 2;
            }
            bf = bf + width*2;
        }
    } else {
        //neon conversion
        // De-interleave one row per iteration, 16 pixels at a time; the UV
        // store is suppressed on odd rows (skip != 0) for 4:2:0 subsampling.
        for(int i = 0; i < height; i++) {
            int n = width;
            int skip = i & 0x1; // skip uv elements for the odd rows
            asm volatile (
                " pld [%[src], %[src_stride], lsl #2] \n\t"
                " cmp %[n], #16 \n\t"
                " blt 5f \n\t"
                "0: @ 16 pixel copy \n\t"
                " vld2.8 {q0, q1} , [%[src]]! @ q0 = yyyy.. q1 = uvuv.. \n\t"
                " @ now q0 = y q1 = uv \n\t"
                " vst1.32 {d0,d1}, [%[dst_y]]! \n\t"
                " cmp %[skip], #0 \n\t"
                " bne 1f \n\t"
                " vst1.32 {d2,d3},[%[dst_uv]]! \n\t"
                "1: @ skip odd rows for UV \n\t"
                " sub %[n], %[n], #16 \n\t"
                " cmp %[n], #16 \n\t"
                " bge 0b \n\t"
                "5: @ end \n\t"
#ifdef NEEDS_ARM_ERRATA_754319_754320
                " vmov s0,s0 @ add noop for errata item \n\t"
#endif
                : [dst_y] "+r" (dst_y), [dst_uv] "+r" (dst_uv), [src] "+r" (src), [n] "+r" (n)
                : [src_stride] "r" (width), [skip] "r" (skip)
                : "cc", "memory", "q0", "q1", "q2", "d0", "d1", "d2", "d3"
            );
        }
    }

    LOG_FUNCTION_NAME_EXIT;
}
+
+
+
+
+
+/* Preview Thread */
+// ---------------------------------------------------------------------------
+
/**
 * Wraps decoder output buffer `index` (from mOutBuffers) in a CameraFrame
 * and dispatches it to the registered subscribers as a preview frame, plus
 * a video frame while recording.
 *
 * @param index index into mOutBuffers of the buffer to deliver
 */
void V4LCameraAdapter::returnOutputBuffer(int index)
{
    LOG_FUNCTION_NAME;

    size_t width, height;
    int stride = 4096;  // NOTE(review): hardcoded pitch; confirm it matches the decoder output stride
    CameraFrame frame;

    getFrameSize(width, height);

    android::Mutex::Autolock slock(mSubscriberLock);

    android::sp<MediaBuffer>& buffer = mOutBuffers.editItemAt(index);

    CameraBuffer* cbuffer = static_cast<CameraBuffer*>(buffer->buffer);

    frame.mFrameType = CameraFrame::PREVIEW_FRAME_SYNC;
    frame.mBuffer = cbuffer;
    if (isNeedToUseDecoder()) {
        //We always get NV12 on out, when using decoder.
        frame.mLength = height * stride * 3 / 2;
    } else {
        frame.mLength = CameraHal::calculateBufferSize(mParams.getPreviewFormat(), width, height);
    }
    frame.mAlignment = stride;
    frame.mOffset = buffer->getOffset();
    frame.mTimestamp = buffer->getTimestamp();
    frame.mFrameMask = (unsigned int)CameraFrame::PREVIEW_FRAME_SYNC;

    if (mRecording)
    {
        // While recording the same buffer also feeds the video encoder path.
        frame.mFrameMask |= (unsigned int)CameraFrame::VIDEO_FRAME_SYNC;
        mFramesWithEncoder++;
    }

    // The ref count must be primed before subscribers can release the frame.
    int ret = setInitFrameRefCount(frame.mBuffer, frame.mFrameMask);
    if (ret != NO_ERROR) {
        CAMHAL_LOGDB("Error in setInitFrameRefCount %d", ret);
    } else {
        ret = sendFrameToSubscribers(&frame);
    }
    //debugShowFPS();
    LOG_FUNCTION_NAME_EXIT;
}
+
+status_t V4LCameraAdapter::returnBufferToV4L(int id) {
+ status_t ret = NO_ERROR;
+ v4l2_buffer buf;
+ buf.index = id;
+ buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ buf.memory = V4L2_MEMORY_DMABUF;
+ buf.m.fd = mCameraBuffers[id].dma_buf_fd;
+ buf.length = mCameraBuffers[id].size;
+
+ ret = v4lIoctl(mCameraHandle, VIDIOC_QBUF, &buf);
+ if (ret < 0) {
+ CAMHAL_LOGEA("VIDIOC_QBUF Failed 0x%x", ret);
+ return FAILED_TRANSACTION;
+ }
+
+ return NO_ERROR;
+}
+
+
/**
 * One iteration of the preview loop.
 *
 * Decoder path (MJPEG/H264): dequeues a filled capture buffer, feeds it to
 * the decoder, recycles consumed input buffers back to V4L, and delivers
 * finished output buffers to subscribers via returnOutputBuffer().
 *
 * Raw path (YUYV): converts (or M2M-deinterlaces) the captured frame into
 * the preview buffer and sends it to subscribers directly.
 *
 * @return NO_ERROR normally; BAD_VALUE when there are no subscribers or no
 *         frame could be dequeued.
 */
int V4LCameraAdapter::previewThread()
{
    status_t ret = NO_ERROR;
    int width, height;
    CameraFrame frame;
    void *y_uv[2];
    int index = 0;
    int filledLen = 0;
    int stride = 4096;
    char *fp = NULL;
    int in_index =-1, out_index = -1;

    mParams.getPreviewSize(&width, &height);

    {
        // Preview was stopped: wake the thread blocked on mStopCondition.
        android::AutoMutex lock(mLock);
        if (!mPreviewing) {
            //If stop preview is called - it can now go on.
            android::AutoMutex stopLock(mStopLock);
            mStopCondition.signal();
            return ret;
        }
    }

    {
        // No one to deliver frames to - bail out early.
        android::Mutex::Autolock lock(mSubscriberLock);
        if ( mFrameSubscribers.size() == 0 ) {
            return BAD_VALUE;
        }
    }

    if (isNeedToUseDecoder()){

        CAMHAL_LOGV("########### Decoder ###########");
        int inIndex = -1, outIndex = -1;

        // Feed any newly captured (compressed) buffer to the decoder...
        if (GetFrame(index, filledLen) != NULL) {
            CAMHAL_LOGD("Dequeued buffer from V4L with ID=%d", index);
            mDecoder->queueInputBuffer(index);
        }

        // ...recycle every input buffer the decoder has consumed...
        while (NO_ERROR == mDecoder->dequeueInputBuffer(inIndex)) {
            returnBufferToV4L(inIndex);
        }

        // ...and deliver every finished output buffer to subscribers.
        while (NO_ERROR == mDecoder->dequeueOutputBuffer(outIndex)) {
            returnOutputBuffer(outIndex);
        }

        CAMHAL_LOGV("########### End Decode ###########");
        goto EXIT;
    }
    else
    {
        fp = GetFrame(index, filledLen);

        if(!fp) {
            ret = BAD_VALUE;
            goto EXIT;
        }
        CAMHAL_LOGD("GOT IN frame with ID=%d",index);

        CameraBuffer *buffer = mPreviewBufs[index];
        int retVal = 0;
        if (mPixelFormat == V4L2_PIX_FMT_YUYV) {
            if (mDeinterlaceEnabled)
                // Interlaced source: route through the M2M device for
                // combined deinterlace + yuyv->nv12 conversion.
                retVal = processFrame(fp, width, height, width, "yuyv", (void*)(*(buffer_handle_t*)(buffer->opaque)), width, height, PAGE_SIZE, "nv12", mDeinterlaceEnabled, 1, index, &in_index, &out_index);
            else {
                // Progressive source: CPU conversion straight into the
                // gralloc-mapped preview buffer.
                convertYUV422ToNV12Tiler(reinterpret_cast<unsigned char*>(((CameraBuffer*)fp)->mapped), &(buffer->ycbcr), width, height);
                out_index = index;
            }
            if (retVal || mDeinterlaceEnabled) {
                // M2M is asynchronous: nothing ready to send this iteration.
                if (out_index == -1 || in_index == -1)
                    return 0;
            }
        }
        // NOTE(review): for non-YUYV formats out_index stays -1 and
        // mPreviewBufs[out_index] below would index with -1 - confirm this
        // branch can only be reached with V4L2_PIX_FMT_YUYV.
        CAMHAL_LOGVB("##...index= %d.;camera buffer= 0x%x; mapped= 0x%x.",index, buffer, buffer->mapped);

#ifdef SAVE_RAW_FRAMES
        unsigned char* nv12_buff = (unsigned char*) malloc(width*height*3/2);
        //Convert yuv422i to yuv420sp(NV12) & dump the frame to a file
        convertYUV422ToNV12 ( (unsigned char*)fp, nv12_buff, width, height);
        saveFile( nv12_buff, ((width*height)*3/2) );
        free (nv12_buff);
#endif

        android::Mutex::Autolock lock(mSubscriberLock);

        frame.mFrameType = CameraFrame::PREVIEW_FRAME_SYNC;
        frame.mBuffer = mPreviewBufs[out_index];
        frame.mLength = width*height*3/2;
        frame.mAlignment = stride;
        frame.mOffset = 0;
        frame.mTimestamp = systemTime(SYSTEM_TIME_MONOTONIC);
        frame.mFrameMask = (unsigned int)CameraFrame::PREVIEW_FRAME_SYNC;

        if (mRecording)
        {
            // While recording the same buffer also feeds the encoder path.
            frame.mFrameMask |= (unsigned int)CameraFrame::VIDEO_FRAME_SYNC;
            mFramesWithEncoder++;
        }

        ret = setInitFrameRefCount(frame.mBuffer, frame.mFrameMask);
        if (ret != NO_ERROR) {
            CAMHAL_LOGDB("Error in setInitFrameRefCount %d", ret);
        } else {
            ret = sendFrameToSubscribers(&frame);
        }
    }

EXIT:

    return ret;
}
+
/**
 * Sorts the device-path array ascending (lexicographic strcmp order, so
 * "/dev/video0" precedes "/dev/video1"). No-op for a NULL list.
 *
 * Swaps the POINTERS rather than strcpy()ing the string contents through a
 * fixed MAX_PATH_LENGTH temp buffer: equivalent for every caller (they only
 * read through this array afterwards) and immune to overflow if a path ever
 * exceeds the temp buffer size.
 */
void sortVideoDevices(char** video_device_list, int num_devices)
{
    if (!video_device_list)
        return;

    // Simple bubble sort - the device list is tiny (MAX_VIDEO_DEVICES).
    for (int pass = 0; pass < num_devices; pass++) {
        for (int j = 0; j < num_devices - 1; j++) {
            if (strcmp(video_device_list[j], video_device_list[j+1]) > 0) {
                char* tmp = video_device_list[j];
                video_device_list[j] = video_device_list[j+1];
                video_device_list[j+1] = tmp;
            }
        }
    }
}
+
+//scan for video devices
+void detectVideoDevice(char** video_device_list, int& num_device) {
+ char dir_path[MAX_PATH_LENGTH];
+ char* filename;
+ char** dev_list = video_device_list;
+ DIR *d;
+ struct dirent *dir;
+ int index = 0;
+
+ strcpy(dir_path, DEVICE_PATH);
+ d = opendir(dir_path);
+ if(d) {
+ //read each entry in the /dev/ and find if there is videox entry.
+ while ((dir = readdir(d)) != NULL) {
+ filename = dir->d_name;
+ if (strncmp(filename, DEVICE_NAME, 5) == 0) {
+ strcpy(dev_list[index],DEVICE_PATH);
+ strncat(dev_list[index],filename,sizeof(DEVICE_NAME));
+ index++;
+ }
+ } //end of while()
+ closedir(d);
+ num_device = index;
+
+ for(int i=0; i<index; i++){
+ CAMHAL_LOGDB("Video device list::dev_list[%d]= %s",i,dev_list[i]);
+ }
+ }
+}
+
+extern "C" CameraAdapter* V4LCameraAdapter_Factory(size_t sensor_index, CameraHal* hal)
+{
+ CameraAdapter *adapter = NULL;
+ android::AutoMutex lock(gV4LAdapterLock);
+
+ LOG_FUNCTION_NAME;
+
+ adapter = new V4LCameraAdapter(sensor_index, hal);
+ if ( adapter ) {
+ CAMHAL_LOGDB("New V4L Camera adapter instance created for sensor %d",sensor_index);
+ } else {
+ CAMHAL_LOGEA("V4L Camera adapter create failed for sensor index = %d!",sensor_index);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return adapter;
+}
+
+extern "C" status_t V4LCameraAdapter_Capabilities(
+ CameraProperties::Properties * const properties_array,
+ const int starting_camera, const int max_camera, int & supportedCameras)
+{
+ status_t ret = NO_ERROR;
+ struct v4l2_capability cap;
+ int tempHandle = NULL;
+ int num_cameras_supported = 0;
+ char device_list[MAX_VIDEO_DEVICES][MAX_PATH_LENGTH];
+ char* video_device_list[MAX_VIDEO_DEVICES];
+ int num_v4l_devices = 0;
+ int sensorId = 0;
+ CameraProperties::Properties* properties = NULL;
+
+ LOG_FUNCTION_NAME;
+
+ supportedCameras = 0;
+ memset((void*)&cap, 0, sizeof(v4l2_capability));
+
+ if (!properties_array) {
+ CAMHAL_LOGEB("invalid param: properties = 0x%p", properties_array);
+ LOG_FUNCTION_NAME_EXIT;
+ return BAD_VALUE;
+ }
+
+ for (int i = 0; i < MAX_VIDEO_DEVICES; i++) {
+ video_device_list[i] = device_list[i];
+ }
+ //look for the connected video devices
+ detectVideoDevice(video_device_list, num_v4l_devices);
+ sortVideoDevices(video_device_list, num_v4l_devices);
+
+ for (int i = 0; i < num_v4l_devices; i++) {
+ if ( (starting_camera + num_cameras_supported) < max_camera) {
+ sensorId = starting_camera + num_cameras_supported;
+
+ CAMHAL_LOGDB("Opening device[%d] = %s..",i, video_device_list[i]);
+ if ((tempHandle = open(video_device_list[i], O_RDWR)) == -1) {
+ CAMHAL_LOGEB("Error while opening handle to V4L2 Camera(%s): %s",video_device_list[i], strerror(errno));
+ continue;
+ }
+
+ ret = ioctl (tempHandle, VIDIOC_QUERYCAP, &cap);
+ if (ret < 0) {
+ CAMHAL_LOGEA("Error when querying the capabilities of the V4L Camera");
+ close(tempHandle);
+ continue;
+ }
+
+ //check for video capture devices
+ if ((cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0) {
+ CAMHAL_LOGEA("Error while adapter initialization: video capture not supported.");
+ close(tempHandle);
+ continue;
+ }
+ strcpy(device[num_cameras_supported], video_device_list[i]);
+ properties = properties_array + starting_camera + num_cameras_supported;
+
+ //fetch capabilities for this camera
+ ret = V4LCameraAdapter::getCaps( sensorId, properties, tempHandle );
+ if (ret < 0) {
+ CAMHAL_LOGEA("Error while getting capabilities.");
+ close(tempHandle);
+ continue;
+ }
+
+ num_cameras_supported++;
+
+ }
+ //For now exit this loop once a valid video capture device is found.
+ //TODO: find all V4L capture devices and it capabilities
+ close(tempHandle);
+ }//end of for() loop
+
+ supportedCameras = num_cameras_supported;
+ CAMHAL_LOGDB("Number of V4L cameras detected =%d", num_cameras_supported);
+ for (int i=0;i<supportedCameras;i++)
+ CAMHAL_LOGEB("Camera device path %d is %s", i, device[i]);
+EXIT:
+ LOG_FUNCTION_NAME_EXIT;
+ close(tempHandle);
+ return NO_ERROR;
+}
+
+} // namespace Camera
+} // namespace Ti
+
+
+/*--------------------Camera Adapter Class ENDS here-----------------------------*/
+
diff --git a/camera/V4LCameraAdapter/V4LCapabilities.cpp b/camera/V4LCameraAdapter/V4LCapabilities.cpp
new file mode 100644
index 0000000..f7a2e02
--- /dev/null
+++ b/camera/V4LCameraAdapter/V4LCapabilities.cpp
@@ -0,0 +1,369 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file V4LCapabilities.cpp
+*
+* This file implements the V4L Capabilities feature.
+*
+*/
+
+#include "CameraHal.h"
+#include "V4LCameraAdapter.h"
+#include "ErrorUtils.h"
+#include "TICameraParameters.h"
+
+namespace Ti {
+namespace Camera {
+
+/************************************
+ * global constants and variables
+ *************************************/
+
+#define ARRAY_SIZE(array) (sizeof((array)) / sizeof((array)[0]))
+#define MAX_RES_STRING_LENGTH 10
+#define DEFAULT_WIDTH 640
+#define DEFAULT_HEIGHT 480
+
+static const char PARAM_SEP[] = ",";
+
+//Camera defaults
+const char V4LCameraAdapter::DEFAULT_PICTURE_FORMAT[] = "jpeg";
+const char V4LCameraAdapter::DEFAULT_PICTURE_SIZE[] = "640x480";
+const char V4LCameraAdapter::DEFAULT_PREVIEW_FORMAT[] = "yuv420sp";
+const char V4LCameraAdapter::DEFAULT_PREVIEW_SIZE[] = "640x480";
+const char V4LCameraAdapter::DEFAULT_NUM_PREV_BUFS[] = "6";
+const char V4LCameraAdapter::DEFAULT_FRAMERATE[] = "30";
+const char V4LCameraAdapter::DEFAULT_FOCUS_MODE[] = "infinity";
+const char V4LCameraAdapter::DEFAULT_FRAMERATE_RANGE[] = "30000,30000";
+const char * V4LCameraAdapter::DEFAULT_VSTAB = android::CameraParameters::FALSE;
+const char * V4LCameraAdapter::DEFAULT_VNF = android::CameraParameters::FALSE;
+
+
+const CapPixelformat V4LCameraAdapter::mPixelformats [] = {
+ { V4L2_PIX_FMT_YUYV, android::CameraParameters::PIXEL_FORMAT_YUV422I },
+ { V4L2_PIX_FMT_JPEG, android::CameraParameters::PIXEL_FORMAT_JPEG },
+};
+
+/*****************************************
+ * internal static function declarations
+ *****************************************/
+
+/**** Utility functions to help translate V4L Caps to Parameter ****/
+
+status_t V4LCameraAdapter::insertDefaults(CameraProperties::Properties* params, V4L_TI_CAPTYPE &caps)
+{
+ status_t ret = NO_ERROR;
+ LOG_FUNCTION_NAME;
+
+ params->set(CameraProperties::PREVIEW_FORMAT, DEFAULT_PREVIEW_FORMAT);
+
+ params->set(CameraProperties::PICTURE_FORMAT, DEFAULT_PICTURE_FORMAT);
+ params->set(CameraProperties::PICTURE_SIZE, DEFAULT_PICTURE_SIZE);
+ params->set(CameraProperties::PREVIEW_SIZE, DEFAULT_PREVIEW_SIZE);
+ params->set(CameraProperties::PREVIEW_FRAME_RATE, DEFAULT_FRAMERATE);
+ params->set(CameraProperties::REQUIRED_PREVIEW_BUFS, DEFAULT_NUM_PREV_BUFS);
+ params->set(CameraProperties::FOCUS_MODE, DEFAULT_FOCUS_MODE);
+
+ params->set(CameraProperties::CAMERA_NAME, "USBCAMERA");
+ params->set(CameraProperties::JPEG_THUMBNAIL_SIZE, "320x240");
+ params->set(CameraProperties::JPEG_QUALITY, "90");
+ params->set(CameraProperties::JPEG_THUMBNAIL_QUALITY, "50");
+ params->set(CameraProperties::FRAMERATE_RANGE, DEFAULT_FRAMERATE_RANGE);
+ params->set(CameraProperties::S3D_PRV_FRAME_LAYOUT, "none");
+ params->set(CameraProperties::SUPPORTED_EXPOSURE_MODES, "auto");
+ params->set(CameraProperties::SUPPORTED_ISO_VALUES, "auto");
+ params->set(CameraProperties::SUPPORTED_ANTIBANDING, "auto");
+ params->set(CameraProperties::SUPPORTED_EFFECTS, "none");
+ params->set(CameraProperties::SUPPORTED_IPP_MODES, "ldc-nsf");
+ params->set(CameraProperties::FACING_INDEX, TICameraParameters::FACING_BACK);
+ params->set(CameraProperties::ORIENTATION_INDEX, 0);
+ params->set(CameraProperties::SENSOR_ORIENTATION, "0");
+ params->set(CameraProperties::VSTAB, DEFAULT_VSTAB);
+ params->set(CameraProperties::VNF, DEFAULT_VNF);
+
+ //For compatibility
+ params->set(CameraProperties::SUPPORTED_ZOOM_RATIOS,"0");
+ params->set(CameraProperties::SUPPORTED_ZOOM_STAGES, "0");
+ params->set(CameraProperties::ZOOM, "0");
+ params->set(CameraProperties::ZOOM_SUPPORTED, "true");
+
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t V4LCameraAdapter::insertPreviewFormats(CameraProperties::Properties* params, V4L_TI_CAPTYPE &caps) {
+
+ char supported[MAX_PROP_VALUE_LENGTH];
+
+ memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+ for (int i = 0; i < caps.ulPreviewFormatCount; i++) {
+ for (unsigned int j = 0; j < ARRAY_SIZE(mPixelformats); j++) {
+ if(caps.ePreviewFormats[i] == mPixelformats[j].pixelformat ) {
+ strncat (supported, mPixelformats[j].param, MAX_PROP_VALUE_LENGTH-1 );
+ strncat (supported, PARAM_SEP, 1 );
+ }
+ }
+ }
+ strncat(supported, android::CameraParameters::PIXEL_FORMAT_YUV420P, MAX_PROP_VALUE_LENGTH - 1);
+ strncat (supported, PARAM_SEP, 1 );
+ strncat(supported, android::CameraParameters::PIXEL_FORMAT_YUV420SP, MAX_PROP_VALUE_LENGTH - 1);
+ params->set(CameraProperties::SUPPORTED_PREVIEW_FORMATS, supported);
+ return NO_ERROR;
+}
+
+status_t V4LCameraAdapter::insertPreviewSizes(CameraProperties::Properties* params, V4L_TI_CAPTYPE &caps) {
+
+ char supported[MAX_PROP_VALUE_LENGTH];
+
+ memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+ for (int i = 0; i < caps.ulPreviewResCount; i++) {
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, 1);
+ }
+ strncat (supported, caps.tPreviewRes[i].param, MAX_PROP_VALUE_LENGTH-1 );
+ }
+
+ params->set(CameraProperties::SUPPORTED_PREVIEW_SIZES, supported);
+ params->set(CameraProperties::SUPPORTED_PREVIEW_SUBSAMPLED_SIZES, supported);
+ return NO_ERROR;
+}
+
+status_t V4LCameraAdapter::insertImageSizes(CameraProperties::Properties* params, V4L_TI_CAPTYPE &caps) {
+
+ char supported[MAX_PROP_VALUE_LENGTH];
+
+ memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+ for (int i = 0; i < caps.ulCaptureResCount; i++) {
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, 1);
+ }
+ strncat (supported, caps.tCaptureRes[i].param, MAX_PROP_VALUE_LENGTH-1 );
+ }
+ params->set(CameraProperties::SUPPORTED_PICTURE_SIZES, supported);
+ return NO_ERROR;
+}
+
+status_t V4LCameraAdapter::insertFrameRates(CameraProperties::Properties* params, V4L_TI_CAPTYPE &caps) {
+
+ char supported[MAX_PROP_VALUE_LENGTH];
+ char temp[MAX_PROP_VALUE_LENGTH];
+
+ memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+ for (int i = 0; i < caps.ulFrameRateCount; i++) {
+ snprintf (temp, sizeof(temp) - 1, "%d", caps.ulFrameRates[i] );
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, 1);
+ }
+ strncat (supported, temp, MAX_PROP_VALUE_LENGTH-1 );
+ }
+
+ params->set(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES, supported);
+
+ memset(supported, 0, sizeof(supported));
+
+ for (int i = caps.ulFrameRateCount - 1; i >= 0 ; i--) {
+ if ( supported[0] ) strncat(supported, PARAM_SEP, 1);
+ snprintf(temp, sizeof(temp) - 1, "(%d,%d)", caps.ulFrameRates[i] * CameraHal::VFR_SCALE, caps.ulFrameRates[i] * CameraHal::VFR_SCALE);
+ strcat(supported, temp);
+ }
+
+ params->set(CameraProperties::FRAMERATE_RANGE_SUPPORTED, supported);
+
+ return NO_ERROR;
+}
+
+status_t V4LCameraAdapter::insertCapabilities(CameraProperties::Properties* params, V4L_TI_CAPTYPE &caps)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NO_ERROR == ret ) {
+ ret = insertPreviewFormats(params, caps);
+ }
+
+ if ( NO_ERROR == ret ) {
+ ret = insertImageSizes(params, caps);
+ }
+
+ if ( NO_ERROR == ret ) {
+ ret = insertPreviewSizes(params, caps);
+ }
+
+ if ( NO_ERROR == ret ) {
+ ret = insertFrameRates(params, caps);
+ }
+
+ //Insert Supported Focus modes.
+ params->set(CameraProperties::SUPPORTED_FOCUS_MODES, "infinity");
+
+ params->set(CameraProperties::SUPPORTED_PICTURE_FORMATS, "jpeg");
+
+ if ( NO_ERROR == ret ) {
+ ret = insertDefaults(params, caps);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t V4LCameraAdapter::sortAscend(V4L_TI_CAPTYPE &caps, uint16_t count) {
+ size_t tempRes;
+ size_t w, h, tmpW,tmpH;
+ for (int i=0; i<count; i++) {
+ w = caps.tPreviewRes[i].width;
+ h = caps.tPreviewRes[i].height;
+ tempRes = w*h;
+ for (int j=i+1; j<count; j++) {
+ tmpW = caps.tPreviewRes[j].width;
+ tmpH = caps.tPreviewRes[j].height;
+
+ if (tempRes > (tmpW * tmpH) ) {
+ caps.tPreviewRes[j].width = w;
+ caps.tPreviewRes[j].height = h;
+ w = tmpW;
+ h = tmpH;
+ }
+ }
+ caps.tPreviewRes[i].width = w;
+ caps.tPreviewRes[i].height = h;
+
+ }
+ return NO_ERROR;
+}
+
+/*****************************************
+ * public exposed function declarations
+ *****************************************/
+
/**
 * Queries a V4L2 capture device for its supported pixel formats, frame
 * sizes and frame intervals (sizes/intervals are probed for
 * DEFAULT_PIXEL_FORMAT only) and publishes the results into `params` via
 * insertCapabilities().
 *
 * Only V4L2_FRMSIZE_TYPE_DISCRETE / V4L2_FRMIVAL_TYPE_DISCRETE enumerations
 * are handled; any other type zeroes the corresponding counts.
 *
 * NOTE(review): fmtDesc/frmSizeEnum/frmIvalEnum are not zero-initialized
 * before the first ioctl, and the .type fields are tested even on the
 * iteration where the ioctl failed — this relies on the kernel leaving the
 * struct untouched on error. Consider memset + checking status before type.
 * NOTE(review): the caps.* arrays are indexed by the enumeration counter
 * with no bounds check; a device advertising more entries than the arrays
 * hold would overflow them.
 */
status_t V4LCameraAdapter::getCaps(const int sensorId, CameraProperties::Properties* params,
        V4L_HANDLETYPE handle) {
    status_t status = NO_ERROR;
    V4L_TI_CAPTYPE caps;
    int i = 0;
    int j = 0;
    struct v4l2_fmtdesc fmtDesc;
    struct v4l2_frmsizeenum frmSizeEnum;
    struct v4l2_frmivalenum frmIvalEnum;

    //get supported pixel formats
    // Enumerate until VIDIOC_ENUM_FMT fails (end of the format list).
    for ( i = 0; status == NO_ERROR; i++) {
        fmtDesc.index = i;
        fmtDesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        status = ioctl (handle, VIDIOC_ENUM_FMT, &fmtDesc);
        if (status == NO_ERROR) {
            CAMHAL_LOGDB("fmtDesc[%d].description::pixelformat::flags== (%s::%d::%d)",i, fmtDesc.description,fmtDesc.pixelformat,fmtDesc.flags);
            caps.ePreviewFormats[i] = fmtDesc.pixelformat;
        }
    }
    caps.ulPreviewFormatCount = i;

    //get preview sizes & capture image sizes
    // The same discrete sizes are recorded for both preview and capture.
    status = NO_ERROR;
    for ( i = 0; status == NO_ERROR; i++) {
        frmSizeEnum.index = i;
        //Check for frame sizes for default pixel format
        //TODO: Check for frame sizes for all supported pixel formats
        frmSizeEnum.pixel_format = DEFAULT_PIXEL_FORMAT;
        status = ioctl (handle, VIDIOC_ENUM_FRAMESIZES, &frmSizeEnum);
        if(frmSizeEnum.type != V4L2_FRMSIZE_TYPE_DISCRETE) {
            break;
        }
        if (status == NO_ERROR) {
            CAMHAL_LOGDB("frmSizeEnum.index[%d].width x height == (%d x %d)", i, frmSizeEnum.discrete.width, frmSizeEnum.discrete.height);
            caps.tPreviewRes[i].width = frmSizeEnum.discrete.width;
            caps.tPreviewRes[i].height = frmSizeEnum.discrete.height;
            snprintf(caps.tPreviewRes[i].param, MAX_RES_STRING_LENGTH,"%dx%d",frmSizeEnum.discrete.width,frmSizeEnum.discrete.height);

            caps.tCaptureRes[i].width = frmSizeEnum.discrete.width;
            caps.tCaptureRes[i].height = frmSizeEnum.discrete.height;
            snprintf(caps.tCaptureRes[i].param, MAX_RES_STRING_LENGTH,"%dx%d",frmSizeEnum.discrete.width,frmSizeEnum.discrete.height);
        }
        else {
            // Enumeration ended: record how many sizes were found.
            caps.ulCaptureResCount = i;
            caps.ulPreviewResCount = i;
        }
    }
    if(frmSizeEnum.type != V4L2_FRMSIZE_TYPE_DISCRETE) {
        CAMHAL_LOGEA("Error while getting capabilities: V4L2_FRMSIZE_TYPE_CONTINUOUS & V4L2_FRMSIZE_TYPE_STEPWISE are not supported.");

        //Initialized the below members to zero as type = V4L2_FRMSIZE_TYPE_CONTINUOUS & V4L2_FRMSIZE_TYPE_STEPWISE are not supported
        caps.ulCaptureResCount = 0;
        caps.ulPreviewResCount = 0;
    }

    //sort the preview sizes in ascending order
    sortAscend(caps, caps.ulPreviewResCount);

    //get supported frame rates
    // Probe resolutions from the largest down, stopping at the first one
    // that can do 30fps; its rates become the advertised frame-rate list.
    bool fps30 = false;
    for ( j=caps.ulPreviewResCount-1; j >= 0; j--) {
        CAMHAL_LOGDB(" W x H = %d x %d", caps.tPreviewRes[j].width, caps.tPreviewRes[j].height);
        status = NO_ERROR;
        for ( i = 0; status == NO_ERROR; i++) {
            frmIvalEnum.index = i;
            //Check for supported frame rates for the default pixel format.
            frmIvalEnum.pixel_format = DEFAULT_PIXEL_FORMAT;
            frmIvalEnum.width = caps.tPreviewRes[j].width;
            frmIvalEnum.height = caps.tPreviewRes[j].height;

            status = ioctl (handle, VIDIOC_ENUM_FRAMEINTERVALS, &frmIvalEnum);
            if(frmIvalEnum.type != V4L2_FRMIVAL_TYPE_DISCRETE) {
                break;
            }
            if (status == NO_ERROR) {
                CAMHAL_LOGDB("frmIvalEnum[%d].frame rate= %d)",i, (frmIvalEnum.discrete.denominator/frmIvalEnum.discrete.numerator));
                caps.ulFrameRates[i] = (frmIvalEnum.discrete.denominator/frmIvalEnum.discrete.numerator);
                if (caps.ulFrameRates[i] == 30) {
                    fps30 = true;
                }
            }
            else {
                caps.ulFrameRateCount = i;
            }
        }
        if(fps30) {
            break;
        }
    }

    if(frmIvalEnum.type != V4L2_FRMIVAL_TYPE_DISCRETE) {
        CAMHAL_LOGEA("Error while getting capabilities: V4L2_FRMIVAL_TYPE_CONTINUOUS & V4L2_FRMIVAL_TYPE_STEPWISE are not supported.");

        //Initialized the below members to zero as type = V4L2_FRMIVAL_TYPE_CONTINUOUS & V4L2_FRMIVAL_TYPE_STEPWISE are not supported
        caps.ulFrameRateCount = 0;
    }

    //update the preview resolution with the highest resolution which supports 30fps.
/* // for video preview the application choose the resolution from the mediaprofiles.xml.
    // so populating all supported preview resolution is required for video mode.
    caps.tPreviewRes[0].width = caps.tPreviewRes[j].width;
    caps.tPreviewRes[0].height = caps.tPreviewRes[j].height;
    snprintf(caps.tPreviewRes[0].param, MAX_RES_STRING_LENGTH,"%dx%d",caps.tPreviewRes[j].width,caps.tPreviewRes[j].height);
    caps.ulPreviewResCount = 1;
*/
    insertCapabilities (params, caps);
    return NO_ERROR;
}
+
+
+
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/V4LCameraAdapter/V4LM2M.cpp b/camera/V4LCameraAdapter/V4LM2M.cpp
new file mode 100644
index 0000000..eaa2e36
--- /dev/null
+++ b/camera/V4LCameraAdapter/V4LM2M.cpp
@@ -0,0 +1,625 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file V4LM2M.cpp
+*
+* This file implements the V4L M2M color conversion / scaling using VPE h/w block
+*
+*/
+#include <stdio.h>
+#include <stdlib.h>
+#include <fcntl.h>
+#include <unistd.h>
+#include <stdint.h>
+#include <string.h>
+#include <errno.h>
+
+#include <linux/videodev2.h>
+#include <linux/v4l2-controls.h>
+
+#include <sys/mman.h>
+#include <sys/ioctl.h>
+#include <utils/Log.h>
+#include <stdio.h>
+#include <stdarg.h>
+#include <stdlib.h>
+#include <string.h>
+#include <unistd.h>
+#include <time.h>
+#include <fcntl.h>
+#include <sys/ioctl.h>
+#include <sys/mman.h>
+#include <sys/stat.h>
+#include <signal.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <fcntl.h>
+#include <unistd.h>
+#include <errno.h>
+#include <sys/ioctl.h>
+#include <sys/mman.h>
+#include <sys/select.h>
+#include <stdio.h>
+#include <stdarg.h>
+#include <stdlib.h>
+#include <string.h>
+#include <unistd.h>
+#include <time.h>
+#include <fcntl.h>
+#include <sys/ioctl.h>
+#include <sys/mman.h>
+#include <sys/stat.h>
+#include <sys/types.h>
+#include <dirent.h>
+#include <ui/GraphicBuffer.h>
+#include "CameraHal.h"
+
// Bail-out helper used by the V4L calls below: log the message and make
// the enclosing function return -EINVAL.
#define pexit(str) { \
    ALOGE(str); \
    return -EINVAL; \
}

// Driver-private control selecting how many buffers form one VPE transaction.
#define V4L2_CID_TRANS_NUM_BUFS (V4L2_CID_PRIVATE_BASE)
#define DEVICE "/dev/videoxx"  // NOTE(review): appears unused; DEVICE_PATH/DEVICE_NAME are used instead
#define DEVICE_PATH "/dev/"
#define DEVICE_NAME "videoxx"


/**< File descriptor for device */
static int fd = -1;

// Path of the detected VPE M2M device node (set by setupM2MDevice()).
static char *video_dev;

// Conversion-session state shared by processFrame()/stopM2MDevice().
static int streaming = 0;           // non-zero once both queues are streaming
static int field = V4L2_FIELD_TOP;  // next field tag for the OUTPUT queue (deinterlacing)
static int init_queued_count = 0;   // number of priming queue() rounds done so far
static enum v4l2_colorspace src_colorspace, dst_colorspace;
static int src_coplanar = 0, dst_coplanar = 0;
static int srcSize = 0, dstSize = 0, srcSize_uv = 0, dstSize_uv = 0;  // plane sizes in bytes
static int src_numbuf = 6;          // buffers requested on the OUTPUT queue
static int dst_numbuf = 6;          // buffers requested on the CAPTURE queue
static int startup_threshold = 2;   // priming rounds before streaming starts
/*
 * Convert a format name string into a V4L2 fourcc and compute the size in
 * bytes of one frame, whether the format is coplanar (separate Y/UV
 * planes) and its default colorspace.
 *
 * Returns 1 on success, 0 for an unknown format name.  On failure *size
 * and *fourcc are set to -1; *coplanar and *clrspc are left untouched, so
 * callers must check the return value before using them.
 */
int describeFormat (
    const char *format,
    int width,
    int height,
    int *size,
    int *fourcc,
    int *coplanar,
    enum v4l2_colorspace *clrspc)
{
    // Table of supported formats: fourcc, bytes-per-pixel expressed as a
    // num/den ratio, coplanar flag and default colorspace.  Replaces the
    // former 13-branch if/else chain with identical behavior.
    static const struct {
        const char *name;
        int fourcc;
        int bpp_num;   // bytes per pixel = bpp_num / bpp_den
        int bpp_den;
        int coplanar;
        enum v4l2_colorspace clrspc;
    } kFormats[] = {
        { "rgb24",  V4L2_PIX_FMT_RGB24,  3, 1, 0, V4L2_COLORSPACE_SRGB },
        { "bgr24",  V4L2_PIX_FMT_BGR24,  3, 1, 0, V4L2_COLORSPACE_SRGB },
        { "argb32", V4L2_PIX_FMT_RGB32,  4, 1, 0, V4L2_COLORSPACE_SRGB },
        { "abgr32", V4L2_PIX_FMT_BGR32,  4, 1, 0, V4L2_COLORSPACE_SRGB },
        { "yuv444", V4L2_PIX_FMT_YUV444, 3, 1, 0, V4L2_COLORSPACE_SMPTE170M },
        { "yvyu",   V4L2_PIX_FMT_YVYU,   2, 1, 0, V4L2_COLORSPACE_SMPTE170M },
        { "yuyv",   V4L2_PIX_FMT_YUYV,   2, 1, 0, V4L2_COLORSPACE_SMPTE170M },
        { "uyvy",   V4L2_PIX_FMT_UYVY,   2, 1, 0, V4L2_COLORSPACE_SMPTE170M },
        { "vyuy",   V4L2_PIX_FMT_VYUY,   2, 1, 0, V4L2_COLORSPACE_SMPTE170M },
        { "nv16",   V4L2_PIX_FMT_NV16,   2, 1, 0, V4L2_COLORSPACE_SMPTE170M },
        { "nv61",   V4L2_PIX_FMT_NV61,   2, 1, 0, V4L2_COLORSPACE_SMPTE170M },
        { "nv12",   V4L2_PIX_FMT_NV12,   3, 2, 1, V4L2_COLORSPACE_SMPTE170M },
        { "nv21",   V4L2_PIX_FMT_NV21,   3, 2, 1, V4L2_COLORSPACE_SMPTE170M },
    };

    *size = -1;
    *fourcc = -1;

    for (size_t i = 0; i < sizeof(kFormats) / sizeof(kFormats[0]); i++) {
        if (strcmp (format, kFormats[i].name) == 0) {
            *fourcc = kFormats[i].fourcc;
            // For NV12/NV21 this is width*height*3/2, which matches the
            // original width*height*1.5 (including integer truncation).
            *size = width * height * kFormats[i].bpp_num / kFormats[i].bpp_den;
            *coplanar = kFormats[i].coplanar;
            *clrspc = kFormats[i].clrspc;
            return 1;
        }
    }

    return 0;
}
+
+/*
+ * Set format of one of the CAPTURE or OUTPUT stream (passed as type)
+ * Request to allocate memory mapped buffers, map them and store
+ * the mapped addresses of y and uv buffers into the array
+ */
+int allocBuffers(
+ int type,
+ int width,
+ int height,
+ int coplanar,
+ enum v4l2_colorspace clrspc,
+ int *sizeimage_y,
+ int *sizeimage_uv,
+ int fourcc,
+ int *numbuf,
+ int interlace)
+{
+ struct v4l2_format fmt;
+ struct v4l2_requestbuffers reqbuf;
+ struct v4l2_buffer buffer;
+ struct v4l2_plane buf_planes[2];
+ int i = 0;
+ int ret = -1;
+
+ bzero(&fmt,sizeof(fmt));
+ fmt.type = type;
+ fmt.fmt.pix_mp.width = width;
+ fmt.fmt.pix_mp.height = height;
+ fmt.fmt.pix_mp.pixelformat = fourcc;
+ fmt.fmt.pix_mp.colorspace = clrspc;
+
+ if (type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE && interlace)
+ fmt.fmt.pix_mp.field = V4L2_FIELD_ALTERNATE;
+ else
+ fmt.fmt.pix_mp.field = V4L2_FIELD_ANY;
+
+ /* Set format and update the sizes of y and uv planes
+ * If the returned value is changed, it means driver corrected the size
+ */
+ ret = ioctl(fd,VIDIOC_S_FMT,&fmt);
+ if(ret < 0) {
+ pexit("Cant set color format\n");
+ } else {
+ *sizeimage_y = fmt.fmt.pix_mp.plane_fmt[0].sizeimage;
+ *sizeimage_uv = fmt.fmt.pix_mp.plane_fmt[1].sizeimage;
+ }
+
+ bzero(&reqbuf,sizeof(reqbuf));
+ reqbuf.count = *numbuf;
+ reqbuf.type = type;
+ reqbuf.memory = V4L2_MEMORY_DMABUF;
+
+ ret = ioctl (fd, VIDIOC_REQBUFS, &reqbuf);
+ if(ret < 0) {
+ pexit("Cant request buffers\n");
+ } else {
+ *numbuf = reqbuf.count;
+ }
+
+ for(i = 0; i < *numbuf; i++) {
+
+ memset(&buffer, 0, sizeof(buffer));
+ buffer.type = type;
+ buffer.memory = V4L2_MEMORY_DMABUF;
+ buffer.index = i;
+ buffer.m.planes = buf_planes;
+ buffer.length = coplanar ? 2 : 1;
+
+ ret = ioctl (fd, VIDIOC_QUERYBUF, &buffer);
+ if (ret < 0)
+ pexit("Cant query buffers\n");
+
+ ALOGV("query buf, plane 0 = %d, plane 1 = %d\n", buffer.m.planes[0].length,
+ buffer.m.planes[1].length);
+ }
+
+ return 1;
+}
+
+
+/* Queue one buffer (identified by index) */
+int queue(
+ int type,
+ int index,
+ int field,
+ void* buf_handle,
+ int size_y,
+ int size_uv)
+{
+ struct v4l2_buffer buffer;
+ struct v4l2_plane buf_planes[2];
+ int ret = -1;
+ int num_planes = 0;
+
+ if (type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
+ buf_planes[0].bytesused = size_y;
+ buf_planes[1].bytesused = size_uv;
+ buf_planes[0].length = size_y + size_uv;//Need to check how to use this... ((buffer_handle_t)(IMG_native_handle_t))->[0]
+ buf_planes[1].length = size_y + size_uv;
+ buf_planes[0].data_offset = 0;
+ buf_planes[1].data_offset = size_y;
+ buf_planes[0].m.fd = ((buffer_handle_t)(buf_handle))->data[0];
+ buf_planes[1].m.fd = ((buffer_handle_t)(buf_handle))->data[0];
+ num_planes = 2;
+ } else if (type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) {
+ buf_planes[0].length = buf_planes[0].bytesused = ((Ti::Camera::CameraBuffer*)(buf_handle))->actual_size;
+ buf_planes[0].data_offset = 0;
+ buf_planes[0].m.fd = ((Ti::Camera::CameraBuffer*)(buf_handle))->dma_buf_fd;
+ num_planes = 1;
+ }
+
+ memset(&buffer,0,sizeof(buffer));
+ buffer.type = type;
+ buffer.memory = V4L2_MEMORY_DMABUF;
+ buffer.index = index;
+ buffer.m.planes = buf_planes;
+ buffer.field = field;
+ buffer.length = 2;
+
+ ret = ioctl (fd, VIDIOC_QBUF, &buffer);
+ if(-1 == ret) {
+ pexit("Failed to queue\n");
+ }
+ return ret;
+}
+
+/*
+ * Dequeue one buffer
+ * Index of dequeue buffer is returned by ioctl
+ */
+int dequeue(int type, struct v4l2_buffer *buf, struct v4l2_plane *buf_planes)
+{
+ int ret = -1;
+
+ memset(buf, 0, sizeof(*buf));
+ buf->type = type;
+ buf->memory = V4L2_MEMORY_DMABUF;
+ buf->m.planes = buf_planes;
+ buf->length = 2;
+ ret = ioctl (fd, VIDIOC_DQBUF, buf);
+ if(-1 == ret) {
+ pexit("Failed to dequeue\n");
+ }
+ return ret;
+}
+
+/* Start processing */
+int streamON (
+ int type)
+{
+ int ret = -1;
+ ret = ioctl (fd, VIDIOC_STREAMON, &type);
+ if(-1 == ret) {
+ pexit("Cant Stream on\n");
+ }
+
+ return ret;
+}
+
+/* Stop processing */
+int streamOFF (
+ int type)
+{
+ int ret = -1;
+ ret = ioctl (fd, VIDIOC_STREAMOFF, &type);
+ if(-1 == ret) {
+ pexit("Cant Stream on\n");
+ }
+
+ return ret;
+}
+
+//scan for video devices
+void detectVideoDevice(char** video_device_list, int& num_device) {
+ char dir_path[20];
+ char* filename;
+ char** dev_list = video_device_list;
+ DIR *d;
+ struct dirent *dir;
+ int index = 0;
+
+ strcpy(dir_path, DEVICE_PATH);
+ d = opendir(dir_path);
+ if(d) {
+ //read each entry in the /dev/ and find if there is videox entry.
+ while ((dir = readdir(d)) != NULL) {
+ filename = dir->d_name;
+ if (strncmp(filename, DEVICE_NAME, 5) == 0) {
+ strcpy(dev_list[index],DEVICE_PATH);
+ strncat(dev_list[index],filename,sizeof(DEVICE_NAME));
+ index++;
+ }
+ } //end of while()
+ closedir(d);
+ num_device = index;
+
+ for(int i=0; i<index; i++){
+ ALOGV("Video device list::dev_list[%d]= %s",i,dev_list[i]);
+ }
+ }
+}
+
+static char device[15];
+char *detectM2MDevice()
+{
+ char device_list[5][15];
+ char* video_device_list[5];
+ int num_v4l_devices = 0;
+ int tempHandle = NULL;
+ int ret;
+ struct v4l2_capability cap;
+
+ memset((void*)&cap, 0, sizeof(v4l2_capability));
+
+ for (int i = 0; i < 5; i++) {
+ video_device_list[i] = device_list[i];
+ }
+ //look for the connected video devices
+ detectVideoDevice(video_device_list, num_v4l_devices);
+
+ for (int i = 0; i < num_v4l_devices; i++) {
+ ALOGV("Opening device[%d] = %s..",i, video_device_list[i]);
+ if ((tempHandle = open(video_device_list[i], O_RDWR)) == -1) {
+ ALOGE("Error while opening handle to V4L2 Camera(%s): %s",video_device_list[i], strerror(errno));
+ continue;
+ }
+
+ ret = ioctl (tempHandle, VIDIOC_QUERYCAP, &cap);
+ if (ret < 0) {
+ ALOGE("Error when querying the capabilities of the V4L Camera");
+ close(tempHandle);
+ continue;
+ }
+
+ //check for video capture devices
+ if (!((cap.capabilities & V4L2_CAP_VIDEO_M2M_MPLANE) && (cap.capabilities & V4L2_CAP_STREAMING))) {
+ ALOGE("Error while adapter initialization: video capture not supported.");
+ close(tempHandle);
+ continue;
+ }
+
+ strcpy(device, video_device_list[i]);
+ ALOGE("Found VPE M2M device %s!!!", device);
+ break;
+ }
+
+ return device;
+}
+
+int setupM2MDevice()
+{
+ video_dev = detectM2MDevice();
+
+ ALOGV("setupM2MDevice");
+
+ return 0;
+}
+
+int closeM2MDevice()
+{
+ ALOGV("closeM2MDevice");
+
+ return 0;
+}
+
+int startM2MDevice()
+{
+ ALOGV("startM2MDevice");
+
+ fd = open(video_dev,O_RDWR);
+ if(fd < 0) {
+ pexit("Can't open device");
+ }
+
+ /* Just a NOP for now */
+ return 0;
+}
+
+int stopM2MDevice()
+{
+ if (!streaming)
+ return 0;
+
+ ALOGV("stopM2MDevice");
+
+ /* Driver cleanup */
+ streamOFF (V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE);
+ streamOFF (V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE);
+
+ close(fd);
+
+ streaming = 0;
+ init_queued_count = 0;
+ field = V4L2_FIELD_TOP;
+
+ return 0;
+}
+
/*
 * Convert/scale one frame through the VPE M2M device.
 *
 * srcHandle : Ti::Camera::CameraBuffer* with the captured frame (queued on
 *             the OUTPUT stream)
 * dstHandle : gralloc buffer_handle_t receiving the converted frame
 *             (queued on the CAPTURE stream)
 * dei       : true enables deinterlacing - source fields alternate
 *             top/bottom via the module-level `field` variable
 * translen  : number of buffers per VPE transaction (V4L2_CID_TRANS_NUM_BUFS)
 * index     : buffer index queued on both streams this call
 * in_index  : out - index of the source buffer the driver released
 * out_index : out - index of the destination buffer holding a completed frame
 *
 * The first call configures formats and allocates driver buffers.  The
 * first `startup_threshold` calls only prime the driver queues and return
 * -1 (no frame produced yet).  Every later call queues one src/dst pair,
 * dequeues one completed pair and returns 0 (or -EINVAL via pexit).
 *
 * NOTE(review): srcStride/dstStride are currently unused - presumably the
 * driver derives strides from VIDIOC_S_FMT; confirm before relying on them.
 */
int processFrame(
    void *srcHandle,
    int srcWidth,
    int srcHeight,
    int srcStride,
    const char *srcFmt,
    void *dstHandle,
    int dstWidth,
    int dstHeight,
    int dstStride,
    const char *dstFmt,
    bool dei,
    int translen, int index, int *in_index, int *out_index)
{
    int i, ret = 0;

    int srcFourcc = 0, dstFourcc = 0;
    int num_frames = 1;
    int interlace = 0;
    struct v4l2_control ctrl;
    num_frames = 1;

    interlace = dei;
    translen = translen;  // self-assignment: silences unused-parameter warnings

//    if (interlace)
//        srcHeight /=2;

    // One-time stream configuration, performed on the very first call.
    if (!streaming && init_queued_count==0) {
        describeFormat (srcFmt, srcWidth, srcHeight, &srcSize, &srcFourcc, &src_coplanar, &src_colorspace);
        describeFormat (dstFmt, dstWidth, dstHeight, &dstSize, &dstFourcc, &dst_coplanar, &dst_colorspace);

        /* Number of buffers to process in one transaction - translen */
        memset(&ctrl, 0, sizeof(ctrl));
        ctrl.id = V4L2_CID_TRANS_NUM_BUFS;
        ctrl.value = translen;
        ret = ioctl(fd, VIDIOC_S_CTRL, &ctrl);
        if (ret < 0)
            pexit("Can't set translen control");

        /* Allocate buffers for CAPTURE and OUTPUT stream */
        ret = allocBuffers (V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE, srcWidth,
            srcHeight, src_coplanar, src_colorspace, &srcSize, &srcSize_uv,
            srcFourcc, &src_numbuf, interlace);
        if(ret < 0) {
            pexit("Cant Allocate buffers for OUTPUT device\n");
        }

        ret = allocBuffers (V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE, dstWidth,
            dstHeight, dst_coplanar, dst_colorspace, &dstSize, &dstSize_uv,
            dstFourcc, &dst_numbuf, interlace);
        if(ret < 0) {
            pexit("Cant Allocate buffers for CAPTURE device\n");
        }

        ALOGI ("Start Streaming Input = %d x %d , %d(%s)\nOutput = %d x %d , %d(%s)\n",
            srcWidth, srcHeight, srcFourcc, srcFmt,
            dstWidth, dstHeight, dstFourcc, dstFmt);

    }

    ALOGI ("Input = %d x %d , %d(%s)\nOutput = %d x %d , %d(%s)\n",
        srcWidth, srcHeight, srcFourcc, srcFmt,
        dstWidth, dstHeight, dstFourcc, dstFmt);

    // Priming phase: queue src/dst pairs without dequeuing until the driver
    // holds startup_threshold buffers.  Returns -1 - no output frame yet.
    if (!streaming && init_queued_count != startup_threshold) {

        queue(V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE, index, V4L2_FIELD_NONE, dstHandle, dstSize, dstSize_uv);


        /*************************************
            Driver is ready Now
        *************************************/

        queue(V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE, index, field, srcHandle, srcSize, srcSize_uv);
        // Alternate the field tag for the interlaced OUTPUT stream.
        if (field == V4L2_FIELD_TOP)
            field = V4L2_FIELD_BOTTOM;
        else
            field = V4L2_FIELD_TOP;
        init_queued_count++;
        return -1;
    }

    if(!streaming) {
        /*************************************
            Data is ready Now
        *************************************/
        // Enough buffers are primed: queue this pair and start both streams.
        queue(V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE, index, V4L2_FIELD_NONE, dstHandle, dstSize, dstSize_uv);
        queue(V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE, index, field, srcHandle, srcSize, srcSize_uv);

        streamON (V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE);
        streamON (V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE);
    } else {
        // Steady state: queue one destination and one source buffer.
        queue(V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE, index, V4L2_FIELD_NONE, dstHandle, dstSize, dstSize_uv);

        /*************************************
            Driver is ready Now
        *************************************/
        queue(V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE, index, field, srcHandle, srcSize, srcSize_uv);
        if (field == V4L2_FIELD_TOP)
            field = V4L2_FIELD_BOTTOM;
        else
            field = V4L2_FIELD_TOP;
    }

    // Collect one completed src/dst pair (num_frames is always 1 here).
    while (num_frames) {
        struct v4l2_buffer buf;
        struct v4l2_plane buf_planes[2];

        /* Wait for and dequeue one buffer from OUTPUT
         * to write data for next interlaced field */
        dequeue(V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE, &buf, buf_planes);
        ALOGV("dequeued source buffer with index %d\n", buf.index);
        *in_index = buf.index;

        /* Dequeue progressive frame from CAPTURE stream
         * write to the file and queue one empty buffer */
        dequeue(V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE, &buf, buf_planes);
        ALOGV("dequeued dest buffer with index %d\n", buf.index);

        *out_index = buf.index;
        num_frames--;

        ALOGV("frames left %d\n", num_frames);
    }

    streaming = 1;
    return 0;
}
+
diff --git a/camera/inc/ANativeWindowDisplayAdapter.h b/camera/inc/ANativeWindowDisplayAdapter.h
new file mode 100644
index 0000000..eba91bb
--- /dev/null
+++ b/camera/inc/ANativeWindowDisplayAdapter.h
@@ -0,0 +1,196 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+
+#include "CameraHal.h"
+#include <ui/GraphicBufferMapper.h>
+#include <hal_public.h>
+
+namespace Ti {
+namespace Camera {
+
/**
 * Display handler class - This class basically handles the buffer posting to display
 */

class ANativeWindowDisplayAdapter : public DisplayAdapter
{
public:

    // Describes one frame handed to the display for posting.
    typedef struct
    {
        CameraBuffer *mBuffer;
        void *mUser;
        int mOffset;
        int mWidth;
        int mHeight;
        int mWidthStride;
        int mHeightStride;
        int mLength;
        CameraFrame::FrameType mType;
    } DisplayFrame;

    // Lifecycle states of the display adapter.
    enum DisplayStates
    {
        DISPLAY_INIT = 0,
        DISPLAY_STARTED,
        DISPLAY_STOPPED,
        DISPLAY_EXITED
    };

public:

    ANativeWindowDisplayAdapter();
    virtual ~ANativeWindowDisplayAdapter();

    ///Initializes the display adapter and creates any resources required
    virtual status_t initialize();

    virtual int setPreviewWindow(struct preview_stream_ops *window);
    virtual int setFrameProvider(FrameNotifier *frameProvider);
    virtual int setErrorHandler(ErrorNotifier *errorNotifier);
    virtual int enableDisplay(int width, int height, struct timeval *refTime = NULL);
    virtual int disableDisplay(bool cancel_buffer = true);
    virtual status_t pauseDisplay(bool pause);

#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS

    //Used for shot to snapshot measurement
    virtual status_t setSnapshotTimeRef(struct timeval *refTime = NULL);

#endif

    virtual bool supportsExternalBuffering();

    //Implementation of inherited interfaces
    virtual CameraBuffer * allocateBufferList(int width, int height, const char* format, int &bytes, int numBufs);
    virtual CameraBuffer *getBufferList(int *numBufs);
    virtual uint32_t * getOffsets() ;
    virtual int getFd() ;
    virtual int freeBufferList(CameraBuffer * buflist);

    virtual status_t maxQueueableBuffers(unsigned int& queueable);
    virtual status_t minUndequeueableBuffers(int& unqueueable);

    // If set to true ANativeWindowDisplayAdapter will not lock/unlock graphic buffers
    void setExternalLocking(bool extBuffLocking);

    ///Class specific functions
    static void frameCallbackRelay(CameraFrame* caFrame);
    void frameCallback(CameraFrame* caFrame);

    void displayThread();

    private:
    void destroy();
    bool processHalMsg();
    status_t PostFrame(ANativeWindowDisplayAdapter::DisplayFrame &dispFrame);
    bool handleFrameReturn();
    status_t returnBuffersToWindow();

public:

    static const int DISPLAY_TIMEOUT;
    static const int FAILED_DQS_TO_SUSPEND;

    // Worker thread that drains mDisplayThreadQ and drives displayThread().
    class DisplayThread : public android::Thread
    {
        ANativeWindowDisplayAdapter* mDisplayAdapter;
        Utils::MessageQueue mDisplayThreadQ;

    public:
        DisplayThread(ANativeWindowDisplayAdapter* da)
            : Thread(false), mDisplayAdapter(da) { }

        ///Returns a reference to the display message Q for display adapter to post messages
        Utils::MessageQueue& msgQ()
        {
            return mDisplayThreadQ;
        }

        virtual bool threadLoop()
        {
            mDisplayAdapter->displayThread();
            return false;
        }

        enum DisplayThreadCommands
        {
            DISPLAY_START,
            DISPLAY_STOP,
            DISPLAY_FRAME,
            DISPLAY_EXIT
        };
    };

    //friend declarations
friend class DisplayThread;

private:
    int postBuffer(void* displayBuf);

private:
    bool mFirstInit;
    bool mSuspend;
    int mFailedDQs;
    bool mPaused; //Pause state
    preview_stream_ops_t* mANativeWindow;
    android::sp<DisplayThread> mDisplayThread;
    FrameProvider *mFrameProvider; ///Pointer to the frame provider interface
    Utils::MessageQueue mDisplayQ;
    unsigned int mDisplayState;
    ///@todo Have a common class for these members
    mutable android::Mutex mLock;
    bool mDisplayEnabled;
    int mBufferCount;
    CameraBuffer *mBuffers;
    //buffer_handle_t** mBufferHandleMap; // -> frames[i].BufferHandle
    //IMG_native_handle_t** mGrallocHandleMap; // -> frames[i].GrallocHandle
    uint32_t* mOffsetsMap; // -> frames[i].Offset
    int mFD;
    android::KeyedVector<buffer_handle_t *, int> mFramesWithCameraAdapterMap;
    android::KeyedVector<int, int> mFramesType;
    android::sp<ErrorNotifier> mErrorNotifier;

    uint32_t mFrameWidth;
    uint32_t mFrameHeight;
    uint32_t mPreviewWidth;
    uint32_t mPreviewHeight;

    uint32_t mXOff;
    uint32_t mYOff;

    const char *mPixelFormat;

    //In case we are, for example, using our buffers in the Ducati decoder,
    //DOMX will handle lock/unlock of graphic buffers
    bool mUseExternalBufferLocking;

#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
    //Used for calculating standby to first shot
    struct timeval mStandbyToShot;
    bool mMeasureStandby;
    //Used for shot to snapshot/shot calculation
    struct timeval mStartCapture;
    bool mShotToShot;

#endif

};
+
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/inc/BaseCameraAdapter.h b/camera/inc/BaseCameraAdapter.h
new file mode 100644
index 0000000..a4b27ac
--- /dev/null
+++ b/camera/inc/BaseCameraAdapter.h
@@ -0,0 +1,308 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+
+#ifndef BASE_CAMERA_ADAPTER_H
+#define BASE_CAMERA_ADAPTER_H
+
+#include "CameraHal.h"
+
+namespace Ti {
+namespace Camera {
+
// Lookup-table entry mapping a user-facing parameter string to its HAL
// integer definition (see getLUTvalue_translateHAL).
struct LUT {
    const char * userDefinition;
    int halDefinition;
};

// A sized lookup table of LUT entries.
struct LUTtypeHAL{
    int size;
    const LUT *Table;
};
+
/**
 * Common base for all camera adapters: implements the adapter state
 * machine, frame/event subscriber bookkeeping and frame ref-counting,
 * leaving the actual capture operations to deriving classes.
 */
class BaseCameraAdapter : public CameraAdapter
{

public:

    BaseCameraAdapter();
    virtual ~BaseCameraAdapter();

    ///Initializes the camera adapter and creates any resources required
    virtual status_t initialize(CameraProperties::Properties*) = 0;

    virtual int setErrorHandler(ErrorNotifier *errorNotifier);

    //Message/Frame notification APIs
    virtual void enableMsgType(int32_t msgs, frame_callback callback=NULL, event_callback eventCb=NULL, void* cookie=NULL);
    virtual void disableMsgType(int32_t msgs, void* cookie);
    virtual void returnFrame(CameraBuffer * frameBuf, CameraFrame::FrameType frameType);
    virtual void addFramePointers(CameraBuffer *frameBuf, android_ycbcr* ycbcr);
    virtual void removeFramePointers();

    //APIs to configure Camera adapter and get the current parameter set
    virtual status_t setParameters(const android::CameraParameters& params) = 0;
    virtual void getParameters(android::CameraParameters& params) = 0;

    //API to send a command to the camera
    virtual status_t sendCommand(CameraCommands operation, int value1 = 0, int value2 = 0, int value3 = 0, int value4 = 0 );

    virtual status_t registerImageReleaseCallback(release_image_buffers_callback callback, void *user_data);

    virtual status_t registerEndCaptureCallback(end_image_capture_callback callback, void *user_data);

    //Retrieves the current Adapter state
    virtual AdapterState getState();
    //Retrieves the next Adapter state
    virtual AdapterState getNextState();

    virtual status_t setSharedAllocator(camera_request_memory shmem_alloc) { mSharedAllocator = shmem_alloc; return NO_ERROR; };

    // Rolls the state machine back to INTIALIZED_STATE from the current state
    virtual status_t rollbackToInitializedState();

protected:
    //The first two methods will try to switch the adapter state.
    //Every call to setState() should be followed by a corresponding
    //call to commitState(). If the state switch fails, then it will
    //get reset to the previous state via rollbackState().
    virtual status_t setState(CameraCommands operation);
    virtual status_t commitState();
    virtual status_t rollbackState();

    // Retrieves the current Adapter state - for internal use (not locked)
    virtual status_t getState(AdapterState &state);
    // Retrieves the next Adapter state - for internal use (not locked)
    virtual status_t getNextState(AdapterState &state);

    //-----------Interface that needs to be implemented by deriving classes --------------------

    //Should be implemented by deriving classes in order to start image capture
    virtual status_t takePicture();

    //Should be implemented by deriving classes in order to stop image capture
    virtual status_t stopImageCapture();

    //Should be implemented by deriving classes in order to start temporal bracketing
    virtual status_t startBracketing(int range);

    //Should be implemented by deriving classes in order to stop temporal bracketing
    virtual status_t stopBracketing();

    //Should be implemented by deriving classes in order to initiate autoFocus
    virtual status_t autoFocus();

    //Should be implemented by deriving classes in order to cancel autoFocus
    virtual status_t cancelAutoFocus();

    //Should be called by deriving classes in order to do some bookkeeping
    virtual status_t startVideoCapture();

    //Should be called by deriving classes in order to do some bookkeeping
    virtual status_t stopVideoCapture();

    //Should be implemented by deriving classes in order to start camera preview
    virtual status_t startPreview();

    //Should be implemented by deriving classes in order to stop camera preview
    virtual status_t stopPreview();

    //Should be implemented by deriving classes in order to start smooth zoom
    virtual status_t startSmoothZoom(int targetIdx);

    //Should be implemented by deriving classes in order to stop smooth zoom
    virtual status_t stopSmoothZoom();

    //Should be implemented by deriving classes in order to register frame buffers with the adapter
    virtual status_t useBuffers(CameraMode mode, CameraBuffer* bufArr, int num, size_t length, unsigned int queueable);

    //Should be implemented by deriving classes in order to queue a released buffer in CameraAdapter
    virtual status_t fillThisBuffer(CameraBuffer* frameBuf, CameraFrame::FrameType frameType);

    //API to get the frame size required to be allocated. This size is used to override the size passed
    //by camera service when VSTAB/VNF is turned ON for example
    virtual status_t getFrameSize(size_t &width, size_t &height);

    //API to get required data frame size
    virtual status_t getFrameDataSize(size_t &dataFrameSize, size_t bufferCount);

    //API to get required picture buffers size with the current configuration in CameraParameters
    virtual status_t getPictureBufferSize(CameraFrame &frame, size_t bufferCount);

    // Should be implemented by deriving classes in order to start face detection
    // ( if supported )
    virtual status_t startFaceDetection();

    // Should be implemented by deriving classes in order to stop face detection
    // ( if supported )
    virtual status_t stopFaceDetection();

    virtual status_t switchToExecuting();

    virtual status_t setupTunnel(uint32_t SliceHeight, uint32_t EncoderHandle, uint32_t width, uint32_t height);

    virtual status_t destroyTunnel();

    virtual status_t cameraPreviewInitialization();

    // Receive orientation events from CameraHal
    virtual void onOrientationEvent(uint32_t orientation, uint32_t tilt);

    // ---------------------Interface ends-----------------------------------

    // Notify the registered event subscribers.
    status_t notifyFocusSubscribers(CameraHalEvent::FocusStatus status);
    status_t notifyShutterSubscribers();
    status_t notifyZoomSubscribers(int zoomIdx, bool targetReached);
    status_t notifyMetadataSubscribers(android::sp<CameraMetadataResult> &meta);

    //Send the frame to subscribers
    status_t sendFrameToSubscribers(CameraFrame *frame);

    //Resets the refCount for this particular frame
    status_t resetFrameRefCount(CameraFrame &frame);

    //A couple of helper functions
    void setFrameRefCountByType(CameraBuffer* frameBuf, CameraFrame::FrameType frameType, int refCount);
    int getFrameRefCount(CameraBuffer* frameBuf);
    int getFrameRefCountByType(CameraBuffer* frameBuf, CameraFrame::FrameType frameType);
    int setInitFrameRefCount(CameraBuffer* buf, unsigned int mask);
    static const char* getLUTvalue_translateHAL(int Value, LUTtypeHAL LUT);

// private member functions
private:
    status_t __sendFrameToSubscribers(CameraFrame* frame,
                                      android::KeyedVector<int, frame_callback> *subscribers,
                                      CameraFrame::FrameType frameType);
    status_t rollbackToPreviousState();

// protected data types and variables
protected:
    enum FrameState {
        STOPPED = 0,
        RUNNING
    };

    enum FrameCommands {
        START_PREVIEW = 0,
        START_RECORDING,
        RETURN_FRAME,
        STOP_PREVIEW,
        STOP_RECORDING,
        DO_AUTOFOCUS,
        TAKE_PICTURE,
        FRAME_EXIT
    };

    enum AdapterCommands {
        ACK = 0,
        ERROR
    };

#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS

    struct timeval mStartFocus;
    struct timeval mStartCapture;

#endif

    mutable android::Mutex mReturnFrameLock;

    //Lock protecting the Adapter state
    mutable android::Mutex mLock;
    AdapterState mAdapterState;
    AdapterState mNextState;

    //Different frame subscribers get stored using these
    android::KeyedVector<int, frame_callback> mFrameSubscribers;
    android::KeyedVector<int, frame_callback> mSnapshotSubscribers;
    android::KeyedVector<int, frame_callback> mFrameDataSubscribers;
    android::KeyedVector<int, frame_callback> mVideoSubscribers;
    android::KeyedVector<int, frame_callback> mVideoInSubscribers;
    android::KeyedVector<int, frame_callback> mImageSubscribers;
    android::KeyedVector<int, frame_callback> mRawSubscribers;
    android::KeyedVector<int, event_callback> mFocusSubscribers;
    android::KeyedVector<int, event_callback> mZoomSubscribers;
    android::KeyedVector<int, event_callback> mShutterSubscribers;
    android::KeyedVector<int, event_callback> mMetadataSubscribers;

    //Preview buffer management data
    CameraBuffer *mPreviewBuffers;
    int mPreviewBufferCount;
    size_t mPreviewBuffersLength;
    android::KeyedVector<CameraBuffer *, int> mPreviewBuffersAvailable;
    mutable android::Mutex mPreviewBufferLock;

    //Snapshot buffer management data
    android::KeyedVector<CameraBuffer *, int> mSnapshotBuffersAvailable;
    mutable android::Mutex mSnapshotBufferLock;

    //Video buffer management data
    CameraBuffer *mVideoBuffers;
    android::KeyedVector<CameraBuffer *, int> mVideoBuffersAvailable;
    int mVideoBuffersCount;
    size_t mVideoBuffersLength;
    mutable android::Mutex mVideoBufferLock;

    //Image buffer management data
    CameraBuffer *mCaptureBuffers;
    android::KeyedVector<CameraBuffer *, int> mCaptureBuffersAvailable;
    int mCaptureBuffersCount;
    size_t mCaptureBuffersLength;
    mutable android::Mutex mCaptureBufferLock;

    //Metadata buffer management
    CameraBuffer *mPreviewDataBuffers;
    android::KeyedVector<CameraBuffer *, int> mPreviewDataBuffersAvailable;
    int mPreviewDataBuffersCount;
    size_t mPreviewDataBuffersLength;
    mutable android::Mutex mPreviewDataBufferLock;

    //Video input buffer management data (used for reproc pipe)
    CameraBuffer *mVideoInBuffers;
    android::KeyedVector<CameraBuffer *, int> mVideoInBuffersAvailable;
    mutable android::Mutex mVideoInBufferLock;

    Utils::MessageQueue mFrameQ;
    Utils::MessageQueue mAdapterQ;
    mutable android::Mutex mSubscriberLock;
    ErrorNotifier *mErrorNotifier;
    release_image_buffers_callback mReleaseImageBuffersCallback;
    end_image_capture_callback mEndImageCaptureCallback;
    void *mReleaseData;
    void *mEndCaptureData;
    bool mRecording;

    camera_request_memory mSharedAllocator;

    uint32_t mFramesWithDucati;
    uint32_t mFramesWithDisplay;
    uint32_t mFramesWithEncoder;

#ifdef CAMERAHAL_DEBUG
    android::Mutex mBuffersWithDucatiLock;
    android::KeyedVector<int, bool> mBuffersWithDucati;
#endif

    android::KeyedVector<void *, CameraFrame *> mFrameQueue;
};
+
+} // namespace Camera
+} // namespace Ti
+
+#endif //BASE_CAMERA_ADAPTER_H
+
+
diff --git a/camera/inc/BufferSourceAdapter.h b/camera/inc/BufferSourceAdapter.h
new file mode 100644
index 0000000..c006b9d
--- /dev/null
+++ b/camera/inc/BufferSourceAdapter.h
@@ -0,0 +1,228 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef BUFFER_SOURCE_ADAPTER_H
+#define BUFFER_SOURCE_ADAPTER_H
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
+
+#include "CameraHal.h"
+#include <ui/GraphicBufferMapper.h>
+#include <hal_public.h>
+
+namespace Ti {
+namespace Camera {
+
+/**
+ * Handles enqueueing/dequeing buffers to tap-in/tap-out points
+ * TODO(XXX): this class implements DisplayAdapter for now
+ * but this will most likely change once tap-in/tap-out points
+ * are better defined
+ */
+
+class BufferSourceAdapter : public DisplayAdapter
+{
+// private types
+private:
+    ///Constant declarations
+    static const int NO_BUFFERS_IMAGE_CAPTURE_SYSTEM_HEAP;
+
+
+    // helper class to return frame in different thread context.
+    // Decouples dequeueing buffers from the camera data path: the camera
+    // thread only signal()s, the actual dequeue happens in threadLoop().
+    class ReturnFrame : public android::Thread {
+    public:
+        ReturnFrame(BufferSourceAdapter* __this) : mBufferSourceAdapter(__this) {
+            android::AutoMutex lock(mReturnFrameMutex);
+            mDestroying = false;
+            mFrameCount = 0;
+        }
+
+        ~ReturnFrame() {
+            android::AutoMutex lock(mReturnFrameMutex);
+        }
+
+        // Notes one more pending frame and wakes threadLoop().
+        void signal() {
+            android::AutoMutex lock(mReturnFrameMutex);
+            mFrameCount++;
+            mReturnFrameCondition.signal();
+        }
+
+        // Wakes the thread so it can observe mDestroying and exit.
+        virtual void requestExit() {
+            Thread::requestExit();
+
+            android::AutoMutex lock(mReturnFrameMutex);
+            mDestroying = true;
+            mReturnFrameCondition.signal();
+        }
+
+        // Waits for signal(), then returns one frame via
+        // BufferSourceAdapter::handleFrameReturn().
+        virtual bool threadLoop() {
+            android::AutoMutex lock(mReturnFrameMutex);
+            if ( 0 >= mFrameCount ) {
+                mReturnFrameCondition.wait(mReturnFrameMutex);
+            }
+            if (!mDestroying) {
+                mBufferSourceAdapter->handleFrameReturn();
+                mFrameCount--;
+            }
+            return true;
+        }
+
+    private:
+        BufferSourceAdapter* mBufferSourceAdapter; // parent adapter (not owned)
+        android::Condition mReturnFrameCondition;  // signalled on new frame / exit
+        android::Mutex mReturnFrameMutex;          // guards mFrameCount, mDestroying
+        int mFrameCount;                           // frames pending return
+        bool mDestroying;                          // set by requestExit()
+    };
+
+    // helper class to queue frame in different thread context.
+    // addFrame() copies the frame; threadLoop() delivers the copies in
+    // FIFO order via handleFrameCallback() and frees them.
+    class QueueFrame : public android::Thread {
+    public:
+        QueueFrame(BufferSourceAdapter* __this) : mBufferSourceAdapter(__this) {
+            mDestroying = false;
+        }
+
+        ~QueueFrame() {
+        }
+
+        // Takes a heap copy of |frame| and wakes threadLoop().
+        void addFrame(CameraFrame *frame) {
+            android::AutoMutex lock(mFramesMutex);
+            mFrames.add(new CameraFrame(*frame));
+            mFramesCondition.signal();
+        }
+
+        // Drains and frees every queued frame, then wakes the thread to exit.
+        virtual void requestExit() {
+            Thread::requestExit();
+
+            mDestroying = true;
+
+            android::AutoMutex lock(mFramesMutex);
+            while (!mFrames.empty()) {
+                CameraFrame *frame = mFrames.itemAt(0);
+                mFrames.removeAt(0);
+                frame->mMetaData.clear();
+                delete frame;
+            }
+            mFramesCondition.signal();
+        }
+
+        virtual bool threadLoop() {
+            CameraFrame *frame = NULL;
+            {
+                android::AutoMutex lock(mFramesMutex);
+                while (mFrames.empty() && !mDestroying) mFramesCondition.wait(mFramesMutex);
+                if (!mDestroying) {
+                    frame = mFrames.itemAt(0);
+                    mFrames.removeAt(0);
+                }
+            }
+
+            if (frame) {
+                mBufferSourceAdapter->handleFrameCallback(frame);
+                frame->mMetaData.clear();
+
+                if (frame->mFrameType != CameraFrame::REPROCESS_INPUT_FRAME) {
+                    // signal return frame thread that it can dequeue a buffer now
+                    mBufferSourceAdapter->mReturnFrame->signal();
+                }
+
+                delete frame;
+            }
+
+            return true;
+        }
+
+    private:
+        BufferSourceAdapter* mBufferSourceAdapter; // parent adapter (not owned)
+        android::Vector<CameraFrame *> mFrames;    // owned copies, FIFO order
+        android::Condition mFramesCondition;       // signalled on add / exit
+        android::Mutex mFramesMutex;               // guards mFrames
+        bool mDestroying;                          // set by requestExit()
+    };
+
+    // Direction of the buffer source relative to the camera.
+    enum {
+        BUFFER_SOURCE_TAP_IN,
+        BUFFER_SOURCE_TAP_OUT
+    };
+
+// public member functions
+public:
+    BufferSourceAdapter();
+    virtual ~BufferSourceAdapter();
+
+    virtual status_t initialize();
+    virtual int setPreviewWindow(struct preview_stream_ops *source);
+    virtual int setFrameProvider(FrameNotifier *frameProvider);
+    virtual int setErrorHandler(ErrorNotifier *errorNotifier);
+    virtual int enableDisplay(int width, int height, struct timeval *refTime = NULL);
+    virtual int disableDisplay(bool cancel_buffer = true);
+    virtual status_t pauseDisplay(bool pause);
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+    // Not implemented in this class
+    virtual status_t setSnapshotTimeRef(struct timeval *refTime = NULL) { return NO_ERROR; }
+#endif
+    virtual bool supportsExternalBuffering();
+    virtual CameraBuffer * allocateBufferList(int width, int dummyHeight, const char* format, int &bytes, int numBufs);
+    virtual CameraBuffer *getBufferList(int *numBufs);
+    virtual uint32_t * getOffsets() ;
+    virtual int getFd() ;
+    virtual int freeBufferList(CameraBuffer * buflist);
+    virtual int maxQueueableBuffers(unsigned int& queueable);
+    virtual int minUndequeueableBuffers(int& unqueueable);
+    virtual bool match(const char * str);
+
+    virtual CameraBuffer * getBuffers(bool reset = false);
+    virtual unsigned int getSize();
+    virtual int getBufferCount();
+
+    // Static relay registered with the FrameProvider; forwards to addFrame().
+    static void frameCallback(CameraFrame* caFrame);
+    void addFrame(CameraFrame* caFrame);
+    void handleFrameCallback(CameraFrame* caFrame);
+    bool handleFrameReturn();
+
+private:
+    void destroy();
+    status_t returnBuffersToWindow();
+
+private:
+    preview_stream_ops_t* mBufferSource; // tap-in/tap-out endpoint (not owned)
+    FrameProvider *mFrameProvider; // Pointer to the frame provider interface
+
+    mutable android::Mutex mLock;
+    int mBufferCount;
+    CameraBuffer *mBuffers;
+
+    android::KeyedVector<buffer_handle_t *, int> mFramesWithCameraAdapterMap;
+    android::sp<ErrorNotifier> mErrorNotifier;
+    android::sp<ReturnFrame> mReturnFrame;
+    android::sp<QueueFrame> mQueueFrame;
+
+    uint32_t mFrameWidth;
+    uint32_t mFrameHeight;
+    uint32_t mPreviewWidth;
+    uint32_t mPreviewHeight;
+
+    int mBufferSourceDirection; // BUFFER_SOURCE_TAP_IN or BUFFER_SOURCE_TAP_OUT
+
+    const char *mPixelFormat;
+};
+
+} // namespace Camera
+} // namespace Ti
+
+#endif
+
+#endif
diff --git a/camera/inc/CameraHal.h b/camera/inc/CameraHal.h
new file mode 100644
index 0000000..1af7857
--- /dev/null
+++ b/camera/inc/CameraHal.h
@@ -0,0 +1,1550 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+
+#ifndef ANDROID_HARDWARE_CAMERA_HARDWARE_H
+#define ANDROID_HARDWARE_CAMERA_HARDWARE_H
+
+#include <stdio.h>
+#include <stdarg.h>
+#include <stdlib.h>
+#include <string.h>
+#include <unistd.h>
+#include <time.h>
+#include <fcntl.h>
+#include <sys/ioctl.h>
+#include <sys/mman.h>
+#include <sys/stat.h>
+
+#include <hardware/camera.h>
+#include <utils/Log.h>
+#include <utils/threads.h>
+#include <utils/threads.h>
+#include <binder/MemoryBase.h>
+#include <binder/MemoryHeapBase.h>
+#include <camera/CameraParameters.h>
+#ifdef OMAP_ENHANCEMENT_CPCAM
+#include <camera/CameraMetadata.h>
+#include <camera/ShotParameters.h>
+#endif
+#include <ui/GraphicBufferAllocator.h>
+#include <ui/GraphicBuffer.h>
+
+/* For IMG_native_handle_t */
+#include <ui/GraphicBufferMapper.h>
+#include <hal_public.h>
+
+#include "Common.h"
+#include "MessageQueue.h"
+#include "Semaphore.h"
+#include "CameraProperties.h"
+#include "SensorListener.h"
+
+/*DRM*/
+extern "C" {
+#include <xf86drm.h>
+#include <omap_drm.h>
+#include <omap_drmif.h>
+}
+
+//temporarily define format here
+#define HAL_PIXEL_FORMAT_TI_NV12 0x100
+#define HAL_PIXEL_FORMAT_TI_Y8 0x103
+#define HAL_PIXEL_FORMAT_TI_Y16 0x104
+#define HAL_PIXEL_FORMAT_TI_UYVY 0x105
+
+#define MIN_WIDTH 640
+#define MIN_HEIGHT 480
+#define PICTURE_WIDTH 3264 /* 5mp - 2560. 8mp - 3280 */ /* Make sure it is a multiple of 16. */
+#define PICTURE_HEIGHT 2448 /* 5mp - 2048. 8mp - 2464 */ /* Make sure it is a multiple of 16. */
+#define PREVIEW_WIDTH 176
+#define PREVIEW_HEIGHT 144
+#define PIXEL_FORMAT V4L2_PIX_FMT_UYVY
+
+#define VIDEO_FRAME_COUNT_MAX 8 //NUM_OVERLAY_BUFFERS_REQUESTED
+#define MAX_CAMERA_BUFFERS 8 //NUM_OVERLAY_BUFFERS_REQUESTED
+#define MAX_ZOOM 3
+#define THUMB_WIDTH 80
+#define THUMB_HEIGHT 60
+#define PIX_YUV422I 0
+#define PIX_YUV420P 1
+
+#define SATURATION_OFFSET 100
+#define SHARPNESS_OFFSET 100
+#define CONTRAST_OFFSET 100
+
+#define FRAME_RATE_HIGH_HD 60
+
+#define CAMHAL_GRALLOC_USAGE GRALLOC_USAGE_HW_TEXTURE | \
+ GRALLOC_USAGE_HW_RENDER | \
+ GRALLOC_USAGE_SW_READ_RARELY | \
+ GRALLOC_USAGE_SW_WRITE_NEVER
+
+//Enables Absolute PPM measurements in logcat
+#define PPM_INSTRUMENTATION_ABS 1
+
+#define LOCK_BUFFER_TRIES 5
+#define HAL_PIXEL_FORMAT_NV12 0x100
+
+#define OP_STR_SIZE 100
+
+#define NONNEG_ASSIGN(x,y) \
+ if(x > -1) \
+ y = x
+
+#define CAMHAL_SIZE_OF_ARRAY(x) static_cast<int>(sizeof(x)/sizeof(x[0]))
+
+namespace Ti {
+namespace Camera {
+
+#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
+extern const char * const kRawImagesOutputDirPath;
+extern const char * const kYuvImagesOutputDirPath;
+#endif
+#define V4L_CAMERA_NAME_USB "USBCAMERA"
+#define OMX_CAMERA_NAME_OV "OV5640"
+#define OMX_CAMERA_NAME_SONY "IMX060"
+
+
+///Forward declarations
+class CameraHal;
+class CameraFrame;
+class CameraHalEvent;
+class DisplayFrame;
+
+// An inclusive [min, max] frames-per-second range; (-1, -1) means "unset".
+class FpsRange {
+public:
+    // Three-way ordering helper suitable for sort callbacks.
+    static int compare(const FpsRange * left, const FpsRange * right);
+
+    FpsRange(int min, int max);
+    FpsRange();
+
+    bool operator==(const FpsRange & fpsRange) const;
+
+    bool isNull() const;
+    bool isFixed() const;
+
+    int min() const;
+    int max() const;
+
+private:
+    int mMin;
+    int mMax;
+};
+
+
+// Orders ranges by max first, then by min; returns -1/0/1.
+inline int FpsRange::compare(const FpsRange * const left, const FpsRange * const right) {
+    if ( left->max() < right->max() ) {
+        return -1;
+    }
+
+    if ( left->max() > right->max() ) {
+        return 1;
+    }
+
+    if ( left->min() < right->min() ) {
+        return -1;
+    }
+
+    if ( left->min() > right->min() ) {
+        return 1;
+    }
+
+    return 0;
+}
+
+inline FpsRange::FpsRange(const int min, const int max) : mMin(min), mMax(max) {}
+
+// Default-constructed range is null (both bounds -1).
+inline FpsRange::FpsRange() : mMin(-1), mMax(-1) {}
+
+inline bool FpsRange::operator==(const FpsRange & fpsRange) const {
+    return mMin == fpsRange.mMin && mMax == fpsRange.mMax;
+}
+
+// True if either bound is still the -1 sentinel.
+inline bool FpsRange::isNull() const {
+    return mMin == -1 || mMax == -1;
+}
+
+// True for a fixed frame rate (min == max).
+inline bool FpsRange::isFixed() const {
+    return mMin == mMax;
+}
+
+inline int FpsRange::min() const { return mMin; }
+
+inline int FpsRange::max() const { return mMax; }
+
+// A weighted rectangular region (focus/metering area) in the normalized
+// [-1000, 1000] coordinate space defined by the Android camera API.
+class CameraArea : public android::RefBase
+{
+public:
+
+    CameraArea(ssize_t top,
+               ssize_t left,
+               ssize_t bottom,
+               ssize_t right,
+               size_t weight) : mTop(top),
+                                mLeft(left),
+                                mBottom(bottom),
+                                mRight(right),
+                                mWeight(weight) {}
+
+    // Maps this normalized area onto a width x height frame, returning the
+    // pixel-space top/left corner and area dimensions.
+    // NOTE: "transfrom" is a long-standing typo kept for ABI/source compat.
+    status_t transfrom(size_t width,
+                       size_t height,
+                       size_t &top,
+                       size_t &left,
+                       size_t &areaWidth,
+                       size_t &areaHeight);
+
+    // True if any coordinate is non-zero (weight is ignored here).
+    bool isValid()
+    {
+        return ( ( 0 != mTop ) || ( 0 != mLeft ) || ( 0 != mBottom ) || ( 0 != mRight) );
+    }
+
+    // True only when all coordinates AND the weight are zero.
+    bool isZeroArea()
+    {
+        return  ( (0 == mTop ) && ( 0 == mLeft ) && ( 0 == mBottom )
+                  && ( 0 == mRight ) && ( 0 == mWeight ));
+    }
+
+    size_t getWeight()
+    {
+        return mWeight;
+    }
+
+    bool compare(const android::sp<CameraArea> &area);
+
+    // Parses the "(t,l,b,r,w),(...)" string format used by the
+    // parameters API into a vector of CameraArea objects.
+    static status_t parseAreas(const char *area,
+                               size_t areaLength,
+                               android::Vector< android::sp<CameraArea> > &areas);
+
+    // Validates one area against the TOP/LEFT/BOTTOM/RIGHT/WEIGHT_* limits.
+    static status_t checkArea(ssize_t top,
+                              ssize_t left,
+                              ssize_t bottom,
+                              ssize_t right,
+                              ssize_t weight);
+
+    static bool areAreasDifferent(android::Vector< android::sp<CameraArea> > &, android::Vector< android::sp<CameraArea> > &);
+
+protected:
+    static const ssize_t TOP = -1000;
+    static const ssize_t LEFT = -1000;
+    static const ssize_t BOTTOM = 1000;
+    static const ssize_t RIGHT = 1000;
+    static const ssize_t WEIGHT_MIN = 1;
+    static const ssize_t WEIGHT_MAX = 1000;
+
+    ssize_t mTop;
+    ssize_t mLeft;
+    ssize_t mBottom;
+    ssize_t mRight;
+    size_t mWeight;
+};
+
+// Ref-counted holder for per-frame metadata (face detection results and,
+// under OMAP_ENHANCEMENT, gain/exposure). Owns mMetadata.faces and, when
+// present, the extended-metadata camera_memory_t; both are freed in the dtor.
+class CameraMetadataResult : public android::RefBase
+{
+public:
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
+    // Takes ownership of |extMeta| (released via its own release() hook).
+    CameraMetadataResult(camera_memory_t * extMeta) : mExtendedMetadata(extMeta) {
+        mMetadata.faces = NULL;
+        mMetadata.number_of_faces = 0;
+#ifdef OMAP_ENHANCEMENT
+        mMetadata.analog_gain = 0;
+        mMetadata.exposure_time = 0;
+#endif
+    };
+#endif
+
+    CameraMetadataResult() {
+        mMetadata.faces = NULL;
+        mMetadata.number_of_faces = 0;
+#ifdef OMAP_ENHANCEMENT_CPCAM
+        mMetadata.analog_gain = 0;
+        mMetadata.exposure_time = 0;
+#endif
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
+        mExtendedMetadata = NULL;
+#endif
+    }
+
+    virtual ~CameraMetadataResult() {
+        if ( NULL != mMetadata.faces ) {
+            free(mMetadata.faces);
+        }
+#ifdef OMAP_ENHANCEMENT_CPCAM
+        if ( NULL != mExtendedMetadata ) {
+            mExtendedMetadata->release(mExtendedMetadata);
+        }
+#endif
+    }
+
+    camera_frame_metadata_t *getMetadataResult() { return &mMetadata; };
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
+    camera_memory_t *getExtendedMetadata() { return mExtendedMetadata; };
+#endif
+
+    // Normalized coordinate bounds, matching CameraArea's coordinate space.
+    static const ssize_t TOP = -1000;
+    static const ssize_t LEFT = -1000;
+    static const ssize_t BOTTOM = 1000;
+    static const ssize_t RIGHT = 1000;
+    static const ssize_t INVALID_DATA = -2000;
+
+private:
+
+    camera_frame_metadata_t mMetadata;
+#ifdef OMAP_ENHANCEMENT_CPCAM
+    camera_memory_t *mExtendedMetadata;
+#endif
+};
+
+// Allocator/backing type of a CameraBuffer; determines how the fields of
+// CameraBuffer below are interpreted.
+typedef enum {
+    CAMERA_BUFFER_NONE = 0,
+    CAMERA_BUFFER_GRALLOC,
+    CAMERA_BUFFER_ANW,
+    CAMERA_BUFFER_MEMORY,
+    CAMERA_BUFFER_ION,
+    CAMERA_BUFFER_DRM
+} CameraBufferType;
+
+// Generic descriptor for a camera buffer, regardless of which allocator
+// produced it.
+typedef struct _CameraBuffer {
+    CameraBufferType type;
+    /* opaque is the generic drop-in replacement for the pointers
+     * that were used previously */
+    void *opaque;
+
+    /* opaque has different meanings depending on the buffer type:
+     * GRALLOC - gralloc_handle_t
+     * ANW - a pointer to the buffer_handle_t (which corresponds to
+     *       the ANativeWindowBuffer *)
+     * MEMORY - address of allocated memory
+     * ION - address of mapped ion allocation
+     *
+     * FIXME opaque should be split into several fields:
+     * - handle/pointer we got from the allocator
+     * - handle/value we pass to OMX
+     * - pointer to mapped memory (if the buffer is mapped)
+     */
+
+    /* mapped holds ptr to mapped memory in userspace */
+    void *mapped;
+    android_ycbcr ycbcr;
+
+    /* These are specific to DRM buffers */
+    struct omap_device *omapdrm_dev;
+    struct omap_bo *omapdrm_bo;
+    int omapdrm_fd;
+
+    /* These are specific to ION buffers */
+//    struct ion_handle * ion_handle;
+//    int ion_fd;
+//    int fd;
+    int dma_buf_fd;
+    size_t size;
+    int index;
+
+    /* These describe the camera buffer */
+    int width;
+    int stride;
+    int height;
+    const char *format;
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+    /* timestamp used by the PPM instrumentation */
+    struct timeval ppmStamp;
+
+#endif
+
+    /* These are for buffers which include borders */
+    int offset; // where valid data starts
+    int actual_size; // size of the entire buffer with borders
+    int privateData;
+} CameraBuffer;
+
+// Returns the handle/pointer form of |buffer| expected by OMX.
+void * camera_buffer_get_omx_ptr (CameraBuffer *buffer);
+
+// Describes one frame flowing through the HAL: the buffer, its geometry,
+// timing, and a bitmask frame type (see FrameType).
+class CameraFrame
+{
+    public:
+
+    enum FrameType
+        {
+            PREVIEW_FRAME_SYNC = 0x1, ///SYNC implies that the frame needs to be explicitly returned after consuming in order to be filled by camera again
+            PREVIEW_FRAME = 0x2   , ///Preview frame includes viewfinder and snapshot frames
+            IMAGE_FRAME_SYNC = 0x4, ///Image Frame is the image capture output frame
+            IMAGE_FRAME = 0x8,
+            VIDEO_FRAME_SYNC = 0x10, ///Timestamp will be updated for these frames
+            VIDEO_FRAME = 0x20,
+            FRAME_DATA_SYNC = 0x40, ///Any extra data associated with the frame. Always synced with the frame
+            FRAME_DATA= 0x80,
+            RAW_FRAME = 0x100,
+            SNAPSHOT_FRAME = 0x200,
+            REPROCESS_INPUT_FRAME = 0x400,
+            ALL_FRAMES = 0xFFFF   ///Maximum of 16 frame types supported
+        };
+
+    enum FrameQuirks
+    {
+        ENCODE_RAW_YUV422I_TO_JPEG = 0x1 << 0,
+        HAS_EXIF_DATA = 0x1 << 1,
+        FORMAT_YUV422I_YUYV = 0x1 << 2,
+        FORMAT_YUV422I_UYVY = 0x1 << 3,
+    };
+
+    //default constructor
+    CameraFrame():
+    mCookie(NULL),
+    mCookie2(NULL),
+    mBuffer(NULL),
+    mFrameType(0),
+    mTimestamp(0),
+    mWidth(0),
+    mHeight(0),
+    mOffset(0),
+    mAlignment(0),
+    mFd(0),
+    mLength(0),
+    mFrameMask(0),
+    mQuirks(0)
+    {
+        mYuv[0] = NULL;
+        mYuv[1] = NULL;
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
+        mMetaData = 0;
+#endif
+    }
+
+    void *mCookie;            // subscriber cookie set when the frame is dispatched
+    void *mCookie2;
+    CameraBuffer *mBuffer;
+    int mFrameType;           // bitmask of FrameType values
+    nsecs_t mTimestamp;
+    unsigned int mWidth, mHeight;
+    uint32_t mOffset;         // offset of valid data within the buffer
+    unsigned int mAlignment;
+    int mFd;
+    size_t mLength;
+    unsigned mFrameMask;
+    unsigned int mQuirks;     // bitmask of FrameQuirks values
+    unsigned int mYuv[2];
+#ifdef OMAP_ENHANCEMENT_CPCAM
+    android::sp<CameraMetadataResult> mMetaData;
+#endif
+    ///@todo add other member vars like  stride etc
+};
+
+// Severity classes reported through ErrorNotifier::errorNotify().
+enum CameraHalError
+{
+    CAMERA_ERROR_FATAL = 0x1, //Fatal errors can only be recovered by restarting media server
+    CAMERA_ERROR_HARD = 0x2,  // Hard errors are hardware hangs that may be recoverable by resetting the hardware internally within the adapter
+    CAMERA_ERROR_SOFT = 0x4, // Soft errors are non fatal errors that can be recovered from without needing to stop use-case
+};
+
+///Common Camera Hal Event class which is visible to CameraAdapter, DisplayAdapter and AppCallbackNotifier
+///@todo Rename this class to CameraEvent
+class CameraHalEvent
+{
+public:
+    //Enums
+    // Bitmask event types; subscribers register for a mask of these.
+    enum CameraHalEventType {
+        NO_EVENTS = 0x0,
+        EVENT_FOCUS_LOCKED = 0x1,
+        EVENT_FOCUS_ERROR = 0x2,
+        EVENT_ZOOM_INDEX_REACHED = 0x4,
+        EVENT_SHUTTER = 0x8,
+        EVENT_METADATA = 0x10,
+        ///@remarks Future enum related to display, like frame displayed event, could be added here
+        ALL_EVENTS = 0xFFFF ///Maximum of 16 event types supported
+    };
+
+    enum FocusStatus {
+        FOCUS_STATUS_SUCCESS = 0x1,
+        FOCUS_STATUS_FAIL = 0x2,
+        FOCUS_STATUS_PENDING = 0x4,
+        FOCUS_STATUS_DONE = 0x8,
+    };
+
+    ///Class declarations
+    ///@remarks Add a new class for a new event type added above
+
+    //Shutter event specific data
+    typedef struct ShutterEventData_t {
+        bool shutterClosed;
+    }ShutterEventData;
+
+    ///Focus event specific data
+    typedef struct FocusEventData_t {
+        FocusStatus focusStatus;
+        int currentFocusValue;
+    } FocusEventData;
+
+    ///Zoom specific event data
+    typedef struct ZoomEventData_t {
+        int currentZoomIndex;
+        bool targetZoomIndexReached;
+    } ZoomEventData;
+
+    // One detected face, in the normalized metadata coordinate space.
+    typedef struct FaceData_t {
+        ssize_t top;
+        ssize_t left;
+        ssize_t bottom;
+        ssize_t right;
+        size_t score;
+    } FaceData;
+
+    typedef android::sp<CameraMetadataResult> MetaEventData;
+
+    // Union-like carrier: only the member matching mEventType is meaningful.
+    class CameraHalEventData : public android::RefBase{
+
+    public:
+
+        CameraHalEvent::FocusEventData focusEvent;
+        CameraHalEvent::ZoomEventData zoomEvent;
+        CameraHalEvent::ShutterEventData shutterEvent;
+        CameraHalEvent::MetaEventData metadataEvent;
+    };
+
+    //default constructor
+    CameraHalEvent():
+        mCookie(NULL),
+        mEventType(NO_EVENTS) {}
+
+    //copy constructor
+    CameraHalEvent(const CameraHalEvent &event) :
+        mCookie(event.mCookie),
+        mEventType(event.mEventType),
+        mEventData(event.mEventData) {};
+
+    void* mCookie;                              // subscriber cookie
+    CameraHalEventType mEventType;              // selects the valid mEventData member
+    android::sp<CameraHalEventData> mEventData;
+
+};
+
+/// Have a generic callback class based on template - to adapt CameraFrame and Event
+typedef void (*frame_callback) (CameraFrame *cameraFrame);
+typedef void (*event_callback) (CameraHalEvent *event);
+
+//signals CameraHAL to release image buffers
+typedef void (*release_image_buffers_callback) (void *userData);
+typedef void (*end_image_capture_callback) (void *userData);
+
+/**
+  * Interface class implemented by classes that have some events to communicate to dependent classes
+  * Dependent classes use this interface for registering for events
+  */
+class MessageNotifier
+{
+public:
+    static const uint32_t EVENT_BIT_FIELD_POSITION;
+    static const uint32_t FRAME_BIT_FIELD_POSITION;
+
+    ///@remarks Msg type comes from CameraFrame and CameraHalEvent classes
+    ///           MSB 16 bits is for events and LSB 16 bits is for frame notifications
+    ///           FrameProvider and EventProvider classes act as helpers to event/frame
+    ///           consumers to call this api
+    virtual void enableMsgType(int32_t msgs, frame_callback frameCb=NULL, event_callback eventCb=NULL, void* cookie=NULL) = 0;
+    virtual void disableMsgType(int32_t msgs, void* cookie) = 0;
+
+    virtual ~MessageNotifier() {};
+};
+
+// Sink for error reports; |error| is a CameraHalError bitmask value.
+class ErrorNotifier : public virtual android::RefBase
+{
+public:
+    virtual void errorNotify(int error) = 0;
+
+    virtual ~ErrorNotifier() {};
+};
+
+
+/**
+  * Interface class abstraction for Camera Adapter to act as a frame provider
+  * This interface is fully implemented by Camera Adapter
+  */
+class FrameNotifier : public MessageNotifier
+{
+public:
+    // Hands a consumed SYNC frame back so the adapter can refill it.
+    virtual void returnFrame(CameraBuffer* frameBuf, CameraFrame::FrameType frameType) = 0;
+    virtual void addFramePointers(CameraBuffer *frameBuf, android_ycbcr *ycbcr) = 0;
+    virtual void removeFramePointers() = 0;
+
+    virtual ~FrameNotifier() {};
+};
+
+/**   * Wrapper class around Frame Notifier, which is used by display and notification classes for interacting with Camera Adapter
+  */
+class FrameProvider
+{
+    FrameNotifier* mFrameNotifier;   // the adapter we forward to (not owned)
+    void* mCookie;                   // opaque subscriber identity
+    frame_callback mFrameCallback;   // invoked for each delivered frame
+
+public:
+    FrameProvider(FrameNotifier *fn, void* cookie, frame_callback frameCallback)
+        :mFrameNotifier(fn), mCookie(cookie),mFrameCallback(frameCallback) { }
+
+    // Subscribe/unsubscribe mFrameCallback for the given frame-type mask.
+    int enableFrameNotification(int32_t frameTypes);
+    int disableFrameNotification(int32_t frameTypes);
+    int returnFrame(CameraBuffer *frameBuf, CameraFrame::FrameType frameType);
+    void addFramePointers(CameraBuffer *frameBuf, android_ycbcr *ycbcr);
+    void removeFramePointers();
+};
+
+/** Wrapper class around MessageNotifier, which is used by display and notification classes for interacting with
+   * Camera Adapter
+  */
+class EventProvider
+{
+public:
+    MessageNotifier* mEventNotifier; // the adapter we forward to (not owned)
+    void* mCookie;                   // opaque subscriber identity
+    event_callback mEventCallback;   // invoked for each delivered event
+
+public:
+    EventProvider(MessageNotifier *mn, void* cookie, event_callback eventCallback)
+        :mEventNotifier(mn), mCookie(cookie), mEventCallback(eventCallback) {}
+
+    // Subscribe/unsubscribe mEventCallback for the given event-type mask.
+    int enableEventNotification(int32_t eventTypes);
+    int disableEventNotification(int32_t eventTypes);
+};
+
+/*
+ * Interface for providing buffers
+ */
+class BufferProvider
+{
+public:
+    virtual CameraBuffer * allocateBufferList(int width, int height, const char* format, int &bytes, int numBufs) = 0;
+
+    // gets a buffer list from BufferProvider when buffers are sent from external source and already pre-allocated
+    // only call this function for an input source into CameraHal. If buffers are not from a pre-allocated source
+    // this function will return NULL and numBufs of -1
+    virtual CameraBuffer *getBufferList(int *numBufs) = 0;
+
+    //additional methods used for memory mapping
+    virtual uint32_t * getOffsets() = 0;
+    virtual int getFd() = 0;
+    // Optional accessors; base implementations report "not supported".
+    virtual CameraBuffer * getBuffers(bool reset = false) { return NULL; }
+    virtual unsigned int getSize() {return 0; }
+    virtual int getBufferCount() {return -1; }
+
+    virtual int freeBufferList(CameraBuffer * buf) = 0;
+
+    virtual ~BufferProvider() {}
+};
+
+/**
+  * Class for handling data and notify callbacks to application
+  */
+class AppCallbackNotifier: public ErrorNotifier , public virtual android::RefBase
+{
+
+public:
+
+    ///Constants
+    static const int NOTIFIER_TIMEOUT;
+    static const int32_t MAX_BUFFERS = 8;
+
+    enum NotifierCommands
+        {
+        NOTIFIER_CMD_PROCESS_EVENT,
+        NOTIFIER_CMD_PROCESS_FRAME,
+        NOTIFIER_CMD_PROCESS_ERROR
+        };
+
+    enum NotifierState
+        {
+        NOTIFIER_STOPPED,
+        NOTIFIER_STARTED,
+        NOTIFIER_EXITED
+        };
+
+public:
+
+    ~AppCallbackNotifier();
+
+    ///Initializes the callback notifier, creates any resources required
+    status_t initialize();
+
+    ///Starts the callbacks to application
+    status_t start();
+
+    ///Stops the callbacks from going to application
+    status_t stop();
+
+    void setEventProvider(int32_t eventMask, MessageNotifier * eventProvider);
+    void setFrameProvider(FrameNotifier *frameProvider);
+
+    //All sub-components of Camera HAL call this whenever any error happens
+    virtual void errorNotify(int error);
+
+    status_t startPreviewCallbacks(android::CameraParameters &params, CameraBuffer *buffers, uint32_t *offsets, int fd, size_t length, size_t count);
+    status_t stopPreviewCallbacks();
+
+    status_t enableMsgType(int32_t msgType);
+    status_t disableMsgType(int32_t msgType);
+
+    //API for enabling/disabling measurement data
+    void setMeasurements(bool enable);
+
+    //thread loops
+    bool notificationThread();
+
+    ///Notification callback functions
+    static void frameCallbackRelay(CameraFrame* caFrame);
+    static void eventCallbackRelay(CameraHalEvent* chEvt);
+    void frameCallback(CameraFrame* caFrame);
+    void eventCallback(CameraHalEvent* chEvt);
+    void flushAndReturnFrames();
+
+    // Registers the application callbacks received through the HAL module API.
+    void setCallbacks(CameraHal *cameraHal,
+                        camera_notify_callback notify_cb,
+                        camera_data_callback data_cb,
+                        camera_data_timestamp_callback data_cb_timestamp,
+                        camera_request_memory get_memory,
+                        void *user);
+
+    //Set Burst mode
+    void setBurst(bool burst);
+
+    //Notifications from CameraHal for video recording case
+    status_t startRecording();
+    status_t stopRecording();
+    status_t initSharedVideoBuffers(CameraBuffer *buffers, uint32_t *offsets, int fd, size_t length, size_t count, CameraBuffer *vidBufs);
+    status_t releaseRecordingFrame(const void *opaque);
+
+    status_t useMetaDataBufferMode(bool enable);
+
+    void EncoderDoneCb(void*, void*, CameraFrame::FrameType type, void* cookie1, void* cookie2, void *cookie3);
+
+    void useVideoBuffers(bool useVideoBuffers);
+
+    // NOTE: "Ues" is a long-standing typo in this accessor's name,
+    // kept for source compatibility.
+    bool getUesVideoBuffers();
+    void setVideoRes(int width, int height);
+
+    void flushEventQueue();
+    void setExternalLocking(bool extBuffLocking);
+
+    //Internal class definitions
+    class NotificationThread : public android::Thread {
+        AppCallbackNotifier* mAppCallbackNotifier;
+        Utils::MessageQueue mNotificationThreadQ;
+    public:
+        enum NotificationThreadCommands
+            {
+            NOTIFIER_START,
+            NOTIFIER_STOP,
+            NOTIFIER_EXIT,
+            };
+    public:
+        NotificationThread(AppCallbackNotifier* nh)
+            : Thread(false), mAppCallbackNotifier(nh) { }
+        virtual bool threadLoop() {
+            return mAppCallbackNotifier->notificationThread();
+        }
+
+        Utils::MessageQueue &msgQ() { return mNotificationThreadQ;}
+    };
+
+    //Friend declarations
+    friend class NotificationThread;
+
+private:
+    void notifyEvent();
+    void notifyFrame();
+    bool processMessage();
+    void releaseSharedVideoBuffers();
+    status_t dummyRaw();
+    void copyAndSendPictureFrame(CameraFrame* frame, int32_t msgType);
+    void copyAndSendPreviewFrame(CameraFrame* frame, int32_t msgType);
+    size_t calculateBufferSize(size_t width, size_t height, const char *pixelFormat);
+    const char* getContstantForPixelFormat(const char *pixelFormat);
+    void lockBufferAndUpdatePtrs(CameraFrame* frame);
+    void unlockBufferAndUpdatePtrs(CameraFrame* frame);
+
+private:
+    mutable android::Mutex mLock;
+    mutable android::Mutex mBurstLock;
+    CameraHal* mCameraHal;                          // owner HAL (not owned here)
+    camera_notify_callback mNotifyCb;               // app callbacks from setCallbacks()
+    camera_data_callback mDataCb;
+    camera_data_timestamp_callback mDataCbTimestamp;
+    camera_request_memory mRequestMemory;
+    void *mCallbackCookie;
+
+    //Keeps Video MemoryHeaps and Buffers within
+    //these objects
+    android::KeyedVector<unsigned int, unsigned int> mVideoHeaps;
+    android::KeyedVector<unsigned int, unsigned int> mVideoBuffers;
+    android::KeyedVector<void *, CameraBuffer *> mVideoMap;
+
+    //Keeps list of Gralloc handles and associated Video Metadata Buffers
+    android::KeyedVector<void *, camera_memory_t *> mVideoMetadataBufferMemoryMap;
+    android::KeyedVector<void *, CameraBuffer *> mVideoMetadataBufferReverseMap;
+
+    bool mBufferReleased;
+
+    android::sp< NotificationThread> mNotificationThread;
+    EventProvider *mEventProvider;
+    FrameProvider *mFrameProvider;
+    Utils::MessageQueue mEventQ;
+    Utils::MessageQueue mFrameQ;
+    NotifierState mNotifierState;
+
+    bool mPreviewing;
+    camera_memory_t* mPreviewMemory;
+    CameraBuffer mPreviewBuffers[MAX_BUFFERS];
+    int mPreviewBufCount;
+    int mPreviewWidth;
+    int mPreviewHeight;
+    int mPreviewStride;
+    const char *mPreviewPixelFormat;
+    android::KeyedVector<unsigned int, android::sp<android::MemoryHeapBase> > mSharedPreviewHeaps;
+    android::KeyedVector<unsigned int, android::sp<android::MemoryBase> > mSharedPreviewBuffers;
+
+    //Burst mode active
+    bool mBurst;
+    mutable android::Mutex mRecordingLock;
+    bool mRecording;
+    bool mMeasurementEnabled;
+
+    bool mUseMetaDataBufferMode;
+    bool mRawAvailable;
+
+    bool mUseVideoBuffers;
+
+    int mVideoWidth;
+    int mVideoHeight;
+
+    bool mExternalLocking;
+
+};
+
+
+/**
+  * Class used for allocating memory for JPEG bit stream buffers, output buffers of camera in no overlay case
+  */
+class MemoryManager : public BufferProvider, public virtual android::RefBase
+{
+public:
+    MemoryManager();
+    ~MemoryManager();
+
+    // Opens the DRM device; must succeed before allocateBufferList() is used.
+    status_t initialize();
+
+    int setErrorHandler(ErrorNotifier *errorNotifier);
+    virtual CameraBuffer * allocateBufferList(int width, int height, const char* format, int &bytes, int numBufs);
+    virtual CameraBuffer *getBufferList(int *numBufs);
+    virtual uint32_t * getOffsets();
+    virtual int getFd() ;
+    virtual int freeBufferList(CameraBuffer * buflist);
+
+private:
+    android::sp<ErrorNotifier> mErrorNotifier;
+    /*These are specific to DRM buffers*/
+    struct omap_device *mOmapDev;   // omap_drm device handle
+    int mOmapDrm_Fd;                // underlying DRM file descriptor
+};
+
+
+
+
+/**
+ * CameraAdapter interface class
+ * Concrete classes derive from this class and provide implementations based on the specific camera h/w interface
+ */
+
+class CameraAdapter: public FrameNotifier, public virtual android::RefBase
+{
+protected:
+ enum AdapterActiveStates {
+ INTIALIZED_ACTIVE = 1 << 0,
+ LOADED_PREVIEW_ACTIVE = 1 << 1,
+ PREVIEW_ACTIVE = 1 << 2,
+ LOADED_CAPTURE_ACTIVE = 1 << 3,
+ CAPTURE_ACTIVE = 1 << 4,
+ BRACKETING_ACTIVE = 1 << 5,
+ AF_ACTIVE = 1 << 6,
+ ZOOM_ACTIVE = 1 << 7,
+ VIDEO_ACTIVE = 1 << 8,
+ LOADED_REPROCESS_ACTIVE = 1 << 9,
+ REPROCESS_ACTIVE = 1 << 10,
+ };
+public:
+ typedef struct
+ {
+ CameraBuffer *mBuffers;
+ uint32_t *mOffsets;
+ int mFd;
+ size_t mLength;
+ size_t mCount;
+ size_t mMaxQueueable;
+ } BuffersDescriptor;
+
+ enum CameraCommands
+ {
+ CAMERA_START_PREVIEW = 0,
+ CAMERA_STOP_PREVIEW = 1,
+ CAMERA_START_VIDEO = 2,
+ CAMERA_STOP_VIDEO = 3,
+ CAMERA_START_IMAGE_CAPTURE = 4,
+ CAMERA_STOP_IMAGE_CAPTURE = 5,
+ CAMERA_PERFORM_AUTOFOCUS = 6,
+ CAMERA_CANCEL_AUTOFOCUS = 7,
+ CAMERA_PREVIEW_FLUSH_BUFFERS = 8,
+ CAMERA_START_SMOOTH_ZOOM = 9,
+ CAMERA_STOP_SMOOTH_ZOOM = 10,
+ CAMERA_USE_BUFFERS_PREVIEW = 11,
+ CAMERA_SET_TIMEOUT = 12,
+ CAMERA_CANCEL_TIMEOUT = 13,
+ CAMERA_START_BRACKET_CAPTURE = 14,
+ CAMERA_STOP_BRACKET_CAPTURE = 15,
+ CAMERA_QUERY_RESOLUTION_PREVIEW = 16,
+ CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE = 17,
+ CAMERA_QUERY_BUFFER_SIZE_PREVIEW_DATA = 18,
+ CAMERA_USE_BUFFERS_IMAGE_CAPTURE = 19,
+ CAMERA_USE_BUFFERS_PREVIEW_DATA = 20,
+ CAMERA_TIMEOUT_EXPIRED = 21,
+ CAMERA_START_FD = 22,
+ CAMERA_STOP_FD = 23,
+ CAMERA_SWITCH_TO_EXECUTING = 24,
+ CAMERA_USE_BUFFERS_VIDEO_CAPTURE = 25,
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ CAMERA_USE_BUFFERS_REPROCESS = 26,
+ CAMERA_START_REPROCESS = 27,
+#endif
+#ifdef OMAP_ENHANCEMENT_VTC
+ CAMERA_SETUP_TUNNEL = 28,
+ CAMERA_DESTROY_TUNNEL = 29,
+#endif
+ CAMERA_PREVIEW_INITIALIZATION = 30,
+ };
+
+ enum CameraMode
+ {
+ CAMERA_PREVIEW,
+ CAMERA_IMAGE_CAPTURE,
+ CAMERA_VIDEO,
+ CAMERA_MEASUREMENT,
+ CAMERA_REPROCESS,
+ };
+
+ enum AdapterState {
+ INTIALIZED_STATE = INTIALIZED_ACTIVE,
+ LOADED_PREVIEW_STATE = LOADED_PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ PREVIEW_STATE = PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ LOADED_CAPTURE_STATE = LOADED_CAPTURE_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ CAPTURE_STATE = CAPTURE_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ BRACKETING_STATE = BRACKETING_ACTIVE | CAPTURE_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE ,
+ AF_STATE = AF_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ ZOOM_STATE = ZOOM_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ VIDEO_STATE = VIDEO_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ VIDEO_AF_STATE = VIDEO_ACTIVE | AF_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ VIDEO_ZOOM_STATE = VIDEO_ACTIVE | ZOOM_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ VIDEO_LOADED_CAPTURE_STATE = VIDEO_ACTIVE | LOADED_CAPTURE_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ VIDEO_CAPTURE_STATE = VIDEO_ACTIVE | CAPTURE_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ AF_ZOOM_STATE = AF_ACTIVE | ZOOM_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ BRACKETING_ZOOM_STATE = BRACKETING_ACTIVE | ZOOM_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ LOADED_REPROCESS_STATE = LOADED_REPROCESS_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ LOADED_REPROCESS_CAPTURE_STATE = LOADED_REPROCESS_ACTIVE | LOADED_CAPTURE_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ REPROCESS_STATE = REPROCESS_ACTIVE | CAPTURE_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ };
+
+
+public:
+
+ ///Initialzes the camera adapter creates any resources required
+ virtual int initialize(CameraProperties::Properties*) = 0;
+
+ virtual int setErrorHandler(ErrorNotifier *errorNotifier) = 0;
+
+ //Message/Frame notification APIs
+ virtual void enableMsgType(int32_t msgs,
+ frame_callback callback = NULL,
+ event_callback eventCb = NULL,
+ void *cookie = NULL) = 0;
+ virtual void disableMsgType(int32_t msgs, void* cookie) = 0;
+ virtual void returnFrame(CameraBuffer* frameBuf, CameraFrame::FrameType frameType) = 0;
+ virtual void addFramePointers(CameraBuffer *frameBuf, android_ycbcr *ycbcr) = 0;
+ virtual void removeFramePointers() = 0;
+
+ //APIs to configure Camera adapter and get the current parameter set
+ virtual int setParameters(const android::CameraParameters& params) = 0;
+ virtual void getParameters(android::CameraParameters& params) = 0;
+
+ //Registers callback for returning image buffers back to CameraHAL
+ virtual int registerImageReleaseCallback(release_image_buffers_callback callback, void *user_data) = 0;
+
+ //Registers callback, which signals a completed image capture
+ virtual int registerEndCaptureCallback(end_image_capture_callback callback, void *user_data) = 0;
+
+ //API to send a command to the camera
+ virtual status_t sendCommand(CameraCommands operation, int value1=0, int value2=0, int value3=0, int value4=0) = 0;
+
+ virtual ~CameraAdapter() {};
+
+ //Retrieves the current Adapter state
+ virtual AdapterState getState() = 0;
+
+ //Retrieves the next Adapter state
+ virtual AdapterState getNextState() = 0;
+
+ // Receive orientation events from CameraHal
+ virtual void onOrientationEvent(uint32_t orientation, uint32_t tilt) = 0;
+
+ // Rolls the state machine back to INTIALIZED_STATE from the current state
+ virtual status_t rollbackToInitializedState() = 0;
+
+ // Retrieves the current Adapter state - for internal use (not locked)
+ virtual status_t getState(AdapterState &state) = 0;
+ // Retrieves the next Adapter state - for internal use (not locked)
+ virtual status_t getNextState(AdapterState &state) = 0;
+
+ virtual status_t setSharedAllocator(camera_request_memory shmem_alloc) = 0;
+
+protected:
+ //The first two methods will try to switch the adapter state.
+ //Every call to setState() should be followed by a corresponding
+ //call to commitState(). If the state switch fails, then it will
+ //get reset to the previous state via rollbackState().
+ virtual status_t setState(CameraCommands operation) = 0;
+ virtual status_t commitState() = 0;
+ virtual status_t rollbackState() = 0;
+};
+
+/**
+ * Interface for adapters that display camera preview frames.
+ *
+ * Combines buffer management (BufferProvider) with display control.
+ * Concrete implementations (e.g. ANativeWindowDisplayAdapter,
+ * BufferSourceAdapter) receive frames from a FrameNotifier and hand
+ * them to a preview window.
+ */
+class DisplayAdapter : public BufferProvider, public virtual android::RefBase
+{
+public:
+    DisplayAdapter();
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
+    // Extended (CPCam) preview-stream operations, installed via setExtendedOps().
+    preview_stream_extended_ops_t * extendedOps() const {
+        return mExtendedOps;
+    }
+
+    void setExtendedOps(preview_stream_extended_ops_t * extendedOps);
+#endif
+
+    ///Initializes the display adapter; creates any resources required
+    virtual int initialize() = 0;
+
+    virtual int setPreviewWindow(struct preview_stream_ops *window) = 0;
+    virtual int setFrameProvider(FrameNotifier *frameProvider) = 0;
+    virtual int setErrorHandler(ErrorNotifier *errorNotifier) = 0;
+    virtual int enableDisplay(int width, int height, struct timeval *refTime = NULL) = 0;
+    virtual int disableDisplay(bool cancel_buffer = true) = 0;
+    //Used for Snapshot review temp. pause
+    virtual int pauseDisplay(bool pause) = 0;
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+    //Used for shot to snapshot measurement
+    virtual int setSnapshotTimeRef(struct timeval *refTime = NULL) = 0;
+#endif
+
+    // Whether this adapter can work with buffers allocated elsewhere.
+    virtual bool supportsExternalBuffering() = 0;
+
+    // Get max queueable buffers display supports
+    // This function should only be called after
+    // allocateBufferList
+    virtual status_t maxQueueableBuffers(unsigned int& queueable) = 0;
+
+    // Get min buffers display needs at any given time
+    virtual status_t minUndequeueableBuffers(int& unqueueable) = 0;
+
+    // Given a vector of DisplayAdapters find the one corresponding to str.
+    // Base implementation matches nothing; subclasses that can be looked
+    // up by name are expected to override this.
+    virtual bool match(const char * str) { return false; }
+
+private:
+#ifdef OMAP_ENHANCEMENT_CPCAM
+    preview_stream_extended_ops_t * mExtendedOps;
+#endif
+};
+
+// Forward declarations of file-local callbacks registered with the camera
+// adapter (see CameraAdapter::registerImageReleaseCallback and
+// registerEndCaptureCallback). userData is an opaque caller context —
+// presumably a CameraHal*; confirm against CameraHal.cpp.
+static void releaseImageBuffers(void *userData);
+
+static void endImageCapture(void *userData);
+
+ /**
+   Implementation of the Android Camera hardware abstraction layer
+
+   This class implements the interface methods defined in CameraHardwareInterface
+   for the OMAP4 platform. It owns and coordinates the camera adapter,
+   display adapter(s), memory manager and app-callback notifier.
+
+*/
+class CameraHal
+
+{
+
+public:
+    // OMAP4 SoC variant this HAL instance runs on; detected once at
+    // construction time via getSocFamily() (see mSocFamily).
+    enum SocFamily {
+        SocFamily_Undefined = -1,
+        SocFamily_Omap4430 = 0,
+        SocFamily_Omap4460,
+        SocFamily_Omap4470,
+        SocFamily_ElementCount // element count of SocFamily
+    };
+
+    ///Constants
+    static const int NO_BUFFERS_PREVIEW;
+    static const int NO_BUFFERS_IMAGE_CAPTURE;
+    static const int NO_BUFFERS_IMAGE_CAPTURE_SYSTEM_HEAP;
+    static const uint32_t VFR_SCALE = 1000;
+
+
+    /*--------------------Interface Methods---------------------------------*/
+
+    //@{
+public:
+
+    /** Set the notification and data callbacks */
+    void setCallbacks(camera_notify_callback notify_cb,
+                        camera_data_callback data_cb,
+                        camera_data_timestamp_callback data_cb_timestamp,
+                        camera_request_memory get_memory,
+                        void *user);
+
+    /** Receives orientation events from SensorListener **/
+    void onOrientationEvent(uint32_t orientation, uint32_t tilt);
+
+    /**
+     * The following three functions all take a msgtype,
+     * which is a bitmask of the messages defined in
+     * include/ui/Camera.h
+     */
+
+    /**
+     * Enable a message, or set of messages.
+     */
+    void enableMsgType(int32_t msgType);
+
+    /**
+     * Disable a message, or a set of messages.
+     */
+    void disableMsgType(int32_t msgType);
+
+    /**
+     * Query whether a message, or a set of messages, is enabled.
+     * Note that this operates as an AND, if any of the messages
+     * queried are off, this will return false.
+     */
+    int msgTypeEnabled(int32_t msgType);
+
+    /**
+     * Start preview mode.
+     */
+    int startPreview();
+
+    /**
+     * Set preview mode related initialization.
+     * Only used when slice based processing is enabled.
+     */
+    int cameraPreviewInitialization();
+
+    /**
+     * Only used if overlays are used for camera preview.
+     */
+    int setPreviewWindow(struct preview_stream_ops *window);
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
+    void setExtendedPreviewStreamOps(preview_stream_extended_ops_t *ops);
+
+    /**
+     * Set a tap-in or tap-out point.
+     */
+    int setBufferSource(struct preview_stream_ops *tapin, struct preview_stream_ops *tapout);
+#endif
+
+    /**
+     * Release a tap-in or tap-out point.
+     */
+    int releaseBufferSource(struct preview_stream_ops *tapin, struct preview_stream_ops *tapout);
+
+    /**
+     * Stop a previously started preview.
+     */
+    void stopPreview();
+
+    /**
+     * Returns true if preview is enabled.
+     */
+    bool previewEnabled();
+
+    /**
+     * Start record mode. When a record image is available a CAMERA_MSG_VIDEO_FRAME
+     * message is sent with the corresponding frame. Every record frame must be released
+     * by calling releaseRecordingFrame().
+     */
+    int startRecording();
+
+    /**
+     * Stop a previously started recording.
+     */
+    void stopRecording();
+
+    /**
+     * Returns true if recording is enabled.
+     */
+    int recordingEnabled();
+
+    /**
+     * Release a record frame previously returned by CAMERA_MSG_VIDEO_FRAME.
+     */
+    void releaseRecordingFrame(const void *opaque);
+
+    /**
+     * Start auto focus, the notification callback routine is called
+     * with CAMERA_MSG_FOCUS once when focusing is complete. autoFocus()
+     * will be called again if another auto focus is needed.
+     */
+    int autoFocus();
+
+    /**
+     * Cancels auto-focus function. If the auto-focus is still in progress,
+     * this function will cancel it. Whether the auto-focus is in progress
+     * or not, this function will return the focus position to the default.
+     * If the camera does not support auto-focus, this is a no-op.
+     */
+    int cancelAutoFocus();
+
+    /**
+     * Take a picture.
+     */
+    int takePicture(const char* params);
+
+    /**
+     * Cancel a picture that was started with takePicture. Calling this
+     * method when no picture is being taken is a no-op.
+     */
+    int cancelPicture();
+
+    /** Set the camera parameters. */
+    int setParameters(const char* params);
+    int setParameters(const android::CameraParameters& params);
+
+    /** Return the camera parameters. */
+    char* getParameters();
+    void putParameters(char *);
+
+    /**
+     * Send command to camera driver.
+     */
+    int sendCommand(int32_t cmd, int32_t arg1, int32_t arg2);
+
+    /**
+     * Release the hardware resources owned by this object. Note that this is
+     * *not* done in the destructor.
+     */
+    void release();
+
+    /**
+     * Dump state of the camera hardware
+     */
+    int dump(int fd) const;
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
+    /**
+     * start a reprocessing operation.
+     */
+    int reprocess(const char* params);
+
+    /**
+     * cancels current reprocessing operation
+     */
+    int cancel_reprocess();
+#endif
+
+    status_t storeMetaDataInBuffers(bool enable);
+
+    // Use external locking for graphic buffers
+    void setExternalLocking(bool extBuffLocking);
+
+    //@}
+
+/*--------------------Internal Member functions - Public---------------------------------*/
+
+public:
+    /** @name internalFunctionsPublic */
+    //@{
+
+    /** Constructor of CameraHal */
+    CameraHal(int cameraId);
+
+    // Destructor of CameraHal
+    ~CameraHal();
+
+    /** Initialize CameraHal */
+    status_t initialize(CameraProperties::Properties*);
+
+    /** Deinitialize CameraHal */
+    void deinitialize();
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+    //Uses the constructor timestamp as a reference to calculate the
+    // elapsed time
+    static void PPM(const char *);
+    //Uses a user provided timestamp as a reference to calculate the
+    // elapsed time
+    static void PPM(const char *, struct timeval*, ...);
+
+#endif
+
+    /** Free image bufs */
+    status_t freeImageBufs();
+
+    //Signals the end of image capture
+    status_t signalEndImageCapture();
+
+    //Events
+    // eventCallbackRelay is a static trampoline that forwards to
+    // eventCallback on the CameraHal instance carried by the event.
+    static void eventCallbackRelay(CameraHalEvent* event);
+    void eventCallback(CameraHalEvent* event);
+    void setEventProvider(int32_t eventMask, MessageNotifier * eventProvider);
+
+    // Static helpers shared across the HAL for pixel-format and buffer
+    // geometry calculations.
+    static const char* getPixelFormatConstant(const char* parameters_format);
+    static size_t calculateBufferSize(const char* parameters_format, int width, int height);
+    static void getXYFromOffset(unsigned int *x, unsigned int *y,
+                                unsigned int offset, unsigned int stride,
+                                const char* format);
+    static unsigned int getBPP(const char* format);
+    // Parses "A<delim>B" style strings (e.g. "640x480") into two ints.
+    static bool parsePair(const char *str, int *first, int *second, char delim);
+
+/*--------------------Internal Member functions - Private---------------------------------*/
+private:
+
+    /** @name internalFunctionsPrivate */
+    //@{
+
+    /**  Set the camera parameters specific to Video Recording. */
+    bool        setVideoModeParameters(const android::CameraParameters&);
+
+    /** Reset the camera parameters specific to Video Recording. */
+    bool       resetVideoModeParameters();
+
+    /** Restart the preview with setParameter. */
+    status_t restartPreview();
+
+    status_t parseResolution(const char *resStr, int &width, int &height);
+
+    void insertSupportedParams();
+
+    /** Allocate preview data buffers */
+    status_t allocPreviewDataBufs(size_t size, size_t bufferCount);
+
+    /** Free preview data buffers */
+    status_t freePreviewDataBufs();
+
+    /** Allocate preview buffers */
+    status_t allocPreviewBufs(int width, int height, const char* previewFormat, unsigned int bufferCount, unsigned int &max_queueable);
+
+    /** Allocate video buffers */
+    status_t allocVideoBufs(uint32_t width, uint32_t height, uint32_t bufferCount);
+
+    /** Allocate image capture buffers */
+    status_t allocImageBufs(unsigned int width, unsigned int height, size_t length,
+                            const char* previewFormat, unsigned int bufferCount);
+
+    /** Allocate Raw buffers */
+    status_t allocRawBufs(int width, int height, const char* previewFormat, int bufferCount);
+
+    /** Free preview buffers */
+    status_t freePreviewBufs();
+
+    /** Free video bufs */
+    status_t freeVideoBufs(CameraBuffer *bufs);
+
+    /** Free RAW bufs */
+    status_t freeRawBufs();
+
+    //Check if a given resolution is supported by the current camera
+    //instance
+    bool isResolutionValid(unsigned int width, unsigned int height, const char *supportedResolutions);
+
+    //Check if a given variable frame rate range is supported by the current camera
+    //instance
+    bool isFpsRangeValid(int fpsMin, int fpsMax, const char *supportedFpsRanges);
+
+    //Check if a given parameter is supported by the current camera
+    // instance
+    bool isParameterValid(const char *param, const char *supportedParams);
+    bool isParameterValid(int param, const char *supportedParams);
+    status_t doesSetParameterNeedUpdate(const char *new_param, const char *old_params, bool &update);
+
+    /** Initialize default parameters */
+    void initDefaultParameters();
+
+    void dumpProperties(CameraProperties::Properties& cameraProps);
+
+    status_t startImageBracketing();
+
+    status_t stopImageBracketing();
+
+    // Enables/disables the user shutter override (see mShutterEnabled).
+    void setShutter(bool enable);
+
+    // Unconditionally tears down a running preview regardless of state.
+    void forceStopPreview();
+
+    void resetPreviewRes(android::CameraParameters *params);
+
+    // Internal __takePicture function - used in public takePicture() and reprocess()
+    int   __takePicture(const char* params, struct timeval *captureStart = NULL);
+    //@}
+
+    // Tap-in/tap-out management helpers; callers must hold mLock.
+    status_t setTapoutLocked(struct preview_stream_ops *out);
+    status_t releaseTapoutLocked(struct preview_stream_ops *out);
+    status_t setTapinLocked(struct preview_stream_ops *in);
+    status_t releaseTapinLocked(struct preview_stream_ops *in);
+
+    // Detects the SoC variant; result is cached in mSocFamily.
+    static SocFamily getSocFamily();
+/*----------Member variables - Public ---------------------*/
+public:
+    int32_t mMsgEnabled;              // bitmask of currently enabled CAMERA_MSG_* types
+    bool mRecordEnabled;
+    nsecs_t mCurrentTime;
+    bool mFalsePreview;
+    bool mPreviewEnabled;
+    uint32_t mTakePictureQueue;
+    bool mBracketingEnabled;
+    bool mBracketingRunning;
+    //User shutter override
+    bool mShutterEnabled;
+    bool mMeasurementEnabled;
+    //Google's parameter delimiter
+    static const char PARAMS_DELIMITER[];
+
+    // Core collaborators owned/held by the HAL.
+    CameraAdapter *mCameraAdapter;
+    android::sp<AppCallbackNotifier> mAppCallbackNotifier;
+    android::sp<DisplayAdapter> mDisplayAdapter;
+    android::sp<MemoryManager> mMemoryManager;
+
+    // All tap-out (mOutAdapters) and tap-in (mInAdapters) surfaces set so far.
+    android::Vector< android::sp<DisplayAdapter> > mOutAdapters;
+    android::Vector< android::sp<DisplayAdapter> > mInAdapters;
+
+    // TODO(XXX): Even though we support user setting multiple BufferSourceAdapters now
+    // only one tap in surface and one tap out surface is supported at a time.
+    android::sp<DisplayAdapter> mBufferSourceAdapter_In;
+    android::sp<DisplayAdapter> mBufferSourceAdapter_Out;
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
+    preview_stream_extended_ops_t * mExtendedPreviewStreamOps;
+#endif
+
+    android::sp<android::IMemoryHeap> mPictureHeap;
+
+    int* mGrallocHandles;
+    bool mFpsRangeChangedByApp;
+
+
+    // RAW capture geometry and enable flag.
+    int mRawWidth;
+    int mRawHeight;
+    bool mRawCapture;
+
+
+///static member vars
+
+    static const int SW_SCALING_FPS_LIMIT;
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+    //Timestamp from the CameraHal constructor
+    static struct timeval ppm_start;
+    //Timestamp of the autoFocus command
+    static struct timeval mStartFocus;
+    //Timestamp of the startPreview command
+    static struct timeval mStartPreview;
+    //Timestamp of the takePicture command
+    static struct timeval mStartCapture;
+
+#endif
+
+/*----------Member variables - Private ---------------------*/
+private:
+    bool mDynamicPreviewSwitch;
+    //keeps paused state of display
+    bool mDisplayPaused;
+
+#ifdef OMAP_ENHANCEMENT_VTC
+    bool mTunnelSetup;
+    bool mVTCUseCase;
+#endif
+
+    //Index of current camera adapter
+    int mCameraIndex;
+
+    // Guards HAL state; mutable so const methods (e.g. dump) can lock it.
+    mutable android::Mutex mLock;
+
+    android::sp<SensorListener> mSensorListener;
+
+    // Handle of the dynamically loaded camera adapter library/instance.
+    void* mCameraAdapterHandle;
+
+    android::CameraParameters mParameters;
+    bool mPreviewRunning;
+    bool mPreviewStateOld;
+    bool mRecordingEnabled;
+    EventProvider *mEventProvider;
+
+    // Buffer bookkeeping for the four buffer pools (preview-data, image,
+    // preview, video): buffer array, per-buffer offsets, shared fd, length.
+    CameraBuffer *mPreviewDataBuffers;
+    uint32_t *mPreviewDataOffsets;
+    int mPreviewDataFd;
+    int mPreviewDataLength;
+    CameraBuffer *mImageBuffers;
+    uint32_t *mImageOffsets;
+    int mImageFd;
+    int mImageLength;
+    unsigned int mImageCount;
+    CameraBuffer *mPreviewBuffers;
+    uint32_t *mPreviewOffsets;
+    int mPreviewLength;
+    int mPreviewFd;
+    CameraBuffer *mVideoBuffers;
+    uint32_t *mVideoOffsets;
+    int mVideoFd;
+    int mVideoLength;
+
+    int mBracketRangePositive;
+    int mBracketRangeNegative;
+
+    ///@todo Rename this as preview buffer provider
+    BufferProvider *mBufProvider;
+    BufferProvider *mVideoBufProvider;
+
+
+    CameraProperties::Properties* mCameraProperties;
+
+    bool mPreviewStartInProgress;
+    bool mPreviewInitializationDone;
+
+    bool mSetPreviewWindowCalled;
+
+    uint32_t mPreviewWidth;
+    uint32_t mPreviewHeight;
+    int32_t mMaxZoomSupported;
+
+    int mVideoWidth;
+    int mVideoHeight;
+
+    // Saved capture-mode parameter, restored after video mode ends.
+    android::String8 mCapModeBackup;
+
+    bool mExternalLocking;
+
+    // SoC variant, fixed for the lifetime of this instance.
+    const SocFamily mSocFamily;
+};
+
+} // namespace Camera
+} // namespace Ti
+
+#endif
diff --git a/camera/inc/CameraProperties.h b/camera/inc/CameraProperties.h
new file mode 100644
index 0000000..dfba9b7
--- /dev/null
+++ b/camera/inc/CameraProperties.h
@@ -0,0 +1,242 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+
+
+#ifndef CAMERA_PROPERTIES_H
+#define CAMERA_PROPERTIES_H
+
+#include <utils/KeyedVector.h>
+#include <utils/String8.h>
+#include <stdio.h>
+#include <dirent.h>
+#include <errno.h>
+#include <stdio.h>
+#include <string.h>
+#include <ctype.h>
+#include "cutils/properties.h"
+
+#include "Common.h"
+
+namespace Ti {
+namespace Camera {
+
+// Limits for the camera-properties subsystem.
+#define MAX_CAMERAS_SUPPORTED 1
+#define MAX_SIMUL_CAMERAS_SUPPORTED 1
+#define MAX_PROP_NAME_LENGTH 50
+#define MAX_PROP_VALUE_LENGTH 2048
+
+// Number of bytes still writable into the fixed-size char array `buff`,
+// reserving one byte for the NUL terminator; clamps to 0 instead of going
+// negative when the buffer is already full.
+#define REMAINING_BYTES(buff) ((((int)sizeof(buff) - 1 - (int)strlen(buff)) < 0) ? 0 : (sizeof(buff) - 1 - strlen(buff)))
+
+// Operating mode a property set is tuned for. CameraProperties::Properties
+// keeps one key/value dictionary per mode (mProperties[MODE_MAX]).
+enum OperatingMode {
+    MODE_HIGH_SPEED = 0,
+    MODE_HIGH_QUALITY,
+    MODE_ZEROSHUTTERLAG,
+    MODE_VIDEO,
+    MODE_STEREO,
+    MODE_CPCAM,
+    MODE_VIDEO_HIGH_QUALITY,
+    MODE_MAX     // element count; also sizes per-mode storage
+};
+
+// Class that handles the Camera Properties.
+// Loads per-sensor capability/default data and exposes it as string
+// key/value dictionaries; the key names are the static constants below.
+class CameraProperties
+{
+public:
+    // --- Property key names (values defined in CameraProperties.cpp) ---
+    static const char INVALID[];
+    static const char CAMERA_NAME[];
+    static const char CAMERA_SENSOR_INDEX[];
+    static const char CAMERA_SENSOR_ID[];
+    static const char ORIENTATION_INDEX[];
+    static const char FACING_INDEX[];
+    // Supported capability lists (comma-separated value strings).
+    static const char SUPPORTED_PREVIEW_SIZES[];
+    static const char SUPPORTED_PREVIEW_SUBSAMPLED_SIZES[];
+    static const char SUPPORTED_PREVIEW_TOPBOTTOM_SIZES[];
+    static const char SUPPORTED_PREVIEW_SIDEBYSIDE_SIZES[];
+    static const char SUPPORTED_PREVIEW_FORMATS[];
+    static const char SUPPORTED_PREVIEW_FRAME_RATES[];
+    static const char SUPPORTED_PREVIEW_FRAME_RATES_EXT[];
+    static const char SUPPORTED_PICTURE_SIZES[];
+    static const char SUPPORTED_PICTURE_SUBSAMPLED_SIZES[];
+    static const char SUPPORTED_PICTURE_TOPBOTTOM_SIZES[];
+    static const char SUPPORTED_PICTURE_SIDEBYSIDE_SIZES[];
+    static const char SUPPORTED_PICTURE_FORMATS[];
+    static const char SUPPORTED_THUMBNAIL_SIZES[];
+    static const char SUPPORTED_WHITE_BALANCE[];
+    static const char SUPPORTED_EFFECTS[];
+    static const char SUPPORTED_ANTIBANDING[];
+    static const char SUPPORTED_EXPOSURE_MODES[];
+    static const char SUPPORTED_MANUAL_EXPOSURE_MIN[];
+    static const char SUPPORTED_MANUAL_EXPOSURE_MAX[];
+    static const char SUPPORTED_MANUAL_EXPOSURE_STEP[];
+    static const char SUPPORTED_MANUAL_GAIN_ISO_MIN[];
+    static const char SUPPORTED_MANUAL_GAIN_ISO_MAX[];
+    static const char SUPPORTED_MANUAL_GAIN_ISO_STEP[];
+    static const char SUPPORTED_EV_MIN[];
+    static const char SUPPORTED_EV_MAX[];
+    static const char SUPPORTED_EV_STEP[];
+    static const char SUPPORTED_ISO_VALUES[];
+    static const char SUPPORTED_SCENE_MODES[];
+    static const char SUPPORTED_FLASH_MODES[];
+    static const char SUPPORTED_FOCUS_MODES[];
+    static const char REQUIRED_PREVIEW_BUFS[];
+    static const char REQUIRED_IMAGE_BUFS[];
+    static const char SUPPORTED_ZOOM_RATIOS[];
+    static const char SUPPORTED_ZOOM_STAGES[];
+    static const char SUPPORTED_IPP_MODES[];
+    static const char SMOOTH_ZOOM_SUPPORTED[];
+    static const char ZOOM_SUPPORTED[];
+    // Current/default settings.
+    static const char PREVIEW_SIZE[];
+    static const char PREVIEW_FORMAT[];
+    static const char PREVIEW_FRAME_RATE[];
+    static const char ZOOM[];
+    static const char PICTURE_SIZE[];
+    static const char PICTURE_FORMAT[];
+    static const char JPEG_THUMBNAIL_SIZE[];
+    static const char WHITEBALANCE[];
+    static const char EFFECT[];
+    static const char ANTIBANDING[];
+    static const char EXPOSURE_MODE[];
+    static const char EV_COMPENSATION[];
+    static const char ISO_MODE[];
+    static const char FOCUS_MODE[];
+    static const char SCENE_MODE[];
+    static const char FLASH_MODE[];
+    static const char JPEG_QUALITY[];
+    static const char BRIGHTNESS[];
+    static const char SATURATION[];
+    static const char SHARPNESS[];
+    static const char CONTRAST[];
+    static const char IPP[];
+    static const char GBCE[];
+    static const char SUPPORTED_GBCE[];
+    static const char GLBCE[];
+    static const char SUPPORTED_GLBCE[];
+    static const char AUTOCONVERGENCE_MODE[];
+    static const char AUTOCONVERGENCE_MODE_VALUES[];
+    static const char MANUAL_CONVERGENCE[];
+    static const char SUPPORTED_MANUAL_CONVERGENCE_MIN[];
+    static const char SUPPORTED_MANUAL_CONVERGENCE_MAX[];
+    static const char SUPPORTED_MANUAL_CONVERGENCE_STEP[];
+    static const char SENSOR_ORIENTATION[];
+    static const char SENSOR_ORIENTATION_VALUES[];
+    static const char REVISION[];
+    // EXIF-related sensor data.
+    static const char FOCAL_LENGTH[];
+    static const char HOR_ANGLE[];
+    static const char VER_ANGLE[];
+    static const char EXIF_MAKE[];
+    static const char EXIF_MODEL[];
+    static const char JPEG_THUMBNAIL_QUALITY[];
+    static const char MAX_FOCUS_AREAS[];
+    static const char MAX_FD_HW_FACES[];
+    static const char MAX_FD_SW_FACES[];
+
+    static const char MAX_PICTURE_WIDTH[];
+    static const char MAX_PICTURE_HEIGHT[];
+
+    static const char PARAMS_DELIMITER [];
+
+    // Stereo (S3D) frame layouts and video stabilization/noise filtering.
+    static const char S3D_PRV_FRAME_LAYOUT[];
+    static const char S3D_PRV_FRAME_LAYOUT_VALUES[];
+    static const char S3D_CAP_FRAME_LAYOUT[];
+    static const char S3D_CAP_FRAME_LAYOUT_VALUES[];
+    static const char VSTAB[];
+    static const char VSTAB_SUPPORTED[];
+    static const char VNF[];
+    static const char VNF_SUPPORTED[];
+    static const char FRAMERATE_RANGE[];
+    static const char FRAMERATE_RANGE_SUPPORTED[];
+    static const char FRAMERATE_RANGE_EXT_SUPPORTED[];
+
+    static const char DEFAULT_VALUE[];
+
+    static const char AUTO_EXPOSURE_LOCK[];
+    static const char AUTO_EXPOSURE_LOCK_SUPPORTED[];
+    static const char AUTO_WHITEBALANCE_LOCK[];
+    static const char AUTO_WHITEBALANCE_LOCK_SUPPORTED[];
+    static const char MAX_NUM_METERING_AREAS[];
+    static const char METERING_AREAS[];
+    static const char MAX_NUM_FOCUS_AREAS[];
+
+    static const char VIDEO_SNAPSHOT_SUPPORTED[];
+
+    static const char VIDEO_SIZE[];
+    static const char SUPPORTED_VIDEO_SIZES[];
+
+    static const char MECHANICAL_MISALIGNMENT_CORRECTION_SUPPORTED[];
+    static const char MECHANICAL_MISALIGNMENT_CORRECTION[];
+
+    static const char RAW_WIDTH[];
+    static const char RAW_HEIGHT[];
+
+    static const char CAP_MODE_VALUES[];
+
+    CameraProperties();
+    ~CameraProperties();
+
+    // container class passed around for accessing properties
+    class Properties
+    {
+        public:
+
+            Properties()
+            {
+            }
+
+            ~Properties()
+            {
+            }
+
+            // Setters/getters operate on the dictionary selected by the
+            // current operating mode (see setMode()).
+            void set(const char *prop, const char *value);
+            void set(const char *prop, int value);
+            const char* get(const char * prop) const;
+            int getInt(const char * prop) const;
+            void setSensorIndex(int idx);
+            void setMode(OperatingMode mode);
+            OperatingMode getMode() const;
+            // Logs all key/value pairs of the current mode.
+            void dump();
+
+        protected:
+            const char* keyAt(const unsigned int) const;
+            const char* valueAt(const unsigned int) const;
+
+        private:
+            OperatingMode mCurrentMode;
+            // One key/value dictionary per operating mode.
+            android::DefaultKeyedVector<android::String8, android::String8> mProperties[MODE_MAX];
+
+    };
+
+    ///Initializes the CameraProperties class
+    status_t initialize();
+    status_t loadProperties();
+    int camerasSupported();
+    // Returns (via *properties) the property set of the given camera index.
+    int getProperties(int cameraIndex, Properties** properties);
+
+private:
+
+    int mCamerasSupported;
+    int mInitialized;
+    mutable android::Mutex mLock;
+
+    Properties mCameraProps[MAX_CAMERAS_SUPPORTED];
+
+};
+
+} // namespace Camera
+} // namespace Ti
+
+#endif //CAMERA_PROPERTIES_H
diff --git a/camera/inc/Common.h b/camera/inc/Common.h
new file mode 100644
index 0000000..b369e65
--- /dev/null
+++ b/camera/inc/Common.h
@@ -0,0 +1,65 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CAMERAHAL_COMMON_H
+#define CAMERAHAL_COMMON_H
+
+#include "UtilsCommon.h"
+#include "DebugUtils.h"
+#include "Status.h"
+
+
+
+
+// logging functions
+// Debug (CAMHAL_LOGD*) and verbose (CAMHAL_LOGV*) logging compile to
+// no-ops unless CAMERAHAL_DEBUG / CAMERAHAL_DEBUG_VERBOSE are defined.
+#ifdef CAMERAHAL_DEBUG
+#   define CAMHAL_LOGD  DBGUTILS_LOGD
+#   define CAMHAL_LOGDA DBGUTILS_LOGDA
+#   define CAMHAL_LOGDB DBGUTILS_LOGDB
+#   ifdef CAMERAHAL_DEBUG_VERBOSE
+#       define CAMHAL_LOGV  DBGUTILS_LOGV
+#       define CAMHAL_LOGVA DBGUTILS_LOGVA
+#       define CAMHAL_LOGVB DBGUTILS_LOGVB
+#   else
+#       define CAMHAL_LOGV(...)
+#       define CAMHAL_LOGVA(str)
+#       define CAMHAL_LOGVB(str, ...)
+#   endif
+#else
+#   define CAMHAL_LOGD(...)
+#   define CAMHAL_LOGDA(str)
+#   define CAMHAL_LOGDB(str, ...)
+#   define CAMHAL_LOGV(...)
+#   define CAMHAL_LOGVA(str)
+#   define CAMHAL_LOGVB(str, ...)
+#endif
+
+// Info/warning/error/fatal logging and asserts are always enabled,
+// independent of the debug flags above.
+#define CAMHAL_LOGI  DBGUTILS_LOGI
+#define CAMHAL_LOGW  DBGUTILS_LOGW
+#define CAMHAL_LOGE  DBGUTILS_LOGE
+#define CAMHAL_LOGEA DBGUTILS_LOGEA
+#define CAMHAL_LOGEB DBGUTILS_LOGEB
+#define CAMHAL_LOGF  DBGUTILS_LOGF
+
+#define CAMHAL_ASSERT   DBGUTILS_ASSERT
+#define CAMHAL_ASSERT_X DBGUTILS_ASSERT_X
+
+// Silences unused-variable/parameter compiler warnings.
+#define CAMHAL_UNUSED(x) (void)x
+
+
+
+
+#endif // CAMERAHAL_COMMON_H
diff --git a/camera/inc/DecoderFactory.h b/camera/inc/DecoderFactory.h
new file mode 100644
index 0000000..d5e566f
--- /dev/null
+++ b/camera/inc/DecoderFactory.h
@@ -0,0 +1,35 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef DECODERFACTORY_H_
+#define DECODERFACTORY_H_
+
+#include "FrameDecoder.h"
+
+namespace Ti {
+namespace Camera {
+
+// Static-only factory for frame decoders. Constructor and destructor are
+// intentionally private so the class cannot be instantiated.
+class DecoderFactory {
+    DecoderFactory();
+    ~DecoderFactory();
+public:
+    // Creates a decoder for the given type; forceSwDecoder requests the
+    // software implementation — presumably even when a HW decoder is
+    // available; confirm in DecoderFactory.cpp. Caller owns the result.
+    static FrameDecoder* createDecoderByType(DecoderType type, bool forceSwDecoder = false);
+};
+
+} // namespace Camera
+} // namespace Ti
+
+#endif /* DECODERFACTORY_H_ */
diff --git a/camera/inc/Decoder_libjpeg.h b/camera/inc/Decoder_libjpeg.h
new file mode 100644
index 0000000..425ebf1
--- /dev/null
+++ b/camera/inc/Decoder_libjpeg.h
@@ -0,0 +1,58 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_CAMERA_HARDWARE_DECODER_LIBJPEG_H
+#define ANDROID_CAMERA_HARDWARE_DECODER_LIBJPEG_H
+
+#include "CameraHal.h"
+
+extern "C" {
+#include "jhead.h"
+
+#undef TRUE
+#undef FALSE
+
+}
+
+
+namespace Ti {
+namespace Camera {
+
+// Software JPEG decoder built on libjpeg. The DHT helpers deal with
+// streams (e.g. MJPEG from USB cameras) that omit the Huffman tables.
+class Decoder_libjpeg
+{
+
+public:
+    Decoder_libjpeg();
+    ~Decoder_libjpeg();
+    // Size in bytes of the standard DHT segment that appendDHT() inserts.
+    static int readDHTSize();
+    // Returns true if the JPEG bitstream already carries a DHT marker.
+    static bool isDhtExist(unsigned char *jpeg_src, int filled_len);
+    // Copies jpeg_src into jpeg_with_dht_buffer while inserting the standard
+    // DHT segment; buff_size is the destination capacity. Returns the new
+    // length — presumably, confirm against Decoder_libjpeg.cpp.
+    static int appendDHT(unsigned char *jpeg_src, int filled_len, unsigned char *jpeg_with_dht_buffer, int buff_size);
+    // Decodes the JPEG into nv12_buffer using the given row stride;
+    // returns true on success.
+    bool decode(unsigned char *jpeg_src, int filled_len, unsigned char *nv12_buffer, int stride);
+
+private:
+    // Frees the per-row plane pointer arrays below.
+    void release();
+    unsigned char **Y_Plane;
+    unsigned char **U_Plane;
+    unsigned char **V_Plane;
+    unsigned char *UV_Plane;
+    // Dimensions of the last decoded image.
+    unsigned int mWidth, mHeight;
+};
+
+} // namespace Camera
+} // namespace Ti
+
+#endif
+
diff --git a/camera/inc/Encoder_libjpeg.h b/camera/inc/Encoder_libjpeg.h
new file mode 100644
index 0000000..72feb08
--- /dev/null
+++ b/camera/inc/Encoder_libjpeg.h
@@ -0,0 +1,226 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file Encoder_libjpeg.h
+*
+* This defines API for camerahal to encode YUV using libjpeg
+*
+*/
+
+#ifndef ANDROID_CAMERA_HARDWARE_ENCODER_LIBJPEG_H
+#define ANDROID_CAMERA_HARDWARE_ENCODER_LIBJPEG_H
+
+#include <utils/threads.h>
+#include <utils/RefBase.h>
+
+extern "C" {
+#include "jhead.h"
+
+#undef TRUE
+#undef FALSE
+
+}
+
+#include "CameraHal.h"
+
+#define CANCEL_TIMEOUT 5000000 // 5 seconds
+
+namespace Ti {
+namespace Camera {
+
+/**
+ * libjpeg encoder class - uses libjpeg to encode yuv
+ */
+
+#define MAX_EXIF_TAGS_SUPPORTED 30
+typedef void (*encoder_libjpeg_callback_t) (void* main_jpeg,
+ void* thumb_jpeg,
+ CameraFrame::FrameType type,
+ void* cookie1,
+ void* cookie2,
+ void* cookie3,
+ void* cookie4,
+ bool canceled);
+
+// these have to match strings defined in external/jhead/exif.c
+static const char TAG_MODEL[] = "Model";
+static const char TAG_MAKE[] = "Make";
+static const char TAG_FOCALLENGTH[] = "FocalLength";
+static const char TAG_DATETIME[] = "DateTime";
+static const char TAG_IMAGE_WIDTH[] = "ImageWidth";
+static const char TAG_IMAGE_LENGTH[] = "ImageLength";
+static const char TAG_GPS_LAT[] = "GPSLatitude";
+static const char TAG_GPS_LAT_REF[] = "GPSLatitudeRef";
+static const char TAG_GPS_LONG[] = "GPSLongitude";
+static const char TAG_GPS_LONG_REF[] = "GPSLongitudeRef";
+static const char TAG_GPS_ALT[] = "GPSAltitude";
+static const char TAG_GPS_ALT_REF[] = "GPSAltitudeRef";
+static const char TAG_GPS_MAP_DATUM[] = "GPSMapDatum";
+static const char TAG_GPS_PROCESSING_METHOD[] = "GPSProcessingMethod";
+static const char TAG_GPS_VERSION_ID[] = "GPSVersionID";
+static const char TAG_GPS_TIMESTAMP[] = "GPSTimeStamp";
+static const char TAG_GPS_DATESTAMP[] = "GPSDateStamp";
+static const char TAG_ORIENTATION[] = "Orientation";
+static const char TAG_FLASH[] = "Flash";
+static const char TAG_DIGITALZOOMRATIO[] = "DigitalZoomRatio";
+static const char TAG_EXPOSURETIME[] = "ExposureTime";
+static const char TAG_APERTURE[] = "ApertureValue";
+static const char TAG_ISO_EQUIVALENT[] = "ISOSpeedRatings";
+static const char TAG_WHITEBALANCE[] = "WhiteBalance";
+static const char TAG_LIGHT_SOURCE[] = "LightSource";
+static const char TAG_METERING_MODE[] = "MeteringMode";
+static const char TAG_EXPOSURE_PROGRAM[] = "ExposureProgram";
+static const char TAG_COLOR_SPACE[] = "ColorSpace";
+static const char TAG_CPRS_BITS_PER_PIXEL[] = "CompressedBitsPerPixel";
+static const char TAG_FNUMBER[] = "FNumber";
+static const char TAG_SHUTTERSPEED[] = "ShutterSpeedValue";
+static const char TAG_SENSING_METHOD[] = "SensingMethod";
+static const char TAG_CUSTOM_RENDERED[] = "CustomRendered";
+
+class ExifElementsTable {
+ public:
+ ExifElementsTable() :
+ gps_tag_count(0), exif_tag_count(0), position(0),
+ jpeg_opened(false)
+ {
+#ifdef ANDROID_API_JB_OR_LATER
+ has_datetime_tag = false;
+#endif
+ }
+ ~ExifElementsTable();
+
+ status_t insertElement(const char* tag, const char* value);
+ void insertExifToJpeg(unsigned char* jpeg, size_t jpeg_size);
+ status_t insertExifThumbnailImage(const char*, int);
+ void saveJpeg(unsigned char* picture, size_t jpeg_size);
+ static const char* degreesToExifOrientation(unsigned int);
+ static void stringToRational(const char*, unsigned int*, unsigned int*);
+ static bool isAsciiTag(const char* tag);
+ private:
+ ExifElement_t table[MAX_EXIF_TAGS_SUPPORTED];
+ unsigned int gps_tag_count;
+ unsigned int exif_tag_count;
+ unsigned int position;
+ bool jpeg_opened;
+#ifdef ANDROID_API_JB_OR_LATER
+ bool has_datetime_tag;
+#endif
+};
+
+class Encoder_libjpeg : public android::Thread {
+ /* public member types and variables */
+ public:
+ struct params {
+ uint8_t* src;
+ int src_size;
+ uint8_t* dst;
+ int dst_size;
+ int quality;
+ int in_width;
+ int in_height;
+ int out_width;
+ int out_height;
+ int right_crop;
+ int start_offset;
+ const char* format;
+ size_t jpeg_size;
+ };
+ /* public member functions */
+ public:
+ Encoder_libjpeg(params* main_jpeg,
+ params* tn_jpeg,
+ encoder_libjpeg_callback_t cb,
+ CameraFrame::FrameType type,
+ void* cookie1,
+ void* cookie2,
+ void* cookie3, void *cookie4)
+ : android::Thread(false), mMainInput(main_jpeg), mThumbnailInput(tn_jpeg), mCb(cb),
+ mCancelEncoding(false), mCookie1(cookie1), mCookie2(cookie2), mCookie3(cookie3), mCookie4(cookie4),
+ mType(type), mThumb(NULL) {
+ this->incStrong(this);
+ mCancelSem.Create(0);
+ }
+
+ ~Encoder_libjpeg() {
+ CAMHAL_LOGVB("~Encoder_libjpeg(%p)", this);
+ }
+
+ virtual bool threadLoop() {
+ size_t size = 0;
+ if (mThumbnailInput) {
+ // start thread to encode thumbnail
+ mThumb = new Encoder_libjpeg(mThumbnailInput, NULL, NULL, mType, NULL, NULL, NULL, NULL);
+ mThumb->run();
+ }
+
+ // encode our main image
+ size = encode(mMainInput);
+
+ // signal cancel semaphore in case somebody is waiting
+ mCancelSem.Signal();
+
+ // check if it is main jpeg thread
+ if(mThumb.get()) {
+ // wait until tn jpeg thread exits.
+ mThumb->join();
+ mThumb.clear();
+ mThumb = NULL;
+ }
+
+ if(mCb) {
+ mCb(mMainInput, mThumbnailInput, mType, mCookie1, mCookie2, mCookie3, mCookie4, mCancelEncoding);
+ }
+
+ // encoder thread runs, self-destructs, and then exits
+ this->decStrong(this);
+ return false;
+ }
+
+ void cancel() {
+ mCancelEncoding = true;
+ if (mThumb.get()) {
+ mThumb->cancel();
+ mCancelSem.WaitTimeout(CANCEL_TIMEOUT);
+ }
+ }
+
+ void getCookies(void **cookie1, void **cookie2, void **cookie3) {
+ if (cookie1) *cookie1 = mCookie1;
+ if (cookie2) *cookie2 = mCookie2;
+ if (cookie3) *cookie3 = mCookie3;
+ }
+
+ private:
+ params* mMainInput;
+ params* mThumbnailInput;
+ encoder_libjpeg_callback_t mCb;
+ bool mCancelEncoding;
+ void* mCookie1;
+ void* mCookie2;
+ void* mCookie3;
+ void* mCookie4;
+ CameraFrame::FrameType mType;
+ android::sp<Encoder_libjpeg> mThumb;
+ Utils::Semaphore mCancelSem;
+
+ size_t encode(params*);
+};
+
+} // namespace Camera
+} // namespace Ti
+
+#endif
diff --git a/camera/inc/FrameDecoder.h b/camera/inc/FrameDecoder.h
new file mode 100644
index 0000000..fab0544
--- /dev/null
+++ b/camera/inc/FrameDecoder.h
@@ -0,0 +1,173 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FRAMEDECODER_H_
+#define FRAMEDECODER_H_
+
+#include <utils/Vector.h>
+#include <utils/StrongPointer.h>
+#include "CameraHal.h"
+
+
+namespace Ti {
+namespace Camera {
+
+enum DecoderType {
+ DecoderType_MJPEG,
+ DecoderType_H264
+};
+
+enum BufferStatus {
+ BufferStatus_Unknown,
+ BufferStatus_InQueued,
+ BufferStatus_InWaitForEmpty,
+ BufferStatus_InDecoded,
+ BufferStatus_OutQueued,
+ BufferStatus_OutWaitForFill,
+ BufferStatus_OutFilled
+};
+
+enum DecoderState {
+ DecoderState_Uninitialized,
+ DecoderState_Initialized,
+ DecoderState_Running,
+ DecoderState_Requested_Stop,
+ DecoderState_Stoppped
+};
+
+class MediaBuffer: public virtual android::RefBase {
+
+public:
+ MediaBuffer()
+ : bufferId(-1), buffer(0), filledLen(0), size(0),
+ mOffset(0), mTimestamp(0), mStatus(BufferStatus_Unknown) {
+ }
+
+ MediaBuffer(int id, void* buffer, size_t buffSize = 0)
+ : bufferId(id), buffer(buffer), filledLen(0), size(buffSize),
+ mOffset(0), mTimestamp(0), mStatus(BufferStatus_Unknown) {
+ }
+
+ virtual ~MediaBuffer() {
+ }
+
+ int bufferId;
+ void* buffer;
+ int filledLen;
+ size_t size;
+
+ nsecs_t getTimestamp() const {
+ return mTimestamp;
+ }
+ void setTimestamp(nsecs_t ts) {
+ mTimestamp = ts;
+ }
+
+ BufferStatus getStatus() const {
+ return mStatus;
+ }
+
+ void setStatus(BufferStatus status) {
+ mStatus = status;
+ }
+
+ android::Mutex& getLock() const {
+ return mLock;
+ }
+
+ uint32_t getOffset() const {
+ return mOffset;
+ }
+
+ void setOffset(uint32_t offset) {
+ mOffset = offset;
+ }
+
+private:
+ uint32_t mOffset;
+ nsecs_t mTimestamp;
+ BufferStatus mStatus;
+ mutable android::Mutex mLock;
+};
+
+struct DecoderParameters {
+ int width;
+ int height;
+ int inputBufferCount;
+ int outputBufferCount;
+};
+
+class FrameDecoder {
+public:
+ FrameDecoder();
+ virtual ~FrameDecoder();
+ void configure(const DecoderParameters& config);
+ status_t start();
+ void stop();
+ void release();
+ void flush();
+ status_t queueInputBuffer(int id);
+ status_t dequeueInputBuffer(int &id);
+ status_t queueOutputBuffer(int id);
+ status_t dequeueOutputBuffer(int &id);
+
+ void registerOutputBuffers(android::Vector< android::sp<MediaBuffer> > *outBuffers) {
+ android::AutoMutex lock(mLock);
+ mOutQueue.clear();
+ mOutBuffers = outBuffers;
+ }
+
+ void registerInputBuffers(android::Vector< android::sp<MediaBuffer> > *inBuffers) {
+ android::AutoMutex lock(mLock);
+ mInQueue.clear();
+ mInBuffers = inBuffers;
+ }
+
+ virtual bool getPaddedDimensions(size_t &width, size_t &height) {
+ return false;
+ }
+
+ void setHal(CameraHal* hal) {
+ mCameraHal = hal;
+ }
+
+protected:
+ virtual void doConfigure(const DecoderParameters& config) = 0;
+ virtual void doProcessInputBuffer() = 0;
+ virtual status_t doStart() = 0;
+ virtual void doStop() = 0;
+ virtual void doFlush() = 0;
+ virtual void doRelease() = 0;
+
+ DecoderParameters mParams;
+
+ android::Vector<int> mInQueue;
+ android::Vector<int> mOutQueue;
+
+ android::Vector< android::sp<MediaBuffer> >* mInBuffers;
+ android::Vector< android::sp<MediaBuffer> >* mOutBuffers;
+
+ CameraHal* mCameraHal;
+
+private:
+ DecoderState mState;
+ android::Mutex mLock;
+};
+
+} // namespace Camera
+} // namespace Ti
+
+#endif /* FRAMEDECODER_H_ */
diff --git a/camera/inc/General3A_Settings.h b/camera/inc/General3A_Settings.h
new file mode 100644
index 0000000..c1e017c
--- /dev/null
+++ b/camera/inc/General3A_Settings.h
@@ -0,0 +1,295 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file General3A_Settings.h
+*
+* This file maps the Camera Hardware Interface to OMX.
+*
+*/
+
+#include "OMX_TI_IVCommon.h"
+#include "OMX_TI_Common.h"
+#include "OMX_TI_Index.h"
+#include "TICameraParameters.h"
+
+#ifndef GENERAL_3A_SETTINGS_H
+#define GENERAL_3A_SETTINGS_H
+
+namespace Ti {
+namespace Camera {
+
+struct userToOMX_LUT{
+ const char * userDefinition;
+ int omxDefinition;
+};
+
+struct LUTtype{
+ int size;
+ const userToOMX_LUT *Table;
+};
+
+const userToOMX_LUT isoUserToOMX[] = {
+ { TICameraParameters::ISO_MODE_AUTO, 0 },
+ { TICameraParameters::ISO_MODE_100, 100 },
+ { TICameraParameters::ISO_MODE_200, 200 },
+ { TICameraParameters::ISO_MODE_400, 400 },
+ { TICameraParameters::ISO_MODE_800, 800 },
+ { TICameraParameters::ISO_MODE_1000, 1000 },
+ { TICameraParameters::ISO_MODE_1200, 1200 },
+ { TICameraParameters::ISO_MODE_1600, 1600 },
+};
+
+const userToOMX_LUT effects_UserToOMX [] = {
+ { android::CameraParameters::EFFECT_NONE, OMX_ImageFilterNone },
+ { android::CameraParameters::EFFECT_NEGATIVE, OMX_ImageFilterNegative },
+ { android::CameraParameters::EFFECT_SOLARIZE, OMX_ImageFilterSolarize },
+ { android::CameraParameters::EFFECT_SEPIA, OMX_ImageFilterSepia },
+ { android::CameraParameters::EFFECT_MONO, OMX_ImageFilterGrayScale },
+ { android::CameraParameters::EFFECT_BLACKBOARD, OMX_TI_ImageFilterBlackBoard },
+ { android::CameraParameters::EFFECT_WHITEBOARD, OMX_TI_ImageFilterWhiteBoard },
+ { android::CameraParameters::EFFECT_AQUA, OMX_TI_ImageFilterAqua },
+ { android::CameraParameters::EFFECT_POSTERIZE, OMX_TI_ImageFilterPosterize },
+#ifdef OMAP_ENHANCEMENT
+ { TICameraParameters::EFFECT_NATURAL, OMX_ImageFilterNatural },
+ { TICameraParameters::EFFECT_VIVID, OMX_ImageFilterVivid },
+ { TICameraParameters::EFFECT_COLOR_SWAP, OMX_ImageFilterColourSwap },
+ { TICameraParameters::EFFECT_BLACKWHITE, OMX_TI_ImageFilterBlackWhite }
+#endif
+};
+
+const userToOMX_LUT scene_UserToOMX [] = {
+ { android::CameraParameters::SCENE_MODE_AUTO, OMX_Manual },
+ { android::CameraParameters::SCENE_MODE_LANDSCAPE, OMX_Landscape },
+ { android::CameraParameters::SCENE_MODE_NIGHT_PORTRAIT, OMX_NightPortrait },
+ { android::CameraParameters::SCENE_MODE_FIREWORKS, OMX_Fireworks },
+ { android::CameraParameters::SCENE_MODE_ACTION, OMX_TI_Action },
+ { android::CameraParameters::SCENE_MODE_BEACH, OMX_TI_Beach },
+ { android::CameraParameters::SCENE_MODE_CANDLELIGHT, OMX_TI_Candlelight },
+ { android::CameraParameters::SCENE_MODE_NIGHT, OMX_TI_Night },
+ { android::CameraParameters::SCENE_MODE_PARTY, OMX_TI_Party },
+ { android::CameraParameters::SCENE_MODE_PORTRAIT, OMX_TI_Portrait },
+ { android::CameraParameters::SCENE_MODE_SNOW, OMX_TI_Snow },
+ { android::CameraParameters::SCENE_MODE_STEADYPHOTO, OMX_TI_Steadyphoto },
+ { android::CameraParameters::SCENE_MODE_SUNSET, OMX_TI_Sunset },
+ { android::CameraParameters::SCENE_MODE_THEATRE, OMX_TI_Theatre },
+ { android::CameraParameters::SCENE_MODE_SPORTS, OMX_Sport },
+#ifdef OMAP_ENHANCEMENT
+ { TICameraParameters::SCENE_MODE_CLOSEUP, OMX_Closeup },
+ { TICameraParameters::SCENE_MODE_AQUA, OMX_Underwater },
+ { TICameraParameters::SCENE_MODE_MOOD, OMX_Mood },
+ { TICameraParameters::SCENE_MODE_NIGHT_INDOOR, OMX_NightIndoor },
+ { TICameraParameters::SCENE_MODE_DOCUMENT, OMX_Document },
+ { TICameraParameters::SCENE_MODE_BARCODE, OMX_Barcode },
+ { TICameraParameters::SCENE_MODE_VIDEO_SUPER_NIGHT, OMX_SuperNight },
+ { TICameraParameters::SCENE_MODE_VIDEO_CINE, OMX_Cine },
+ { TICameraParameters::SCENE_MODE_VIDEO_OLD_FILM, OMX_OldFilm },
+#endif
+};
+
+const userToOMX_LUT whiteBal_UserToOMX [] = {
+ { android::CameraParameters::WHITE_BALANCE_AUTO, OMX_WhiteBalControlAuto },
+ { android::CameraParameters::WHITE_BALANCE_DAYLIGHT, OMX_WhiteBalControlSunLight },
+ { android::CameraParameters::WHITE_BALANCE_CLOUDY_DAYLIGHT, OMX_WhiteBalControlCloudy },
+ { android::CameraParameters::WHITE_BALANCE_FLUORESCENT, OMX_WhiteBalControlFluorescent },
+ { android::CameraParameters::WHITE_BALANCE_INCANDESCENT, OMX_WhiteBalControlIncandescent },
+ { android::CameraParameters::WHITE_BALANCE_SHADE, OMX_TI_WhiteBalControlShade },
+ { android::CameraParameters::WHITE_BALANCE_TWILIGHT, OMX_TI_WhiteBalControlTwilight },
+ { android::CameraParameters::WHITE_BALANCE_WARM_FLUORESCENT, OMX_TI_WhiteBalControlWarmFluorescent },
+#ifdef OMAP_ENHANCEMENT
+ { TICameraParameters::WHITE_BALANCE_TUNGSTEN, OMX_WhiteBalControlTungsten },
+ { TICameraParameters::WHITE_BALANCE_HORIZON, OMX_WhiteBalControlHorizon },
+ { TICameraParameters::WHITE_BALANCE_SUNSET, OMX_TI_WhiteBalControlSunset }
+#endif
+};
+
+const userToOMX_LUT antibanding_UserToOMX [] = {
+ { android::CameraParameters::ANTIBANDING_OFF, OMX_FlickerCancelOff },
+ { android::CameraParameters::ANTIBANDING_AUTO, OMX_FlickerCancelAuto },
+ { android::CameraParameters::ANTIBANDING_50HZ, OMX_FlickerCancel50 },
+ { android::CameraParameters::ANTIBANDING_60HZ, OMX_FlickerCancel60 }
+};
+
+const userToOMX_LUT focus_UserToOMX [] = {
+ { android::CameraParameters::FOCUS_MODE_AUTO, OMX_IMAGE_FocusControlAutoLock },
+ { android::CameraParameters::FOCUS_MODE_INFINITY, OMX_IMAGE_FocusControlAutoInfinity },
+ { android::CameraParameters::FOCUS_MODE_INFINITY, OMX_IMAGE_FocusControlHyperfocal },
+ { android::CameraParameters::FOCUS_MODE_MACRO, OMX_IMAGE_FocusControlAutoMacro },
+ { android::CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO, OMX_IMAGE_FocusControlAuto },
+ { android::CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE, OMX_IMAGE_FocusControlAuto },
+#ifdef OMAP_ENHANCEMENT
+ { TICameraParameters::FOCUS_MODE_FACE , OMX_IMAGE_FocusControlContinousFacePriority },
+ { TICameraParameters::FOCUS_MODE_PORTRAIT, OMX_IMAGE_FocusControlPortrait },
+ { TICameraParameters::FOCUS_MODE_EXTENDED, OMX_IMAGE_FocusControlExtended },
+#endif
+ { TICameraParameters::FOCUS_MODE_OFF , OMX_IMAGE_FocusControlOff }
+};
+
+const userToOMX_LUT exposure_UserToOMX [] = {
+ { TICameraParameters::EXPOSURE_MODE_MANUAL, OMX_ExposureControlOff },
+ { TICameraParameters::EXPOSURE_MODE_AUTO, OMX_ExposureControlAuto },
+ { TICameraParameters::EXPOSURE_MODE_NIGHT, OMX_ExposureControlNight },
+ { TICameraParameters::EXPOSURE_MODE_BACKLIGHT, OMX_ExposureControlBackLight },
+ { TICameraParameters::EXPOSURE_MODE_SPOTLIGHT, OMX_ExposureControlSpotLight},
+ { TICameraParameters::EXPOSURE_MODE_SPORTS, OMX_ExposureControlSports },
+ { TICameraParameters::EXPOSURE_MODE_SNOW, OMX_ExposureControlSnow },
+ { TICameraParameters::EXPOSURE_MODE_BEACH, OMX_ExposureControlBeach },
+ { TICameraParameters::EXPOSURE_MODE_APERTURE, OMX_ExposureControlLargeAperture },
+ { TICameraParameters::EXPOSURE_MODE_SMALL_APERTURE, OMX_ExposureControlSmallApperture },
+};
+
+const userToOMX_LUT flash_UserToOMX [] = {
+ { android::CameraParameters::FLASH_MODE_OFF ,OMX_IMAGE_FlashControlOff },
+ { android::CameraParameters::FLASH_MODE_ON ,OMX_IMAGE_FlashControlOn },
+ { android::CameraParameters::FLASH_MODE_AUTO ,OMX_IMAGE_FlashControlAuto },
+ { android::CameraParameters::FLASH_MODE_TORCH ,OMX_IMAGE_FlashControlTorch },
+ { android::CameraParameters::FLASH_MODE_RED_EYE ,OMX_IMAGE_FlashControlRedEyeReduction },
+#ifdef OMAP_ENHANCEMENT
+ { TICameraParameters::FLASH_MODE_FILL_IN ,OMX_IMAGE_FlashControlFillin }
+#endif
+};
+
+const LUTtype ExpLUT =
+ {
+ sizeof(exposure_UserToOMX)/sizeof(exposure_UserToOMX[0]),
+ exposure_UserToOMX
+ };
+
+const LUTtype WBalLUT =
+ {
+ sizeof(whiteBal_UserToOMX)/sizeof(whiteBal_UserToOMX[0]),
+ whiteBal_UserToOMX
+ };
+
+const LUTtype FlickerLUT =
+ {
+ sizeof(antibanding_UserToOMX)/sizeof(antibanding_UserToOMX[0]),
+ antibanding_UserToOMX
+ };
+
+const LUTtype SceneLUT =
+ {
+ sizeof(scene_UserToOMX)/sizeof(scene_UserToOMX[0]),
+ scene_UserToOMX
+ };
+
+const LUTtype FlashLUT =
+ {
+ sizeof(flash_UserToOMX)/sizeof(flash_UserToOMX[0]),
+ flash_UserToOMX
+ };
+
+const LUTtype EffLUT =
+ {
+ sizeof(effects_UserToOMX)/sizeof(effects_UserToOMX[0]),
+ effects_UserToOMX
+ };
+
+const LUTtype FocusLUT =
+ {
+ sizeof(focus_UserToOMX)/sizeof(focus_UserToOMX[0]),
+ focus_UserToOMX
+ };
+
+const LUTtype IsoLUT =
+ {
+ sizeof(isoUserToOMX)/sizeof(isoUserToOMX[0]),
+ isoUserToOMX
+ };
+
+/*
+* class Gen3A_settings
+* stores the 3A settings
+* also defines the look up tables
+* for mapping settings from Hal to OMX
+*/
+class Gen3A_settings{
+ public:
+
+ int Exposure;
+ int WhiteBallance;
+ int Flicker;
+ int SceneMode;
+ int Effect;
+ int Focus;
+ int EVCompensation;
+ int Contrast;
+ int Saturation;
+ int Sharpness;
+ int ISO;
+ int FlashMode;
+ int ManualExposure;
+ int ManualExposureRight;
+ int ManualGain;
+ int ManualGainRight;
+
+ unsigned int Brightness;
+ OMX_BOOL ExposureLock;
+ OMX_BOOL FocusLock;
+ OMX_BOOL WhiteBalanceLock;
+
+ OMX_BOOL AlgoExternalGamma;
+ OMX_BOOL AlgoNSF1;
+ OMX_BOOL AlgoNSF2;
+ OMX_BOOL AlgoSharpening;
+ OMX_BOOL AlgoThreeLinColorMap;
+ OMX_BOOL AlgoGIC;
+
+ OMX_TI_CONFIG_GAMMATABLE_TYPE mGammaTable;
+
+};
+
+/*
+* Flags raised when a setting is changed
+*/
+enum E3ASettingsFlags
+{
+ SetSceneMode = 1 << 0,
+ SetEVCompensation = 1 << 1,
+ SetWhiteBallance = 1 << 2,
+ SetFlicker = 1 << 3,
+ SetExposure = 1 << 4,
+ SetSharpness = 1 << 5,
+ SetBrightness = 1 << 6,
+ SetContrast = 1 << 7,
+ SetISO = 1 << 8,
+ SetSaturation = 1 << 9,
+ SetEffect = 1 << 10,
+ SetFocus = 1 << 11,
+ SetExpMode = 1 << 14,
+ SetFlash = 1 << 15,
+ SetExpLock = 1 << 16,
+ SetWBLock = 1 << 17,
+ SetMeteringAreas = 1 << 18,
+ SetManualExposure = 1 << 19,
+
+ SetAlgoExternalGamma = 1 << 20,
+ SetAlgoNSF1 = 1 << 21,
+ SetAlgoNSF2 = 1 << 22,
+ SetAlgoSharpening = 1 << 23,
+ SetAlgoThreeLinColorMap = 1 << 24,
+ SetAlgoGIC = 1 << 25,
+ SetGammaTable = 1 << 26,
+
+
+ E3aSettingMax,
+ E3AsettingsAll = ( ((E3aSettingMax -1 ) << 1) -1 ) /// all possible flags raised
+};
+
+} // namespace Camera
+} // namespace Ti
+
+#endif //GENERAL_3A_SETTINGS_H
diff --git a/camera/inc/NV12_resize.h b/camera/inc/NV12_resize.h
new file mode 100644
index 0000000..4b05a4f
--- /dev/null
+++ b/camera/inc/NV12_resize.h
@@ -0,0 +1,135 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef NV12_RESIZE_H_
+#define NV12_RESIZE_H_
+
+#include "Common.h"
+
+typedef unsigned char mmBool;
+typedef unsigned char mmUchar;
+typedef unsigned char mmUint8;
+typedef unsigned char mmByte;
+typedef unsigned short mmUint16;
+typedef unsigned int mmUint32;
+typedef unsigned long mmUint64;
+typedef signed char mmInt8;
+typedef char mmChar;
+typedef signed short mmInt16;
+typedef signed int mmInt32;
+typedef signed long mmLong;
+typedef signed int mmHandle;
+typedef float mmFloat;
+typedef double mmDouble;
+typedef int HObj;
+typedef HObj HFile;
+typedef int HDir;
+typedef void* mmMutexHandle;
+typedef struct _fstat {
+ mmInt32 fileSize;
+} VE_FileAttribute;
+
+typedef struct {
+ mmInt32 second;
+ mmInt32 millisecond;
+} tsVE_Time;
+
+typedef struct {
+ mmInt32 year;
+ mmInt32 month;
+ mmInt32 day;
+ mmInt32 hour;
+ mmInt32 minute;
+ mmInt32 second;
+} TmDateTime;
+
+const mmUint8 bWeights[8][8][4] = {
+ {{64, 0, 0, 0}, {56, 0, 0, 8}, {48, 0, 0,16}, {40, 0, 0,24},
+ {32, 0, 0,32}, {24, 0, 0,40}, {16, 0, 0,48}, { 8, 0, 0,56}},
+
+ {{56, 8, 0, 0}, {49, 7, 1, 7}, {42, 6, 2,14}, {35, 5, 3,21},
+ {28, 4, 4,28}, {21, 3, 5,35}, {14, 2, 6,42}, { 7, 1, 7,49}},
+
+ {{48,16, 0, 0}, {42,14, 2, 6}, {36,12,4 ,12}, {30,10,6 ,18},
+ {24, 8, 8,24}, {18, 6,10,30}, {12,4 ,12,36}, { 6, 2,14,42}},
+
+ {{40,24,0 ,0 }, {35,21, 3, 5}, {30,18, 6,10}, {25,15, 9,15},
+ {20,12,12,20}, {15, 9,15,25}, {10, 6,18,30}, { 5, 3,21,35}},
+
+ {{32,32, 0,0 }, {28,28, 4, 4}, {24,24, 8, 8}, {20,20,12,12},
+ {16,16,16,16}, {12,12,20,20}, { 8, 8,24,24}, { 4, 4,28,28}},
+
+ {{24,40,0 ,0 }, {21,35, 5, 3}, {18,30,10, 6}, {15,25,15, 9},
+ {12,20,20,12}, { 9,15,25,15}, { 6,10,30,18}, { 3, 5,35,21}},
+
+ {{16,48, 0,0 }, {14,42, 6, 2}, {12,36,12, 4}, {10,30,18, 6},
+ {8 ,24,24,8 }, { 6,18,30,10}, { 4,12,36,12}, { 2, 6,42,14}},
+
+ {{ 8,56, 0,0 }, { 7,49, 7, 1}, { 6,42,14, 2}, { 5,35,21, 3},
+ { 4,28,28,4 }, { 3,21,35, 5}, { 2,14,42, 6}, { 1,7 ,49, 7}}
+};
+
+typedef enum {
+ IC_FORMAT_NONE,
+ IC_FORMAT_RGB565,
+ IC_FORMAT_RGB888,
+ IC_FORMAT_YCbCr420_lp,
+ IC_FORMAT_YCbCr,
+ IC_FORMAT_YCbCr420_FRAME_PK,
+ IC_FORMAT_MAX
+} enumImageFormat;
+
+/* This structure defines the format of an image */
+typedef struct {
+ mmInt32 uWidth;
+ mmInt32 uHeight;
+ mmInt32 uStride;
+ enumImageFormat eFormat;
+ mmByte *imgPtr;
+ mmByte *clrPtr;
+ mmInt32 uOffset;
+} structConvImage;
+
+typedef struct IC_crop_struct {
+ mmUint32 x; /* x pos of rectangle */
+ mmUint32 y; /* y pos of rectangle */
+ mmUint32 uWidth; /* dx of rectangle */
+ mmUint32 uHeight; /* dy of rectangle */
+} IC_rect_type;
+
+/*==========================================================================
+* Function Name : VT_resizeFrame_Video_opt2_lp
+*
+* Description : Resize a yuv frame.
+*
+* Input(s) : input_img_ptr -> Input Image Structure
+* : output_img_ptr -> Output Image Structure
+* : cropout -> crop structure
+*
+* Value Returned : mmBool -> FALSE on error TRUE on success
+* NOTE:
+* Not tested for crop functionality.
+* faster version.
+============================================================================*/
+mmBool
+VT_resizeFrame_Video_opt2_lp(
+ structConvImage* i_img_ptr, /* Points to the input image */
+ structConvImage* o_img_ptr, /* Points to the output image */
+ IC_rect_type* cropout, /* how much to resize to in final image */
+ mmUint16 dummy /* Transparent pixel value */
+ );
+
+#endif // NV12_RESIZE_H_
diff --git a/camera/inc/OmxFrameDecoder.h b/camera/inc/OmxFrameDecoder.h
new file mode 100644
index 0000000..7cbbf2c
--- /dev/null
+++ b/camera/inc/OmxFrameDecoder.h
@@ -0,0 +1,204 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef OMXFRAMEDECODER_H_
+#define OMXFRAMEDECODER_H_
+
+
+#include <utils/threads.h>
+#include <utils/List.h>
+#include "FrameDecoder.h"
+#include "OMX_Types.h"
+#include "OMX_Core.h"
+#include "OMX_Component.h"
+#include "Decoder_libjpeg.h"
+
+namespace Ti {
+namespace Camera {
+
+enum OmxDecoderState {
+ OmxDecoderState_Unloaded = 0,
+ OmxDecoderState_Loaded,
+ OmxDecoderState_Idle,
+ OmxDecoderState_Executing,
+ OmxDecoderState_Error,
+ OmxDecoderState_Invalid,
+ OmxDecoderState_Reconfigure,
+ OmxDecoderState_Exit
+};
+
+enum PortType {
+ PortIndexInput = 0,
+ PortIndexOutput = 1
+};
+
+
+struct OmxMessage {
+ enum {
+ EVENT,
+ EMPTY_BUFFER_DONE,
+ FILL_BUFFER_DONE,
+ }type;
+
+ union {
+ // if type == EVENT
+ struct {
+ OMX_PTR appData;
+ OMX_EVENTTYPE event;
+ OMX_U32 data1;
+ OMX_U32 data2;
+ OMX_PTR pEventData;
+ } eventData;
+
+ // if type == (EMPTY_BUFFER_DONE || FILL_BUFFER_DONE)
+ struct {
+ OMX_PTR appData;
+ OMX_BUFFERHEADERTYPE* pBuffHead;
+ } bufferData;
+ } u;
+};
+
+class CallbackDispatcher;
+
+struct CallbackDispatcherThread : public android::Thread {
+ CallbackDispatcherThread(CallbackDispatcher *dispatcher)
+ : mDispatcher(dispatcher) {
+ }
+
+private:
+ CallbackDispatcher *mDispatcher;
+
+ bool threadLoop();
+
+ CallbackDispatcherThread(const CallbackDispatcherThread &);
+ CallbackDispatcherThread &operator=(const CallbackDispatcherThread &);
+};
+
+class CallbackDispatcher
+{
+
+public:
+ CallbackDispatcher();
+ ~CallbackDispatcher();
+
+ void post(const OmxMessage &msg);
+ bool loop();
+
+private:
+ void dispatch(const OmxMessage &msg);
+
+ CallbackDispatcher(const CallbackDispatcher &);
+ CallbackDispatcher &operator=(const CallbackDispatcher &);
+
+ android::Mutex mLock;
+ android::Condition mQueueChanged;
+ android::List<OmxMessage> mQueue;
+ android::sp<CallbackDispatcherThread> mThread;
+ bool mDone;
+};
+
+class OmxFrameDecoder : public FrameDecoder
+{
+
+public:
+ OmxFrameDecoder(DecoderType type = DecoderType_MJPEG);
+ virtual ~OmxFrameDecoder();
+
+ OMX_ERRORTYPE eventHandler(const OMX_EVENTTYPE event, const OMX_U32 data1, const OMX_U32 data2,
+ const OMX_PTR pEventData);
+ OMX_ERRORTYPE fillBufferDoneHandler(OMX_BUFFERHEADERTYPE* pBuffHead);
+ OMX_ERRORTYPE emptyBufferDoneHandler(OMX_BUFFERHEADERTYPE* pBuffHead);
+
+ static OMX_ERRORTYPE eventCallback(const OMX_HANDLETYPE component,
+ const OMX_PTR appData, const OMX_EVENTTYPE event, const OMX_U32 data1, const OMX_U32 data2,
+ const OMX_PTR pEventData);
+ static OMX_ERRORTYPE emptyBufferDoneCallback(OMX_HANDLETYPE hComponent, OMX_PTR appData, OMX_BUFFERHEADERTYPE* pBuffHead);
+ static OMX_ERRORTYPE fillBufferDoneCallback(OMX_HANDLETYPE hComponent, OMX_PTR appData, OMX_BUFFERHEADERTYPE* pBuffHead);
+
+ virtual bool getPaddedDimensions(size_t &width, size_t &height);
+
+protected:
+ virtual void doConfigure (const DecoderParameters& config);
+ virtual void doProcessInputBuffer();
+ virtual status_t doStart();
+ virtual void doStop();
+ virtual void doFlush();
+ virtual void doRelease();
+
+private:
+ status_t setComponentRole();
+ status_t enableGrallockHandles();
+ status_t allocateBuffersOutput();
+ void freeBuffersOnOutput();
+ void freeBuffersOnInput();
+ status_t doPortReconfigure();
+ void dumpPortSettings(PortType port);
+ status_t getAndConfigureDecoder();
+ status_t configureJpegPorts(int width, int height);
+ status_t switchToIdle();
+ status_t allocateBuffersInput();
+ status_t disablePortSync(int port);
+ status_t enablePortSync(int port);
+ void queueOutputBuffers();
+ status_t setVideoOutputFormat(OMX_U32 width, OMX_U32 height);
+
+
+ status_t omxInit();
+ status_t omxGetHandle(OMX_HANDLETYPE *handle, OMX_PTR pAppData, OMX_CALLBACKTYPE & callbacks);
+ OmxDecoderState getOmxState() { return mCurrentState; }
+ status_t commitState(OmxDecoderState state) { mPreviousState = mCurrentState; mCurrentState = state; return NO_ERROR; }
+ status_t setVideoPortFormatType(
+ OMX_U32 portIndex,
+ OMX_VIDEO_CODINGTYPE compressionFormat,
+ OMX_COLOR_FORMATTYPE colorFormat);
+ status_t omxGetParameter(OMX_INDEXTYPE index, OMX_PTR ptr);
+ status_t omxSetParameter(OMX_INDEXTYPE index, OMX_PTR ptr);
+ status_t omxSendCommand(OMX_COMMANDTYPE cmd, OMX_S32 param);
+ status_t omxGetConfig(OMX_INDEXTYPE index, OMX_PTR ptr);
+ status_t omxSetConfig(OMX_INDEXTYPE index, OMX_PTR ptr);
+ status_t omxFillThisBuffer(OMX_BUFFERHEADERTYPE *pOutBufHdr);
+ status_t omxEmptyThisBuffer(android::sp<MediaBuffer>& inBuffer, OMX_BUFFERHEADERTYPE *pInBufHdr);
+ void omxDumpPortSettings(OMX_PARAM_PORTDEFINITIONTYPE& def);
+ void omxDumpBufferHeader (OMX_BUFFERHEADERTYPE* bh);
+ status_t omxSwitchToExecutingSync();
+
+ bool mOmxInialized;
+
+ OMX_HANDLETYPE mHandleComp;
+ OmxDecoderState mCurrentState;
+ OmxDecoderState mPreviousState;
+
+ // Condition and Mutex used during OpenMAX state transitions & command completion
+ android::Condition mStateCondition;
+ android::Mutex mHwLock;
+
+ android::Vector<OMX_BUFFERHEADERTYPE*> mOutBufferHeaders;
+ android::Vector<OMX_BUFFERHEADERTYPE*> mInBufferHeaders;
+
+ CallbackDispatcher mDispatcher;
+
+ bool mStopping;
+ DecoderType mDecoderType;
+
+ // If true we will search for DHT in JPEG buffer
+ bool mIsNeedCheckDHT;
+ // If true we always append DHT to JPEG buffer
+ bool mAlwaysAppendDHT;
+};
+
+} //namespace Camera
+} //namespace Ti
+#endif /* OMXFRAMEDECODER_H_ */
diff --git a/camera/inc/SensorListener.h b/camera/inc/SensorListener.h
new file mode 100644
index 0000000..44037b7
--- /dev/null
+++ b/camera/inc/SensorListener.h
@@ -0,0 +1,105 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file SensorListener.h
+*
+* This defines API for camerahal to get sensor events
+*
+*/
+
+#ifndef ANDROID_CAMERA_HARDWARE_SENSOR_LISTENER_H
+#define ANDROID_CAMERA_HARDWARE_SENSOR_LISTENER_H
+
+#include <android/sensor.h>
+#include <gui/Sensor.h>
+#include <gui/SensorManager.h>
+#include <gui/SensorEventQueue.h>
+#include <utils/Looper.h>
+
+#include "Common.h"
+
+namespace Ti {
+namespace Camera {
+
+/**
+ * SensorListener class - Registers with sensor manager to get sensor events
+ */
+
+typedef void (*orientation_callback_t) (uint32_t orientation, uint32_t tilt, void* cookie);
+
+// Dedicated thread that services an android::Looper so sensor events queued
+// on that looper are dispatched without blocking the caller's thread.
+class SensorLooperThread : public android::Thread {
+    public:
+        // Takes a (non-owning) looper pointer and wraps it in a strong
+        // reference for the lifetime of this thread object.
+        SensorLooperThread(android::Looper* looper)
+        : Thread(false) {
+            mLooper = android::sp<android::Looper>(looper);
+        }
+        ~SensorLooperThread() {
+            mLooper.clear();
+        }
+
+        // Poll forever (-1 timeout) so registered callbacks fire as events
+        // arrive; always returns true to keep the thread loop running.
+        // NOTE(review): the pollOnce() result is currently ignored.
+        virtual bool threadLoop() {
+            int32_t ret = mLooper->pollOnce(-1);
+            return true;
+        }
+
+        // force looper wake up (used to unblock pollOnce, e.g. on shutdown)
+        void wake() {
+            mLooper->wake();
+        }
+    private:
+        android::sp<android::Looper> mLooper;
+};
+
+
+// Registers with the Android SensorManager and forwards orientation events
+// to the camera HAL through a user-supplied callback.
+class SensorListener : public android::RefBase
+{
+/* public - types */
+public:
+    // Bitmask of sensor types; values combine in sensorsEnabled below.
+    typedef enum {
+        SENSOR_ACCELEROMETER = 1 << 0,
+        SENSOR_MAGNETIC_FIELD = 1 << 1,
+        SENSOR_GYROSCOPE = 1 << 2,
+        SENSOR_LIGHT = 1 << 3,
+        SENSOR_PROXIMITY = 1 << 4,
+        SENSOR_ORIENTATION = 1 << 5,
+    } sensor_type_t;
+/* public - functions */
+public:
+    SensorListener();
+    ~SensorListener();
+    // Sets up the sensor event queue / looper machinery.
+    // NOTE(review): presumably creates mSensorEventQueue, mLooper and
+    // mSensorLooperThread — confirm against SensorListener.cpp.
+    status_t initialize();
+    // Registers the orientation callback; cookie is passed back verbatim.
+    void setCallbacks(orientation_callback_t orientation_cb, void *cookie);
+    void enableSensor(sensor_type_t type);
+    void disableSensor(sensor_type_t type);
+    // Invokes the registered orientation callback with the given values.
+    void handleOrientation(uint32_t orientation, uint32_t tilt);
+/* public - member variables */
+public:
+    android::sp<android::SensorEventQueue> mSensorEventQueue;
+/* private - member variables */
+private:
+    int sensorsEnabled;              // bitmask of sensor_type_t values
+    orientation_callback_t mOrientationCb;
+    void *mCbCookie;                 // opaque cookie passed to mOrientationCb
+    android::sp<android::Looper> mLooper;
+    android::sp<SensorLooperThread> mSensorLooperThread;
+    android::Mutex mLock;
+};
+
+} // namespace Camera
+} // namespace Ti
+
+#endif
diff --git a/camera/inc/SwFrameDecoder.h b/camera/inc/SwFrameDecoder.h
new file mode 100644
index 0000000..f123940
--- /dev/null
+++ b/camera/inc/SwFrameDecoder.h
@@ -0,0 +1,47 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SWFRAMEDECODER_H_
+#define SWFRAMEDECODER_H_
+
+#include "FrameDecoder.h"
+#include "Decoder_libjpeg.h"
+
+namespace Ti {
+namespace Camera {
+
+// Software JPEG frame decoder built on top of Decoder_libjpeg. Implements
+// the FrameDecoder template-method hooks; start/stop/flush/release are
+// no-ops because libjpeg decoding is stateless between frames.
+class SwFrameDecoder: public FrameDecoder {
+public:
+    SwFrameDecoder();
+    virtual ~SwFrameDecoder();
+
+protected:
+    virtual void doConfigure(const DecoderParameters& config);
+    // Decodes the next queued input buffer via mJpgdecoder.
+    virtual void doProcessInputBuffer();
+    virtual status_t doStart() { return NO_ERROR; }
+    virtual void doStop() { }
+    virtual void doFlush() { }
+    virtual void doRelease() { }
+
+private:
+    // Size of the JPEG once a Huffman (DHT) header has been prepended.
+    // NOTE(review): presumably paired with mJpegWithHeaderBuffer — confirm
+    // against SwFrameDecoder.cpp.
+    int mjpegWithHdrSize;
+    Decoder_libjpeg mJpgdecoder;
+    unsigned char* mJpegWithHeaderBuffer;
+};
+
+} // namespace Camera
+} // namespace Ti
+#endif /* SWFRAMEDECODER_H_ */
diff --git a/camera/inc/TICameraParameters.h b/camera/inc/TICameraParameters.h
new file mode 100644
index 0000000..951c663
--- /dev/null
+++ b/camera/inc/TICameraParameters.h
@@ -0,0 +1,265 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef TI_CAMERA_PARAMETERS_H
+#define TI_CAMERA_PARAMETERS_H
+
+#include <utils/KeyedVector.h>
+#include <utils/String8.h>
+
+namespace Ti {
+namespace Camera {
+
+///TI Specific Camera Parameters
+// Declares TI-specific camera parameter keys and values that extend the
+// standard android::CameraParameters vocabulary. All members are string
+// constants defined in TICameraParameters.cpp.
+class TICameraParameters
+{
+public:
+
+// Supported Camera indexes
+// Example value: "0,1,2,3", where 0-primary, 1-secondary1, 2-secondary2, 3-stereo camera
+static const char KEY_SUPPORTED_CAMERAS[];
+// Select logical Camera index
+static const char KEY_CAMERA[];
+static const char KEY_CAMERA_NAME[];
+static const char KEY_BURST[];
+static const char KEY_CAP_MODE[];
+static const char KEY_CAP_MODE_VALUES[];
+static const char KEY_VNF[];
+static const char KEY_VNF_SUPPORTED[];
+static const char KEY_SATURATION[];
+static const char KEY_BRIGHTNESS[];
+static const char KEY_SUPPORTED_EXPOSURE[];
+static const char KEY_EXPOSURE_MODE[];
+static const char KEY_SUPPORTED_MANUAL_EXPOSURE_MIN[];
+static const char KEY_SUPPORTED_MANUAL_EXPOSURE_MAX[];
+static const char KEY_SUPPORTED_MANUAL_EXPOSURE_STEP[];
+static const char KEY_SUPPORTED_MANUAL_GAIN_ISO_MIN[];
+static const char KEY_SUPPORTED_MANUAL_GAIN_ISO_MAX[];
+static const char KEY_SUPPORTED_MANUAL_GAIN_ISO_STEP[];
+static const char KEY_MANUAL_EXPOSURE[];
+static const char KEY_MANUAL_EXPOSURE_RIGHT[];
+static const char KEY_MANUAL_GAIN_ISO[];
+static const char KEY_MANUAL_GAIN_ISO_RIGHT[];
+static const char KEY_CONTRAST[];
+static const char KEY_SHARPNESS[];
+static const char KEY_ISO[];
+static const char KEY_SUPPORTED_ISO_VALUES[];
+static const char KEY_SUPPORTED_IPP[];
+static const char KEY_IPP[];
+static const char KEY_METERING_MODE[];
+static const char KEY_EXP_BRACKETING_RANGE[];
+static const char KEY_EXP_GAIN_BRACKETING_RANGE[];
+static const char KEY_ZOOM_BRACKETING_RANGE[];
+static const char KEY_TEMP_BRACKETING[];
+static const char KEY_TEMP_BRACKETING_RANGE_POS[];
+static const char KEY_TEMP_BRACKETING_RANGE_NEG[];
+static const char KEY_FLUSH_SHOT_CONFIG_QUEUE[];
+static const char KEY_SHUTTER_ENABLE[];
+static const char KEY_MEASUREMENT_ENABLE[];
+static const char KEY_INITIAL_VALUES[];
+static const char KEY_GBCE[];
+static const char KEY_GBCE_SUPPORTED[];
+static const char KEY_GLBCE[];
+static const char KEY_GLBCE_SUPPORTED[];
+static const char KEY_FRAMERATE_RANGES_EXT_SUPPORTED[];
+static const char KEY_FRAMERATES_EXT_SUPPORTED[];
+
+// TI recording hint to notify camera adapters of possible recording
+static const char KEY_RECORDING_HINT[];
+static const char KEY_AUTO_FOCUS_LOCK[];
+static const char KEY_CURRENT_ISO[];
+
+static const char KEY_SENSOR_ORIENTATION[];
+
+//TI extensions for camera capabilities
+static const char INITIAL_VALUES_TRUE[];
+static const char INITIAL_VALUES_FALSE[];
+
+// TI extensions to add values for ManualConvergence and AutoConvergence mode
+static const char KEY_AUTOCONVERGENCE_MODE[];
+static const char KEY_AUTOCONVERGENCE_MODE_VALUES[];
+static const char KEY_MANUAL_CONVERGENCE[];
+static const char KEY_SUPPORTED_MANUAL_CONVERGENCE_MIN[];
+static const char KEY_SUPPORTED_MANUAL_CONVERGENCE_MAX[];
+static const char KEY_SUPPORTED_MANUAL_CONVERGENCE_STEP[];
+
+// TI extensions to add Min frame rate Values
+static const char VIDEO_MINFRAMERATE_5[];
+static const char VIDEO_MINFRAMERATE_10[];
+static const char VIDEO_MINFRAMERATE_15[];
+static const char VIDEO_MINFRAMERATE_20[];
+static const char VIDEO_MINFRAMERATE_24[];
+static const char VIDEO_MINFRAMERATE_25[];
+static const char VIDEO_MINFRAMERATE_30[];
+static const char VIDEO_MINFRAMERATE_33[];
+
+//TI extensions for setting EXIF tags
+static const char KEY_EXIF_MODEL[];
+static const char KEY_EXIF_MAKE[];
+
+//TI extensions for additional GPS data
+static const char KEY_GPS_MAPDATUM[];
+static const char KEY_GPS_VERSION[];
+static const char KEY_GPS_DATESTAMP[];
+
+// TI extensions for VTC
+static const char KEY_VTC_HINT[];
+static const char KEY_VIDEO_ENCODER_HANDLE[];
+static const char KEY_VIDEO_ENCODER_SLICE_HEIGHT[];
+
+static const char RAW_WIDTH[];
+static const char RAW_HEIGHT[];
+
+//TI extensions to Image post-processing
+static const char IPP_LDCNSF[];
+static const char IPP_LDC[];
+static const char IPP_NSF[];
+static const char IPP_NONE[];
+
+//TI extensions to camera mode
+static const char HIGH_PERFORMANCE_MODE[];
+static const char HIGH_QUALITY_MODE[];
+static const char HIGH_QUALITY_ZSL_MODE[];
+static const char CP_CAM_MODE[];
+static const char VIDEO_MODE[];
+static const char VIDEO_MODE_HQ[];
+static const char EXPOSURE_BRACKETING[];
+static const char ZOOM_BRACKETING[];
+static const char TEMP_BRACKETING[];
+
+// TI extensions to standard android pixel formats
+static const char PIXEL_FORMAT_UNUSED[];
+static const char PIXEL_FORMAT_JPS[];
+static const char PIXEL_FORMAT_MPO[];
+static const char PIXEL_FORMAT_YUV422I_UYVY[];
+
+// TI extensions to standard android scene mode settings
+static const char SCENE_MODE_CLOSEUP[];
+static const char SCENE_MODE_AQUA[];
+static const char SCENE_MODE_SNOWBEACH[];
+static const char SCENE_MODE_MOOD[];
+static const char SCENE_MODE_NIGHT_INDOOR[];
+static const char SCENE_MODE_DOCUMENT[];
+static const char SCENE_MODE_BARCODE[];
+static const char SCENE_MODE_VIDEO_SUPER_NIGHT[];
+static const char SCENE_MODE_VIDEO_CINE[];
+static const char SCENE_MODE_VIDEO_OLD_FILM[];
+
+// TI extensions to standard android white balance settings.
+static const char WHITE_BALANCE_TUNGSTEN[];
+static const char WHITE_BALANCE_HORIZON[];
+static const char WHITE_BALANCE_SUNSET[];
+static const char WHITE_BALANCE_FACE[];
+
+// TI extensions to add exposure preset modes to android api
+static const char EXPOSURE_MODE_MANUAL[];
+static const char EXPOSURE_MODE_AUTO[];
+static const char EXPOSURE_MODE_NIGHT[];
+static const char EXPOSURE_MODE_BACKLIGHT[];
+static const char EXPOSURE_MODE_SPOTLIGHT[];
+static const char EXPOSURE_MODE_SPORTS[];
+static const char EXPOSURE_MODE_SNOW[];
+static const char EXPOSURE_MODE_BEACH[];
+static const char EXPOSURE_MODE_APERTURE[];
+static const char EXPOSURE_MODE_SMALL_APERTURE[];
+static const char EXPOSURE_MODE_FACE[];
+
+// TI extensions to standard android focus presets.
+static const char FOCUS_MODE_PORTRAIT[];
+static const char FOCUS_MODE_EXTENDED[];
+static const char FOCUS_MODE_FACE[];
+static const char FOCUS_MODE_OFF[];
+
+// TI extensions to add iso values
+static const char ISO_MODE_AUTO[];
+static const char ISO_MODE_100[];
+static const char ISO_MODE_200[];
+static const char ISO_MODE_400[];
+static const char ISO_MODE_800[];
+static const char ISO_MODE_1000[];
+static const char ISO_MODE_1200[];
+static const char ISO_MODE_1600[];
+
+// TI extensions to add values for effect settings.
+static const char EFFECT_NATURAL[];
+static const char EFFECT_VIVID[];
+static const char EFFECT_COLOR_SWAP[];
+static const char EFFECT_BLACKWHITE[];
+
+//TI extensions for stereo frame layouts
+static const char KEY_S3D_PRV_FRAME_LAYOUT[];
+static const char KEY_S3D_PRV_FRAME_LAYOUT_VALUES[];
+static const char KEY_S3D_CAP_FRAME_LAYOUT[];
+static const char KEY_S3D_CAP_FRAME_LAYOUT_VALUES[];
+
+//TI extensions for stereo frame layouts
+static const char S3D_NONE[];
+static const char S3D_TB_FULL[];
+static const char S3D_SS_FULL[];
+static const char S3D_TB_SUBSAMPLED[];
+static const char S3D_SS_SUBSAMPLED[];
+
+//TI extensions for 3D resolutions
+static const char KEY_SUPPORTED_PICTURE_SUBSAMPLED_SIZES[];
+static const char KEY_SUPPORTED_PICTURE_TOPBOTTOM_SIZES[];
+static const char KEY_SUPPORTED_PICTURE_SIDEBYSIDE_SIZES[];
+static const char KEY_SUPPORTED_PREVIEW_SUBSAMPLED_SIZES[];
+static const char KEY_SUPPORTED_PREVIEW_TOPBOTTOM_SIZES[];
+static const char KEY_SUPPORTED_PREVIEW_SIDEBYSIDE_SIZES[];
+
+// TI extensions to add values for AutoConvergence settings.
+static const char AUTOCONVERGENCE_MODE_DISABLE[];
+static const char AUTOCONVERGENCE_MODE_FRAME[];
+static const char AUTOCONVERGENCE_MODE_CENTER[];
+static const char AUTOCONVERGENCE_MODE_TOUCH[];
+static const char AUTOCONVERGENCE_MODE_MANUAL[];
+
+//TI extensions for flash mode settings
+static const char FLASH_MODE_FILL_IN[];
+
+//TI extensions to add sensor orientation parameters
+static const char ORIENTATION_SENSOR_NONE[];
+static const char ORIENTATION_SENSOR_90[];
+static const char ORIENTATION_SENSOR_180[];
+static const char ORIENTATION_SENSOR_270[];
+
+
+//TI values for camera direction
+static const char FACING_FRONT[];
+static const char FACING_BACK[];
+
+static const char KEY_MECHANICAL_MISALIGNMENT_CORRECTION_SUPPORTED[];
+static const char KEY_MECHANICAL_MISALIGNMENT_CORRECTION[];
+
+//TI extensions for enable/disable algos
+static const char KEY_ALGO_EXTERNAL_GAMMA[];
+static const char KEY_ALGO_NSF1[];
+static const char KEY_ALGO_NSF2[];
+static const char KEY_ALGO_SHARPENING[];
+static const char KEY_ALGO_THREELINCOLORMAP[];
+static const char KEY_ALGO_GIC[];
+
+//Gamma table
+static const char KEY_GAMMA_TABLE[];
+
+static const char KEY_PREVIEW_FRAME_RATE_RANGE[];
+
+};
+
+} // namespace Camera
+} // namespace Ti
+
+#endif
diff --git a/camera/inc/V4LCameraAdapter/V4LCameraAdapter.h b/camera/inc/V4LCameraAdapter/V4LCameraAdapter.h
new file mode 100644
index 0000000..aa4993b
--- /dev/null
+++ b/camera/inc/V4LCameraAdapter/V4LCameraAdapter.h
@@ -0,0 +1,269 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+
+#ifndef V4L_CAMERA_ADAPTER_H
+#define V4L_CAMERA_ADAPTER_H
+
+#include <linux/videodev2.h>
+
+#include "CameraHal.h"
+#include "BaseCameraAdapter.h"
+#include "DebugUtils.h"
+#include "Decoder_libjpeg.h"
+#include "FrameDecoder.h"
+
+
+namespace Ti {
+namespace Camera {
+
+#ifndef V4L2_PIX_FMT_H264
+#define V4L2_PIX_FMT_H264 0
+#endif
+
+#define DEFAULT_PIXEL_FORMAT V4L2_PIX_FMT_YUYV
+#define DEFAULT_CAPTURE_FORMAT V4L2_PIX_FMT_YUYV
+
+#define NB_BUFFER 10
+#define MAX_PATH_LENGTH 20
+#define MAX_VIDEO_DEVICES 10
+#define DEVICE "/dev/videoxx"
+#define DEVICE_PATH "/dev/"
+#define DEVICE_NAME "videoxx"
+
+typedef int V4L_HANDLETYPE;
+
+struct CapPixelformat {
+ uint32_t pixelformat;
+ const char *param;
+};
+
+struct CapResolution {
+ size_t width, height;
+ char param[10];
+};
+
+struct CapU32 {
+ uint32_t num;
+ const char *param;
+};
+
+typedef CapU32 CapFramerate;
+
+struct VideoInfo {
+ struct v4l2_capability cap;
+ struct v4l2_format format;
+ struct v4l2_buffer buf;
+ struct v4l2_requestbuffers rb;
+ void *mem[NB_BUFFER];
+ void *CaptureBuffers[NB_BUFFER];
+ bool isStreaming;
+ int width;
+ int height;
+ int formatIn;
+ int framesizeIn;
+};
+
+typedef struct V4L_TI_CAPTYPE {
+ uint16_t ulPreviewFormatCount; // supported preview pixelformat count
+ uint32_t ePreviewFormats[32];
+ uint16_t ulPreviewResCount; // supported preview resolution sizes
+ CapResolution tPreviewRes[32];
+ uint16_t ulCaptureResCount; // supported capture resolution sizes
+ CapResolution tCaptureRes[32];
+ uint16_t ulFrameRateCount; // supported frame rate
+ uint16_t ulFrameRates[32];
+}V4L_TI_CAPTYPE;
+
+/**
+ * Class which completely abstracts the camera hardware interaction from camera hal
+ * TODO: Need to list down here, all the message types that will be supported by this class
+ Need to implement BufferProvider interface to use AllocateBuffer of OMX if needed
+ */
+class V4LCameraAdapter : public BaseCameraAdapter
+{
+public:
+
+    /*--------------------Constant declarations----------------------------------------*/
+    static const int32_t MAX_NO_BUFFERS = 20;
+
+    ///@remarks OMX Camera has six ports - buffer input, time input, preview, image, video, and meta data
+    static const int MAX_NO_PORTS = 6;
+
+    ///Five second timeout
+    static const int CAMERA_ADAPTER_TIMEOUT = 5000*1000;
+
+public:
+
+    V4LCameraAdapter(size_t sensor_index, CameraHal* hal);
+    ~V4LCameraAdapter();
+
+
+    ///Initializes the camera adapter and creates any resources required
+    virtual status_t initialize(CameraProperties::Properties*);
+
+    //APIs to configure Camera adapter and get the current parameter set
+    virtual status_t setParameters(const android::CameraParameters& params);
+    virtual void getParameters(android::CameraParameters& params);
+
+    // API
+    virtual status_t UseBuffersPreview(CameraBuffer *bufArr, int num);
+    virtual status_t UseBuffersCapture(CameraBuffer *bufArr, int num);
+
+    // Queries the V4L2 device's capabilities and fills params; static so it
+    // can be used during enumeration before an adapter instance exists.
+    static status_t getCaps(const int sensorId, CameraProperties::Properties* params, V4L_HANDLETYPE handle);
+
+    void setupWorkingMode();
+
+protected:
+
+//----------Parent class method implementation------------------------------------
+    virtual status_t startPreview();
+    virtual status_t stopPreview();
+    virtual status_t takePicture();
+    virtual status_t stopImageCapture();
+    virtual status_t autoFocus();
+    virtual status_t useBuffers(CameraMode mode, CameraBuffer *bufArr, int num, size_t length, unsigned int queueable);
+    virtual status_t fillThisBuffer(CameraBuffer *frameBuf, CameraFrame::FrameType frameType);
+    virtual status_t getFrameSize(size_t &width, size_t &height);
+    virtual status_t getPictureBufferSize(CameraFrame &frame, size_t bufferCount);
+    virtual status_t getFrameDataSize(size_t &dataFrameSize, size_t bufferCount);
+    virtual void onOrientationEvent(uint32_t orientation, uint32_t tilt);
+//-----------------------------------------------------------------------------
+
+
+private:
+
+    // Thread that repeatedly drives previewThread() while preview is active.
+    class PreviewThread : public android::Thread {
+        V4LCameraAdapter* mAdapter;
+        public:
+            PreviewThread(V4LCameraAdapter* hw) :
+                Thread(false), mAdapter(hw) { }
+            virtual void onFirstRef() {
+                run("CameraPreviewThread", android::PRIORITY_URGENT_DISPLAY);
+            }
+            virtual bool threadLoop() {
+                mAdapter->previewThread();
+                // loop until we need to quit
+                return true;
+            }
+    };
+
+    //Used for calculation of the average frame rate during preview
+    status_t recalculateFPS();
+
+    // Dequeues the next captured frame; index and filledLen are output
+    // parameters. NOTE(review): presumably returns a pointer to the frame
+    // data, NULL on failure — confirm against V4LCameraAdapter.cpp.
+    char * GetFrame(int &index, int &filledLen);
+
+    int previewThread();
+
+private:
+    //capabilities data
+    static const CapPixelformat mPixelformats [];
+    static const CapResolution mPreviewRes [];
+    static const CapFramerate mFramerates [];
+    static const CapResolution mImageCapRes [];
+
+    //camera defaults
+    static const char DEFAULT_PREVIEW_FORMAT[];
+    static const char DEFAULT_PREVIEW_SIZE[];
+    static const char DEFAULT_FRAMERATE[];
+    static const char DEFAULT_NUM_PREV_BUFS[];
+
+    static const char DEFAULT_PICTURE_FORMAT[];
+    static const char DEFAULT_PICTURE_SIZE[];
+    static const char DEFAULT_FOCUS_MODE[];
+    static const char DEFAULT_FRAMERATE_RANGE[];
+    static const char * DEFAULT_VSTAB;
+    static const char * DEFAULT_VNF;
+
+    // Helpers that translate V4L_TI_CAPTYPE data into CameraProperties.
+    static status_t insertDefaults(CameraProperties::Properties*, V4L_TI_CAPTYPE&);
+    static status_t insertCapabilities(CameraProperties::Properties*, V4L_TI_CAPTYPE&);
+    static status_t insertPreviewFormats(CameraProperties::Properties* , V4L_TI_CAPTYPE&);
+    static status_t insertPreviewSizes(CameraProperties::Properties* , V4L_TI_CAPTYPE&);
+    static status_t insertImageSizes(CameraProperties::Properties* , V4L_TI_CAPTYPE&);
+    static status_t insertFrameRates(CameraProperties::Properties* , V4L_TI_CAPTYPE&);
+    static status_t sortAscend(V4L_TI_CAPTYPE&, uint16_t ) ;
+
+    // Thin wrappers over the V4L2 ioctl interface (VIDIOC_*).
+    status_t v4lIoctl(int, int, void*);
+    status_t v4lInitMmap(int& count, int width, int height);
+    status_t v4lInitUsrPtr(int&);
+    status_t v4lInitDmaBuf(int& count, int width, int height);
+    status_t v4lStartStreaming();
+    status_t v4lStopStreaming(int nBufferCount);
+    status_t v4lSetFormat(int, int, uint32_t);
+    status_t restartPreview();
+    status_t applyFpsValue();
+    status_t returnBufferToV4L(int id);
+    void returnOutputBuffer(int index);
+    // True when the capture format must pass through a FrameDecoder
+    // (e.g. MJPEG/H264) before display.
+    bool isNeedToUseDecoder() const;
+
+    int mPreviewBufferCount;
+    int mPreviewBufferCountQueueable;
+    int mCaptureBufferCount;
+    int mCaptureBufferCountQueueable;
+    CameraBuffer *mPreviewBufs[NB_BUFFER];
+    android::KeyedVector<CameraBuffer *, int> mCaptureBufs;
+    CameraBuffer *mCameraBuffers;
+    android::sp<MemoryManager> mMemoryManager;
+
+    android::CameraParameters mParams;
+
+    bool mPreviewing;
+    bool mCapturing;
+    mutable android::Mutex mLock;
+
+    // FPS bookkeeping used by recalculateFPS()
+    int mFrameCount;
+    int mLastFrameCount;
+    unsigned int mIter;
+    nsecs_t mLastFPSTime;
+
+    //variables holding the estimated framerate
+    float mFPS, mLastFPS;
+
+    int mSensorIndex;
+
+    bool mDeinterlaceEnabled;
+
+    // protected by mLock
+    android::sp<PreviewThread> mPreviewThread;
+
+    struct VideoInfo *mVideoInfo;
+    int mCameraHandle;             // fd of the opened /dev/videoN device
+
+    int nQueued;                   // buffers currently queued to the driver
+    int nDequeued;                 // buffers dequeued from the driver
+    int mQueuedOutputBuffers;
+
+    FrameDecoder* mDecoder;
+    android::Vector< android::sp<MediaBuffer> > mInBuffers;
+    android::Vector< android::sp<MediaBuffer> > mOutBuffers;
+
+    android::Mutex mV4LLock;
+
+    int mPixelFormat;
+    int mFrameRate;
+
+    android::Mutex mStopLock;
+    android::Condition mStopCondition;
+
+    CameraHal* mCameraHal;
+    int mSkipFramesCount;
+};
+
+} // namespace Camera
+} // namespace Ti
+
+#endif //V4L_CAMERA_ADAPTER_H
diff --git a/camera/inc/VideoMetadata.h b/camera/inc/VideoMetadata.h
new file mode 100644
index 0000000..f05ee50
--- /dev/null
+++ b/camera/inc/VideoMetadata.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef VIDEO_METADATA_H
+#define VIDEO_METADATA_H
+
+/* This structure is used to pass buffer offset from Camera-Hal to Encoder component
+ * for specific algorithms like VSTAB & VNF
+ */
+
+typedef struct
+{
+    int metadataBufferType;  // metadata buffer type tag — NOTE(review):
+                             // presumably a kMetadataBuffer* value consumed
+                             // by the video encoder; confirm with the encoder
+                             // component.
+    void* handle;            // native buffer handle being passed by reference
+    int offset;              // byte offset into the buffer (VSTAB/VNF crop)
+}
+video_metadata_t;
+
+#endif
diff --git a/kernel-headers/uapi/linux/v4l2-common.h b/kernel-headers/uapi/linux/v4l2-common.h
new file mode 100644
index 0000000..4f0667e
--- /dev/null
+++ b/kernel-headers/uapi/linux/v4l2-common.h
@@ -0,0 +1,71 @@
+/*
+ * include/linux/v4l2-common.h
+ *
+ * Common V4L2 and V4L2 subdev definitions.
+ *
+ * Users are advised to #include this file either through videodev2.h
+ * (V4L2) or through v4l2-subdev.h (V4L2 subdev) rather than to refer
+ * to this file directly.
+ *
+ * Copyright (C) 2012 Nokia Corporation
+ * Contact: Sakari Ailus <sakari.ailus@iki.fi>
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU General Public License
+ * version 2 as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful, but
+ * WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+ * 02110-1301 USA
+ *
+ */
+
+#ifndef __V4L2_COMMON__
+#define __V4L2_COMMON__
+
+/*
+ *
+ * Selection interface definitions
+ *
+ */
+
+/* Current cropping area */
+#define V4L2_SEL_TGT_CROP 0x0000
+/* Default cropping area */
+#define V4L2_SEL_TGT_CROP_DEFAULT 0x0001
+/* Cropping bounds */
+#define V4L2_SEL_TGT_CROP_BOUNDS 0x0002
+/* Current composing area */
+#define V4L2_SEL_TGT_COMPOSE 0x0100
+/* Default composing area */
+#define V4L2_SEL_TGT_COMPOSE_DEFAULT 0x0101
+/* Composing bounds */
+#define V4L2_SEL_TGT_COMPOSE_BOUNDS 0x0102
+/* Current composing area plus all padding pixels */
+#define V4L2_SEL_TGT_COMPOSE_PADDED 0x0103
+
+/* Backward compatibility target definitions --- to be removed. */
+#define V4L2_SEL_TGT_CROP_ACTIVE V4L2_SEL_TGT_CROP
+#define V4L2_SEL_TGT_COMPOSE_ACTIVE V4L2_SEL_TGT_COMPOSE
+#define V4L2_SUBDEV_SEL_TGT_CROP_ACTUAL V4L2_SEL_TGT_CROP
+#define V4L2_SUBDEV_SEL_TGT_COMPOSE_ACTUAL V4L2_SEL_TGT_COMPOSE
+#define V4L2_SUBDEV_SEL_TGT_CROP_BOUNDS V4L2_SEL_TGT_CROP_BOUNDS
+#define V4L2_SUBDEV_SEL_TGT_COMPOSE_BOUNDS V4L2_SEL_TGT_COMPOSE_BOUNDS
+
+/* Selection flags */
+#define V4L2_SEL_FLAG_GE (1 << 0)
+#define V4L2_SEL_FLAG_LE (1 << 1)
+#define V4L2_SEL_FLAG_KEEP_CONFIG (1 << 2)
+
+/* Backward compatibility flag definitions --- to be removed. */
+#define V4L2_SUBDEV_SEL_FLAG_SIZE_GE V4L2_SEL_FLAG_GE
+#define V4L2_SUBDEV_SEL_FLAG_SIZE_LE V4L2_SEL_FLAG_LE
+#define V4L2_SUBDEV_SEL_FLAG_KEEP_CONFIG V4L2_SEL_FLAG_KEEP_CONFIG
+
+#endif /* __V4L2_COMMON__ */
diff --git a/kernel-headers/uapi/linux/v4l2-controls.h b/kernel-headers/uapi/linux/v4l2-controls.h
new file mode 100644
index 0000000..2cbe605
--- /dev/null
+++ b/kernel-headers/uapi/linux/v4l2-controls.h
@@ -0,0 +1,898 @@
+/*
+ * Video for Linux Two controls header file
+ *
+ * Copyright (C) 1999-2012 the contributors
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * Alternatively you can redistribute this file under the terms of the
+ * BSD license as stated below:
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in
+ * the documentation and/or other materials provided with the
+ * distribution.
+ * 3. The names of its contributors may not be used to endorse or promote
+ * products derived from this software without specific prior written
+ * permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+ * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+ * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+ * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * The contents of this header was split off from videodev2.h. All control
+ * definitions should be added to this header, which is included by
+ * videodev2.h.
+ */
+
+#ifndef __LINUX_V4L2_CONTROLS_H
+#define __LINUX_V4L2_CONTROLS_H
+
+/* Control classes */
+#define V4L2_CTRL_CLASS_USER 0x00980000 /* Old-style 'user' controls */
+#define V4L2_CTRL_CLASS_MPEG 0x00990000 /* MPEG-compression controls */
+#define V4L2_CTRL_CLASS_CAMERA 0x009a0000 /* Camera class controls */
+#define V4L2_CTRL_CLASS_FM_TX 0x009b0000 /* FM Modulator controls */
+#define V4L2_CTRL_CLASS_FLASH 0x009c0000 /* Camera flash controls */
+#define V4L2_CTRL_CLASS_JPEG 0x009d0000 /* JPEG-compression controls */
+#define V4L2_CTRL_CLASS_IMAGE_SOURCE 0x009e0000 /* Image source controls */
+#define V4L2_CTRL_CLASS_IMAGE_PROC 0x009f0000 /* Image processing controls */
+#define V4L2_CTRL_CLASS_DV 0x00a00000 /* Digital Video controls */
+#define V4L2_CTRL_CLASS_FM_RX 0x00a10000 /* FM Receiver controls */
+
+/* User-class control IDs */
+
+#define V4L2_CID_BASE (V4L2_CTRL_CLASS_USER | 0x900)
+#define V4L2_CID_USER_BASE V4L2_CID_BASE
+#define V4L2_CID_USER_CLASS (V4L2_CTRL_CLASS_USER | 1)
+#define V4L2_CID_BRIGHTNESS (V4L2_CID_BASE+0)
+#define V4L2_CID_CONTRAST (V4L2_CID_BASE+1)
+#define V4L2_CID_SATURATION (V4L2_CID_BASE+2)
+#define V4L2_CID_HUE (V4L2_CID_BASE+3)
+#define V4L2_CID_AUDIO_VOLUME (V4L2_CID_BASE+5)
+#define V4L2_CID_AUDIO_BALANCE (V4L2_CID_BASE+6)
+#define V4L2_CID_AUDIO_BASS (V4L2_CID_BASE+7)
+#define V4L2_CID_AUDIO_TREBLE (V4L2_CID_BASE+8)
+#define V4L2_CID_AUDIO_MUTE (V4L2_CID_BASE+9)
+#define V4L2_CID_AUDIO_LOUDNESS (V4L2_CID_BASE+10)
+#define V4L2_CID_BLACK_LEVEL (V4L2_CID_BASE+11) /* Deprecated */
+#define V4L2_CID_AUTO_WHITE_BALANCE (V4L2_CID_BASE+12)
+#define V4L2_CID_DO_WHITE_BALANCE (V4L2_CID_BASE+13)
+#define V4L2_CID_RED_BALANCE (V4L2_CID_BASE+14)
+#define V4L2_CID_BLUE_BALANCE (V4L2_CID_BASE+15)
+#define V4L2_CID_GAMMA (V4L2_CID_BASE+16)
+#define V4L2_CID_WHITENESS (V4L2_CID_GAMMA) /* Deprecated */
+#define V4L2_CID_EXPOSURE (V4L2_CID_BASE+17)
+#define V4L2_CID_AUTOGAIN (V4L2_CID_BASE+18)
+#define V4L2_CID_GAIN (V4L2_CID_BASE+19)
+#define V4L2_CID_HFLIP (V4L2_CID_BASE+20)
+#define V4L2_CID_VFLIP (V4L2_CID_BASE+21)
+
+#define V4L2_CID_POWER_LINE_FREQUENCY (V4L2_CID_BASE+24)
+enum v4l2_power_line_frequency {
+ V4L2_CID_POWER_LINE_FREQUENCY_DISABLED = 0,
+ V4L2_CID_POWER_LINE_FREQUENCY_50HZ = 1,
+ V4L2_CID_POWER_LINE_FREQUENCY_60HZ = 2,
+ V4L2_CID_POWER_LINE_FREQUENCY_AUTO = 3,
+};
+#define V4L2_CID_HUE_AUTO (V4L2_CID_BASE+25)
+#define V4L2_CID_WHITE_BALANCE_TEMPERATURE (V4L2_CID_BASE+26)
+#define V4L2_CID_SHARPNESS (V4L2_CID_BASE+27)
+#define V4L2_CID_BACKLIGHT_COMPENSATION (V4L2_CID_BASE+28)
+#define V4L2_CID_CHROMA_AGC (V4L2_CID_BASE+29)
+#define V4L2_CID_COLOR_KILLER (V4L2_CID_BASE+30)
+#define V4L2_CID_COLORFX (V4L2_CID_BASE+31)
+enum v4l2_colorfx {
+ V4L2_COLORFX_NONE = 0,
+ V4L2_COLORFX_BW = 1,
+ V4L2_COLORFX_SEPIA = 2,
+ V4L2_COLORFX_NEGATIVE = 3,
+ V4L2_COLORFX_EMBOSS = 4,
+ V4L2_COLORFX_SKETCH = 5,
+ V4L2_COLORFX_SKY_BLUE = 6,
+ V4L2_COLORFX_GRASS_GREEN = 7,
+ V4L2_COLORFX_SKIN_WHITEN = 8,
+ V4L2_COLORFX_VIVID = 9,
+ V4L2_COLORFX_AQUA = 10,
+ V4L2_COLORFX_ART_FREEZE = 11,
+ V4L2_COLORFX_SILHOUETTE = 12,
+ V4L2_COLORFX_SOLARIZATION = 13,
+ V4L2_COLORFX_ANTIQUE = 14,
+ V4L2_COLORFX_SET_CBCR = 15,
+};
+#define V4L2_CID_AUTOBRIGHTNESS (V4L2_CID_BASE+32)
+#define V4L2_CID_BAND_STOP_FILTER (V4L2_CID_BASE+33)
+
+#define V4L2_CID_ROTATE (V4L2_CID_BASE+34)
+#define V4L2_CID_BG_COLOR (V4L2_CID_BASE+35)
+
+#define V4L2_CID_CHROMA_GAIN (V4L2_CID_BASE+36)
+
+#define V4L2_CID_ILLUMINATORS_1 (V4L2_CID_BASE+37)
+#define V4L2_CID_ILLUMINATORS_2 (V4L2_CID_BASE+38)
+
+#define V4L2_CID_MIN_BUFFERS_FOR_CAPTURE (V4L2_CID_BASE+39)
+#define V4L2_CID_MIN_BUFFERS_FOR_OUTPUT (V4L2_CID_BASE+40)
+
+#define V4L2_CID_ALPHA_COMPONENT (V4L2_CID_BASE+41)
+#define V4L2_CID_COLORFX_CBCR (V4L2_CID_BASE+42)
+
+/* last CID + 1 */
+#define V4L2_CID_LASTP1 (V4L2_CID_BASE+43)
+
+/* USER-class private control IDs */
+
+/* The base for the meye driver controls. See linux/meye.h for the list
+ * of controls. We reserve 16 controls for this driver. */
+#define V4L2_CID_USER_MEYE_BASE (V4L2_CID_USER_BASE + 0x1000)
+
+/* The base for the bttv driver controls.
+ * We reserve 32 controls for this driver. */
+#define V4L2_CID_USER_BTTV_BASE (V4L2_CID_USER_BASE + 0x1010)
+
+
+/* The base for the s2255 driver controls.
+ * We reserve 16 controls for this driver. */
+#define V4L2_CID_USER_S2255_BASE (V4L2_CID_USER_BASE + 0x1030)
+
+/* The base for the si476x driver controls. See include/media/si476x.h for the list
+ * of controls. Total of 16 controls is reserved for this driver */
+#define V4L2_CID_USER_SI476X_BASE (V4L2_CID_USER_BASE + 0x1040)
+
+/* The base for the TI VPE driver controls. Total of 16 controls is reserved for
+ * this driver */
+#define V4L2_CID_USER_TI_VPE_BASE (V4L2_CID_USER_BASE + 0x1050)
+
+/* The base for the saa7134 driver controls.
+ * We reserve 16 controls for this driver. */
+#define V4L2_CID_USER_SAA7134_BASE (V4L2_CID_USER_BASE + 0x1060)
+
+/* MPEG-class control IDs */
+/* The MPEG controls are applicable to all codec controls
+ * and the 'MPEG' part of the define is historical */
+
+#define V4L2_CID_MPEG_BASE (V4L2_CTRL_CLASS_MPEG | 0x900)
+#define V4L2_CID_MPEG_CLASS (V4L2_CTRL_CLASS_MPEG | 1)
+
+/* MPEG streams, specific to multiplexed streams */
+#define V4L2_CID_MPEG_STREAM_TYPE (V4L2_CID_MPEG_BASE+0)
+enum v4l2_mpeg_stream_type {
+ V4L2_MPEG_STREAM_TYPE_MPEG2_PS = 0, /* MPEG-2 program stream */
+ V4L2_MPEG_STREAM_TYPE_MPEG2_TS = 1, /* MPEG-2 transport stream */
+ V4L2_MPEG_STREAM_TYPE_MPEG1_SS = 2, /* MPEG-1 system stream */
+ V4L2_MPEG_STREAM_TYPE_MPEG2_DVD = 3, /* MPEG-2 DVD-compatible stream */
+ V4L2_MPEG_STREAM_TYPE_MPEG1_VCD = 4, /* MPEG-1 VCD-compatible stream */
+ V4L2_MPEG_STREAM_TYPE_MPEG2_SVCD = 5, /* MPEG-2 SVCD-compatible stream */
+};
+#define V4L2_CID_MPEG_STREAM_PID_PMT (V4L2_CID_MPEG_BASE+1)
+#define V4L2_CID_MPEG_STREAM_PID_AUDIO (V4L2_CID_MPEG_BASE+2)
+#define V4L2_CID_MPEG_STREAM_PID_VIDEO (V4L2_CID_MPEG_BASE+3)
+#define V4L2_CID_MPEG_STREAM_PID_PCR (V4L2_CID_MPEG_BASE+4)
+#define V4L2_CID_MPEG_STREAM_PES_ID_AUDIO (V4L2_CID_MPEG_BASE+5)
+#define V4L2_CID_MPEG_STREAM_PES_ID_VIDEO (V4L2_CID_MPEG_BASE+6)
+#define V4L2_CID_MPEG_STREAM_VBI_FMT (V4L2_CID_MPEG_BASE+7)
+enum v4l2_mpeg_stream_vbi_fmt {
+ V4L2_MPEG_STREAM_VBI_FMT_NONE = 0, /* No VBI in the MPEG stream */
+ V4L2_MPEG_STREAM_VBI_FMT_IVTV = 1, /* VBI in private packets, IVTV format */
+};
+
+/* MPEG audio controls specific to multiplexed streams */
+#define V4L2_CID_MPEG_AUDIO_SAMPLING_FREQ (V4L2_CID_MPEG_BASE+100)
+enum v4l2_mpeg_audio_sampling_freq {
+ V4L2_MPEG_AUDIO_SAMPLING_FREQ_44100 = 0,
+ V4L2_MPEG_AUDIO_SAMPLING_FREQ_48000 = 1,
+ V4L2_MPEG_AUDIO_SAMPLING_FREQ_32000 = 2,
+};
+#define V4L2_CID_MPEG_AUDIO_ENCODING (V4L2_CID_MPEG_BASE+101)
+enum v4l2_mpeg_audio_encoding {
+ V4L2_MPEG_AUDIO_ENCODING_LAYER_1 = 0,
+ V4L2_MPEG_AUDIO_ENCODING_LAYER_2 = 1,
+ V4L2_MPEG_AUDIO_ENCODING_LAYER_3 = 2,
+ V4L2_MPEG_AUDIO_ENCODING_AAC = 3,
+ V4L2_MPEG_AUDIO_ENCODING_AC3 = 4,
+};
+#define V4L2_CID_MPEG_AUDIO_L1_BITRATE (V4L2_CID_MPEG_BASE+102)
+enum v4l2_mpeg_audio_l1_bitrate {
+ V4L2_MPEG_AUDIO_L1_BITRATE_32K = 0,
+ V4L2_MPEG_AUDIO_L1_BITRATE_64K = 1,
+ V4L2_MPEG_AUDIO_L1_BITRATE_96K = 2,
+ V4L2_MPEG_AUDIO_L1_BITRATE_128K = 3,
+ V4L2_MPEG_AUDIO_L1_BITRATE_160K = 4,
+ V4L2_MPEG_AUDIO_L1_BITRATE_192K = 5,
+ V4L2_MPEG_AUDIO_L1_BITRATE_224K = 6,
+ V4L2_MPEG_AUDIO_L1_BITRATE_256K = 7,
+ V4L2_MPEG_AUDIO_L1_BITRATE_288K = 8,
+ V4L2_MPEG_AUDIO_L1_BITRATE_320K = 9,
+ V4L2_MPEG_AUDIO_L1_BITRATE_352K = 10,
+ V4L2_MPEG_AUDIO_L1_BITRATE_384K = 11,
+ V4L2_MPEG_AUDIO_L1_BITRATE_416K = 12,
+ V4L2_MPEG_AUDIO_L1_BITRATE_448K = 13,
+};
+#define V4L2_CID_MPEG_AUDIO_L2_BITRATE (V4L2_CID_MPEG_BASE+103)
+enum v4l2_mpeg_audio_l2_bitrate {
+ V4L2_MPEG_AUDIO_L2_BITRATE_32K = 0,
+ V4L2_MPEG_AUDIO_L2_BITRATE_48K = 1,
+ V4L2_MPEG_AUDIO_L2_BITRATE_56K = 2,
+ V4L2_MPEG_AUDIO_L2_BITRATE_64K = 3,
+ V4L2_MPEG_AUDIO_L2_BITRATE_80K = 4,
+ V4L2_MPEG_AUDIO_L2_BITRATE_96K = 5,
+ V4L2_MPEG_AUDIO_L2_BITRATE_112K = 6,
+ V4L2_MPEG_AUDIO_L2_BITRATE_128K = 7,
+ V4L2_MPEG_AUDIO_L2_BITRATE_160K = 8,
+ V4L2_MPEG_AUDIO_L2_BITRATE_192K = 9,
+ V4L2_MPEG_AUDIO_L2_BITRATE_224K = 10,
+ V4L2_MPEG_AUDIO_L2_BITRATE_256K = 11,
+ V4L2_MPEG_AUDIO_L2_BITRATE_320K = 12,
+ V4L2_MPEG_AUDIO_L2_BITRATE_384K = 13,
+};
+#define V4L2_CID_MPEG_AUDIO_L3_BITRATE (V4L2_CID_MPEG_BASE+104)
+enum v4l2_mpeg_audio_l3_bitrate {
+ V4L2_MPEG_AUDIO_L3_BITRATE_32K = 0,
+ V4L2_MPEG_AUDIO_L3_BITRATE_40K = 1,
+ V4L2_MPEG_AUDIO_L3_BITRATE_48K = 2,
+ V4L2_MPEG_AUDIO_L3_BITRATE_56K = 3,
+ V4L2_MPEG_AUDIO_L3_BITRATE_64K = 4,
+ V4L2_MPEG_AUDIO_L3_BITRATE_80K = 5,
+ V4L2_MPEG_AUDIO_L3_BITRATE_96K = 6,
+ V4L2_MPEG_AUDIO_L3_BITRATE_112K = 7,
+ V4L2_MPEG_AUDIO_L3_BITRATE_128K = 8,
+ V4L2_MPEG_AUDIO_L3_BITRATE_160K = 9,
+ V4L2_MPEG_AUDIO_L3_BITRATE_192K = 10,
+ V4L2_MPEG_AUDIO_L3_BITRATE_224K = 11,
+ V4L2_MPEG_AUDIO_L3_BITRATE_256K = 12,
+ V4L2_MPEG_AUDIO_L3_BITRATE_320K = 13,
+};
+#define V4L2_CID_MPEG_AUDIO_MODE (V4L2_CID_MPEG_BASE+105)
+enum v4l2_mpeg_audio_mode {
+ V4L2_MPEG_AUDIO_MODE_STEREO = 0,
+ V4L2_MPEG_AUDIO_MODE_JOINT_STEREO = 1,
+ V4L2_MPEG_AUDIO_MODE_DUAL = 2,
+ V4L2_MPEG_AUDIO_MODE_MONO = 3,
+};
+#define V4L2_CID_MPEG_AUDIO_MODE_EXTENSION (V4L2_CID_MPEG_BASE+106)
+enum v4l2_mpeg_audio_mode_extension {
+ V4L2_MPEG_AUDIO_MODE_EXTENSION_BOUND_4 = 0,
+ V4L2_MPEG_AUDIO_MODE_EXTENSION_BOUND_8 = 1,
+ V4L2_MPEG_AUDIO_MODE_EXTENSION_BOUND_12 = 2,
+ V4L2_MPEG_AUDIO_MODE_EXTENSION_BOUND_16 = 3,
+};
+#define V4L2_CID_MPEG_AUDIO_EMPHASIS (V4L2_CID_MPEG_BASE+107)
+enum v4l2_mpeg_audio_emphasis {
+ V4L2_MPEG_AUDIO_EMPHASIS_NONE = 0,
+ V4L2_MPEG_AUDIO_EMPHASIS_50_DIV_15_uS = 1,
+ V4L2_MPEG_AUDIO_EMPHASIS_CCITT_J17 = 2,
+};
+#define V4L2_CID_MPEG_AUDIO_CRC (V4L2_CID_MPEG_BASE+108)
+enum v4l2_mpeg_audio_crc {
+ V4L2_MPEG_AUDIO_CRC_NONE = 0,
+ V4L2_MPEG_AUDIO_CRC_CRC16 = 1,
+};
+#define V4L2_CID_MPEG_AUDIO_MUTE (V4L2_CID_MPEG_BASE+109)
+#define V4L2_CID_MPEG_AUDIO_AAC_BITRATE (V4L2_CID_MPEG_BASE+110)
+#define V4L2_CID_MPEG_AUDIO_AC3_BITRATE (V4L2_CID_MPEG_BASE+111)
+enum v4l2_mpeg_audio_ac3_bitrate {
+ V4L2_MPEG_AUDIO_AC3_BITRATE_32K = 0,
+ V4L2_MPEG_AUDIO_AC3_BITRATE_40K = 1,
+ V4L2_MPEG_AUDIO_AC3_BITRATE_48K = 2,
+ V4L2_MPEG_AUDIO_AC3_BITRATE_56K = 3,
+ V4L2_MPEG_AUDIO_AC3_BITRATE_64K = 4,
+ V4L2_MPEG_AUDIO_AC3_BITRATE_80K = 5,
+ V4L2_MPEG_AUDIO_AC3_BITRATE_96K = 6,
+ V4L2_MPEG_AUDIO_AC3_BITRATE_112K = 7,
+ V4L2_MPEG_AUDIO_AC3_BITRATE_128K = 8,
+ V4L2_MPEG_AUDIO_AC3_BITRATE_160K = 9,
+ V4L2_MPEG_AUDIO_AC3_BITRATE_192K = 10,
+ V4L2_MPEG_AUDIO_AC3_BITRATE_224K = 11,
+ V4L2_MPEG_AUDIO_AC3_BITRATE_256K = 12,
+ V4L2_MPEG_AUDIO_AC3_BITRATE_320K = 13,
+ V4L2_MPEG_AUDIO_AC3_BITRATE_384K = 14,
+ V4L2_MPEG_AUDIO_AC3_BITRATE_448K = 15,
+ V4L2_MPEG_AUDIO_AC3_BITRATE_512K = 16,
+ V4L2_MPEG_AUDIO_AC3_BITRATE_576K = 17,
+ V4L2_MPEG_AUDIO_AC3_BITRATE_640K = 18,
+};
+#define V4L2_CID_MPEG_AUDIO_DEC_PLAYBACK (V4L2_CID_MPEG_BASE+112)
+enum v4l2_mpeg_audio_dec_playback {
+ V4L2_MPEG_AUDIO_DEC_PLAYBACK_AUTO = 0,
+ V4L2_MPEG_AUDIO_DEC_PLAYBACK_STEREO = 1,
+ V4L2_MPEG_AUDIO_DEC_PLAYBACK_LEFT = 2,
+ V4L2_MPEG_AUDIO_DEC_PLAYBACK_RIGHT = 3,
+ V4L2_MPEG_AUDIO_DEC_PLAYBACK_MONO = 4,
+ V4L2_MPEG_AUDIO_DEC_PLAYBACK_SWAPPED_STEREO = 5,
+};
+#define V4L2_CID_MPEG_AUDIO_DEC_MULTILINGUAL_PLAYBACK (V4L2_CID_MPEG_BASE+113)
+
+/* MPEG video controls specific to multiplexed streams */
+#define V4L2_CID_MPEG_VIDEO_ENCODING (V4L2_CID_MPEG_BASE+200)
+enum v4l2_mpeg_video_encoding {
+ V4L2_MPEG_VIDEO_ENCODING_MPEG_1 = 0,
+ V4L2_MPEG_VIDEO_ENCODING_MPEG_2 = 1,
+ V4L2_MPEG_VIDEO_ENCODING_MPEG_4_AVC = 2,
+};
+#define V4L2_CID_MPEG_VIDEO_ASPECT (V4L2_CID_MPEG_BASE+201)
+enum v4l2_mpeg_video_aspect {
+ V4L2_MPEG_VIDEO_ASPECT_1x1 = 0,
+ V4L2_MPEG_VIDEO_ASPECT_4x3 = 1,
+ V4L2_MPEG_VIDEO_ASPECT_16x9 = 2,
+ V4L2_MPEG_VIDEO_ASPECT_221x100 = 3,
+};
+#define V4L2_CID_MPEG_VIDEO_B_FRAMES (V4L2_CID_MPEG_BASE+202)
+#define V4L2_CID_MPEG_VIDEO_GOP_SIZE (V4L2_CID_MPEG_BASE+203)
+#define V4L2_CID_MPEG_VIDEO_GOP_CLOSURE (V4L2_CID_MPEG_BASE+204)
+#define V4L2_CID_MPEG_VIDEO_PULLDOWN (V4L2_CID_MPEG_BASE+205)
+#define V4L2_CID_MPEG_VIDEO_BITRATE_MODE (V4L2_CID_MPEG_BASE+206)
+enum v4l2_mpeg_video_bitrate_mode {
+ V4L2_MPEG_VIDEO_BITRATE_MODE_VBR = 0,
+ V4L2_MPEG_VIDEO_BITRATE_MODE_CBR = 1,
+};
+#define V4L2_CID_MPEG_VIDEO_BITRATE (V4L2_CID_MPEG_BASE+207)
+#define V4L2_CID_MPEG_VIDEO_BITRATE_PEAK (V4L2_CID_MPEG_BASE+208)
+#define V4L2_CID_MPEG_VIDEO_TEMPORAL_DECIMATION (V4L2_CID_MPEG_BASE+209)
+#define V4L2_CID_MPEG_VIDEO_MUTE (V4L2_CID_MPEG_BASE+210)
+#define V4L2_CID_MPEG_VIDEO_MUTE_YUV (V4L2_CID_MPEG_BASE+211)
+#define V4L2_CID_MPEG_VIDEO_DECODER_SLICE_INTERFACE (V4L2_CID_MPEG_BASE+212)
+#define V4L2_CID_MPEG_VIDEO_DECODER_MPEG4_DEBLOCK_FILTER (V4L2_CID_MPEG_BASE+213)
+#define V4L2_CID_MPEG_VIDEO_CYCLIC_INTRA_REFRESH_MB (V4L2_CID_MPEG_BASE+214)
+#define V4L2_CID_MPEG_VIDEO_FRAME_RC_ENABLE (V4L2_CID_MPEG_BASE+215)
+#define V4L2_CID_MPEG_VIDEO_HEADER_MODE (V4L2_CID_MPEG_BASE+216)
+enum v4l2_mpeg_video_header_mode {
+ V4L2_MPEG_VIDEO_HEADER_MODE_SEPARATE = 0,
+ V4L2_MPEG_VIDEO_HEADER_MODE_JOINED_WITH_1ST_FRAME = 1,
+
+};
+#define V4L2_CID_MPEG_VIDEO_MAX_REF_PIC (V4L2_CID_MPEG_BASE+217)
+#define V4L2_CID_MPEG_VIDEO_MB_RC_ENABLE (V4L2_CID_MPEG_BASE+218)
+#define V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MAX_BYTES (V4L2_CID_MPEG_BASE+219)
+#define V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MAX_MB (V4L2_CID_MPEG_BASE+220)
+#define V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MODE (V4L2_CID_MPEG_BASE+221)
+enum v4l2_mpeg_video_multi_slice_mode {
+ V4L2_MPEG_VIDEO_MULTI_SLICE_MODE_SINGLE = 0,
+ V4L2_MPEG_VIDEO_MULTI_SICE_MODE_MAX_MB = 1,
+ V4L2_MPEG_VIDEO_MULTI_SICE_MODE_MAX_BYTES = 2,
+};
+#define V4L2_CID_MPEG_VIDEO_VBV_SIZE (V4L2_CID_MPEG_BASE+222)
+#define V4L2_CID_MPEG_VIDEO_DEC_PTS (V4L2_CID_MPEG_BASE+223)
+#define V4L2_CID_MPEG_VIDEO_DEC_FRAME (V4L2_CID_MPEG_BASE+224)
+#define V4L2_CID_MPEG_VIDEO_VBV_DELAY (V4L2_CID_MPEG_BASE+225)
+#define V4L2_CID_MPEG_VIDEO_REPEAT_SEQ_HEADER (V4L2_CID_MPEG_BASE+226)
+
+#define V4L2_CID_MPEG_VIDEO_H263_I_FRAME_QP (V4L2_CID_MPEG_BASE+300)
+#define V4L2_CID_MPEG_VIDEO_H263_P_FRAME_QP (V4L2_CID_MPEG_BASE+301)
+#define V4L2_CID_MPEG_VIDEO_H263_B_FRAME_QP (V4L2_CID_MPEG_BASE+302)
+#define V4L2_CID_MPEG_VIDEO_H263_MIN_QP (V4L2_CID_MPEG_BASE+303)
+#define V4L2_CID_MPEG_VIDEO_H263_MAX_QP (V4L2_CID_MPEG_BASE+304)
+#define V4L2_CID_MPEG_VIDEO_H264_I_FRAME_QP (V4L2_CID_MPEG_BASE+350)
+#define V4L2_CID_MPEG_VIDEO_H264_P_FRAME_QP (V4L2_CID_MPEG_BASE+351)
+#define V4L2_CID_MPEG_VIDEO_H264_B_FRAME_QP (V4L2_CID_MPEG_BASE+352)
+#define V4L2_CID_MPEG_VIDEO_H264_MIN_QP (V4L2_CID_MPEG_BASE+353)
+#define V4L2_CID_MPEG_VIDEO_H264_MAX_QP (V4L2_CID_MPEG_BASE+354)
+#define V4L2_CID_MPEG_VIDEO_H264_8X8_TRANSFORM (V4L2_CID_MPEG_BASE+355)
+#define V4L2_CID_MPEG_VIDEO_H264_CPB_SIZE (V4L2_CID_MPEG_BASE+356)
+#define V4L2_CID_MPEG_VIDEO_H264_ENTROPY_MODE (V4L2_CID_MPEG_BASE+357)
+enum v4l2_mpeg_video_h264_entropy_mode {
+ V4L2_MPEG_VIDEO_H264_ENTROPY_MODE_CAVLC = 0,
+ V4L2_MPEG_VIDEO_H264_ENTROPY_MODE_CABAC = 1,
+};
+#define V4L2_CID_MPEG_VIDEO_H264_I_PERIOD (V4L2_CID_MPEG_BASE+358)
+#define V4L2_CID_MPEG_VIDEO_H264_LEVEL (V4L2_CID_MPEG_BASE+359)
+enum v4l2_mpeg_video_h264_level {
+ V4L2_MPEG_VIDEO_H264_LEVEL_1_0 = 0,
+ V4L2_MPEG_VIDEO_H264_LEVEL_1B = 1,
+ V4L2_MPEG_VIDEO_H264_LEVEL_1_1 = 2,
+ V4L2_MPEG_VIDEO_H264_LEVEL_1_2 = 3,
+ V4L2_MPEG_VIDEO_H264_LEVEL_1_3 = 4,
+ V4L2_MPEG_VIDEO_H264_LEVEL_2_0 = 5,
+ V4L2_MPEG_VIDEO_H264_LEVEL_2_1 = 6,
+ V4L2_MPEG_VIDEO_H264_LEVEL_2_2 = 7,
+ V4L2_MPEG_VIDEO_H264_LEVEL_3_0 = 8,
+ V4L2_MPEG_VIDEO_H264_LEVEL_3_1 = 9,
+ V4L2_MPEG_VIDEO_H264_LEVEL_3_2 = 10,
+ V4L2_MPEG_VIDEO_H264_LEVEL_4_0 = 11,
+ V4L2_MPEG_VIDEO_H264_LEVEL_4_1 = 12,
+ V4L2_MPEG_VIDEO_H264_LEVEL_4_2 = 13,
+ V4L2_MPEG_VIDEO_H264_LEVEL_5_0 = 14,
+ V4L2_MPEG_VIDEO_H264_LEVEL_5_1 = 15,
+};
+#define V4L2_CID_MPEG_VIDEO_H264_LOOP_FILTER_ALPHA (V4L2_CID_MPEG_BASE+360)
+#define V4L2_CID_MPEG_VIDEO_H264_LOOP_FILTER_BETA (V4L2_CID_MPEG_BASE+361)
+#define V4L2_CID_MPEG_VIDEO_H264_LOOP_FILTER_MODE (V4L2_CID_MPEG_BASE+362)
+enum v4l2_mpeg_video_h264_loop_filter_mode {
+ V4L2_MPEG_VIDEO_H264_LOOP_FILTER_MODE_ENABLED = 0,
+ V4L2_MPEG_VIDEO_H264_LOOP_FILTER_MODE_DISABLED = 1,
+ V4L2_MPEG_VIDEO_H264_LOOP_FILTER_MODE_DISABLED_AT_SLICE_BOUNDARY = 2,
+};
+#define V4L2_CID_MPEG_VIDEO_H264_PROFILE (V4L2_CID_MPEG_BASE+363)
+enum v4l2_mpeg_video_h264_profile {
+ V4L2_MPEG_VIDEO_H264_PROFILE_BASELINE = 0,
+ V4L2_MPEG_VIDEO_H264_PROFILE_CONSTRAINED_BASELINE = 1,
+ V4L2_MPEG_VIDEO_H264_PROFILE_MAIN = 2,
+ V4L2_MPEG_VIDEO_H264_PROFILE_EXTENDED = 3,
+ V4L2_MPEG_VIDEO_H264_PROFILE_HIGH = 4,
+ V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_10 = 5,
+ V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_422 = 6,
+ V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_444_PREDICTIVE = 7,
+ V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_10_INTRA = 8,
+ V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_422_INTRA = 9,
+ V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_444_INTRA = 10,
+ V4L2_MPEG_VIDEO_H264_PROFILE_CAVLC_444_INTRA = 11,
+ V4L2_MPEG_VIDEO_H264_PROFILE_SCALABLE_BASELINE = 12,
+ V4L2_MPEG_VIDEO_H264_PROFILE_SCALABLE_HIGH = 13,
+ V4L2_MPEG_VIDEO_H264_PROFILE_SCALABLE_HIGH_INTRA = 14,
+ V4L2_MPEG_VIDEO_H264_PROFILE_STEREO_HIGH = 15,
+ V4L2_MPEG_VIDEO_H264_PROFILE_MULTIVIEW_HIGH = 16,
+};
+#define V4L2_CID_MPEG_VIDEO_H264_VUI_EXT_SAR_HEIGHT (V4L2_CID_MPEG_BASE+364)
+#define V4L2_CID_MPEG_VIDEO_H264_VUI_EXT_SAR_WIDTH (V4L2_CID_MPEG_BASE+365)
+#define V4L2_CID_MPEG_VIDEO_H264_VUI_SAR_ENABLE (V4L2_CID_MPEG_BASE+366)
+#define V4L2_CID_MPEG_VIDEO_H264_VUI_SAR_IDC (V4L2_CID_MPEG_BASE+367)
+enum v4l2_mpeg_video_h264_vui_sar_idc {
+ V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_UNSPECIFIED = 0,
+ V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_1x1 = 1,
+ V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_12x11 = 2,
+ V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_10x11 = 3,
+ V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_16x11 = 4,
+ V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_40x33 = 5,
+ V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_24x11 = 6,
+ V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_20x11 = 7,
+ V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_32x11 = 8,
+ V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_80x33 = 9,
+ V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_18x11 = 10,
+ V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_15x11 = 11,
+ V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_64x33 = 12,
+ V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_160x99 = 13,
+ V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_4x3 = 14,
+ V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_3x2 = 15,
+ V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_2x1 = 16,
+ V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_EXTENDED = 17,
+};
+#define V4L2_CID_MPEG_VIDEO_H264_SEI_FRAME_PACKING (V4L2_CID_MPEG_BASE+368)
+#define V4L2_CID_MPEG_VIDEO_H264_SEI_FP_CURRENT_FRAME_0 (V4L2_CID_MPEG_BASE+369)
+#define V4L2_CID_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE (V4L2_CID_MPEG_BASE+370)
+enum v4l2_mpeg_video_h264_sei_fp_arrangement_type {
+ V4L2_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE_CHECKERBOARD = 0,
+ V4L2_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE_COLUMN = 1,
+ V4L2_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE_ROW = 2,
+ V4L2_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE_SIDE_BY_SIDE = 3,
+ V4L2_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE_TOP_BOTTOM = 4,
+ V4L2_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE_TEMPORAL = 5,
+};
+#define V4L2_CID_MPEG_VIDEO_H264_FMO (V4L2_CID_MPEG_BASE+371)
+#define V4L2_CID_MPEG_VIDEO_H264_FMO_MAP_TYPE (V4L2_CID_MPEG_BASE+372)
+enum v4l2_mpeg_video_h264_fmo_map_type {
+ V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_INTERLEAVED_SLICES = 0,
+ V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_SCATTERED_SLICES = 1,
+ V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_FOREGROUND_WITH_LEFT_OVER = 2,
+ V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_BOX_OUT = 3,
+ V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_RASTER_SCAN = 4,
+ V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_WIPE_SCAN = 5,
+ V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_EXPLICIT = 6,
+};
+#define V4L2_CID_MPEG_VIDEO_H264_FMO_SLICE_GROUP (V4L2_CID_MPEG_BASE+373)
+#define V4L2_CID_MPEG_VIDEO_H264_FMO_CHANGE_DIRECTION (V4L2_CID_MPEG_BASE+374)
+enum v4l2_mpeg_video_h264_fmo_change_dir {
+ V4L2_MPEG_VIDEO_H264_FMO_CHANGE_DIR_RIGHT = 0,
+ V4L2_MPEG_VIDEO_H264_FMO_CHANGE_DIR_LEFT = 1,
+};
+#define V4L2_CID_MPEG_VIDEO_H264_FMO_CHANGE_RATE (V4L2_CID_MPEG_BASE+375)
+#define V4L2_CID_MPEG_VIDEO_H264_FMO_RUN_LENGTH (V4L2_CID_MPEG_BASE+376)
+#define V4L2_CID_MPEG_VIDEO_H264_ASO (V4L2_CID_MPEG_BASE+377)
+#define V4L2_CID_MPEG_VIDEO_H264_ASO_SLICE_ORDER (V4L2_CID_MPEG_BASE+378)
+#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING (V4L2_CID_MPEG_BASE+379)
+#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_TYPE (V4L2_CID_MPEG_BASE+380)
+enum v4l2_mpeg_video_h264_hierarchical_coding_type {
+ V4L2_MPEG_VIDEO_H264_HIERARCHICAL_CODING_B = 0,
+ V4L2_MPEG_VIDEO_H264_HIERARCHICAL_CODING_P = 1,
+};
+#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER (V4L2_CID_MPEG_BASE+381)
+#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER_QP (V4L2_CID_MPEG_BASE+382)
+#define V4L2_CID_MPEG_VIDEO_MPEG4_I_FRAME_QP (V4L2_CID_MPEG_BASE+400)
+#define V4L2_CID_MPEG_VIDEO_MPEG4_P_FRAME_QP (V4L2_CID_MPEG_BASE+401)
+#define V4L2_CID_MPEG_VIDEO_MPEG4_B_FRAME_QP (V4L2_CID_MPEG_BASE+402)
+#define V4L2_CID_MPEG_VIDEO_MPEG4_MIN_QP (V4L2_CID_MPEG_BASE+403)
+#define V4L2_CID_MPEG_VIDEO_MPEG4_MAX_QP (V4L2_CID_MPEG_BASE+404)
+#define V4L2_CID_MPEG_VIDEO_MPEG4_LEVEL (V4L2_CID_MPEG_BASE+405)
+enum v4l2_mpeg_video_mpeg4_level {
+ V4L2_MPEG_VIDEO_MPEG4_LEVEL_0 = 0,
+ V4L2_MPEG_VIDEO_MPEG4_LEVEL_0B = 1,
+ V4L2_MPEG_VIDEO_MPEG4_LEVEL_1 = 2,
+ V4L2_MPEG_VIDEO_MPEG4_LEVEL_2 = 3,
+ V4L2_MPEG_VIDEO_MPEG4_LEVEL_3 = 4,
+ V4L2_MPEG_VIDEO_MPEG4_LEVEL_3B = 5,
+ V4L2_MPEG_VIDEO_MPEG4_LEVEL_4 = 6,
+ V4L2_MPEG_VIDEO_MPEG4_LEVEL_5 = 7,
+};
+#define V4L2_CID_MPEG_VIDEO_MPEG4_PROFILE (V4L2_CID_MPEG_BASE+406)
+enum v4l2_mpeg_video_mpeg4_profile {
+ V4L2_MPEG_VIDEO_MPEG4_PROFILE_SIMPLE = 0,
+ V4L2_MPEG_VIDEO_MPEG4_PROFILE_ADVANCED_SIMPLE = 1,
+ V4L2_MPEG_VIDEO_MPEG4_PROFILE_CORE = 2,
+ V4L2_MPEG_VIDEO_MPEG4_PROFILE_SIMPLE_SCALABLE = 3,
+ V4L2_MPEG_VIDEO_MPEG4_PROFILE_ADVANCED_CODING_EFFICIENCY = 4,
+};
+#define V4L2_CID_MPEG_VIDEO_MPEG4_QPEL (V4L2_CID_MPEG_BASE+407)
+
+/* Control IDs for VP8 streams
+ * Although VP8 is not part of MPEG we add these controls to the MPEG class
+ * as that class is already handling other video compression standards
+ */
+#define V4L2_CID_MPEG_VIDEO_VPX_NUM_PARTITIONS (V4L2_CID_MPEG_BASE+500)
+enum v4l2_vp8_num_partitions {
+ V4L2_CID_MPEG_VIDEO_VPX_1_PARTITION = 0,
+ V4L2_CID_MPEG_VIDEO_VPX_2_PARTITIONS = 1,
+ V4L2_CID_MPEG_VIDEO_VPX_4_PARTITIONS = 2,
+ V4L2_CID_MPEG_VIDEO_VPX_8_PARTITIONS = 3,
+};
+#define V4L2_CID_MPEG_VIDEO_VPX_IMD_DISABLE_4X4 (V4L2_CID_MPEG_BASE+501)
+#define V4L2_CID_MPEG_VIDEO_VPX_NUM_REF_FRAMES (V4L2_CID_MPEG_BASE+502)
+enum v4l2_vp8_num_ref_frames {
+ V4L2_CID_MPEG_VIDEO_VPX_1_REF_FRAME = 0,
+ V4L2_CID_MPEG_VIDEO_VPX_2_REF_FRAME = 1,
+ V4L2_CID_MPEG_VIDEO_VPX_3_REF_FRAME = 2,
+};
+#define V4L2_CID_MPEG_VIDEO_VPX_FILTER_LEVEL (V4L2_CID_MPEG_BASE+503)
+#define V4L2_CID_MPEG_VIDEO_VPX_FILTER_SHARPNESS (V4L2_CID_MPEG_BASE+504)
+#define V4L2_CID_MPEG_VIDEO_VPX_GOLDEN_FRAME_REF_PERIOD (V4L2_CID_MPEG_BASE+505)
+#define V4L2_CID_MPEG_VIDEO_VPX_GOLDEN_FRAME_SEL (V4L2_CID_MPEG_BASE+506)
+enum v4l2_vp8_golden_frame_sel {
+ V4L2_CID_MPEG_VIDEO_VPX_GOLDEN_FRAME_USE_PREV = 0,
+ V4L2_CID_MPEG_VIDEO_VPX_GOLDEN_FRAME_USE_REF_PERIOD = 1,
+};
+#define V4L2_CID_MPEG_VIDEO_VPX_MIN_QP (V4L2_CID_MPEG_BASE+507)
+#define V4L2_CID_MPEG_VIDEO_VPX_MAX_QP (V4L2_CID_MPEG_BASE+508)
+#define V4L2_CID_MPEG_VIDEO_VPX_I_FRAME_QP (V4L2_CID_MPEG_BASE+509)
+#define V4L2_CID_MPEG_VIDEO_VPX_P_FRAME_QP (V4L2_CID_MPEG_BASE+510)
+#define V4L2_CID_MPEG_VIDEO_VPX_PROFILE (V4L2_CID_MPEG_BASE+511)
+
+/* MPEG-class control IDs specific to the CX2341x driver as defined by V4L2 */
+#define V4L2_CID_MPEG_CX2341X_BASE (V4L2_CTRL_CLASS_MPEG | 0x1000)
+#define V4L2_CID_MPEG_CX2341X_VIDEO_SPATIAL_FILTER_MODE (V4L2_CID_MPEG_CX2341X_BASE+0)
+enum v4l2_mpeg_cx2341x_video_spatial_filter_mode {
+ V4L2_MPEG_CX2341X_VIDEO_SPATIAL_FILTER_MODE_MANUAL = 0,
+ V4L2_MPEG_CX2341X_VIDEO_SPATIAL_FILTER_MODE_AUTO = 1,
+};
+#define V4L2_CID_MPEG_CX2341X_VIDEO_SPATIAL_FILTER (V4L2_CID_MPEG_CX2341X_BASE+1)
+#define V4L2_CID_MPEG_CX2341X_VIDEO_LUMA_SPATIAL_FILTER_TYPE (V4L2_CID_MPEG_CX2341X_BASE+2)
+enum v4l2_mpeg_cx2341x_video_luma_spatial_filter_type {
+ V4L2_MPEG_CX2341X_VIDEO_LUMA_SPATIAL_FILTER_TYPE_OFF = 0,
+ V4L2_MPEG_CX2341X_VIDEO_LUMA_SPATIAL_FILTER_TYPE_1D_HOR = 1,
+ V4L2_MPEG_CX2341X_VIDEO_LUMA_SPATIAL_FILTER_TYPE_1D_VERT = 2,
+ V4L2_MPEG_CX2341X_VIDEO_LUMA_SPATIAL_FILTER_TYPE_2D_HV_SEPARABLE = 3,
+ V4L2_MPEG_CX2341X_VIDEO_LUMA_SPATIAL_FILTER_TYPE_2D_SYM_NON_SEPARABLE = 4,
+};
+#define V4L2_CID_MPEG_CX2341X_VIDEO_CHROMA_SPATIAL_FILTER_TYPE (V4L2_CID_MPEG_CX2341X_BASE+3)
+enum v4l2_mpeg_cx2341x_video_chroma_spatial_filter_type {
+ V4L2_MPEG_CX2341X_VIDEO_CHROMA_SPATIAL_FILTER_TYPE_OFF = 0,
+ V4L2_MPEG_CX2341X_VIDEO_CHROMA_SPATIAL_FILTER_TYPE_1D_HOR = 1,
+};
+#define V4L2_CID_MPEG_CX2341X_VIDEO_TEMPORAL_FILTER_MODE (V4L2_CID_MPEG_CX2341X_BASE+4)
+enum v4l2_mpeg_cx2341x_video_temporal_filter_mode {
+ V4L2_MPEG_CX2341X_VIDEO_TEMPORAL_FILTER_MODE_MANUAL = 0,
+ V4L2_MPEG_CX2341X_VIDEO_TEMPORAL_FILTER_MODE_AUTO = 1,
+};
+#define V4L2_CID_MPEG_CX2341X_VIDEO_TEMPORAL_FILTER (V4L2_CID_MPEG_CX2341X_BASE+5)
+#define V4L2_CID_MPEG_CX2341X_VIDEO_MEDIAN_FILTER_TYPE (V4L2_CID_MPEG_CX2341X_BASE+6)
+enum v4l2_mpeg_cx2341x_video_median_filter_type {
+ V4L2_MPEG_CX2341X_VIDEO_MEDIAN_FILTER_TYPE_OFF = 0,
+ V4L2_MPEG_CX2341X_VIDEO_MEDIAN_FILTER_TYPE_HOR = 1,
+ V4L2_MPEG_CX2341X_VIDEO_MEDIAN_FILTER_TYPE_VERT = 2,
+ V4L2_MPEG_CX2341X_VIDEO_MEDIAN_FILTER_TYPE_HOR_VERT = 3,
+ V4L2_MPEG_CX2341X_VIDEO_MEDIAN_FILTER_TYPE_DIAG = 4,
+};
+#define V4L2_CID_MPEG_CX2341X_VIDEO_LUMA_MEDIAN_FILTER_BOTTOM (V4L2_CID_MPEG_CX2341X_BASE+7)
+#define V4L2_CID_MPEG_CX2341X_VIDEO_LUMA_MEDIAN_FILTER_TOP (V4L2_CID_MPEG_CX2341X_BASE+8)
+#define V4L2_CID_MPEG_CX2341X_VIDEO_CHROMA_MEDIAN_FILTER_BOTTOM (V4L2_CID_MPEG_CX2341X_BASE+9)
+#define V4L2_CID_MPEG_CX2341X_VIDEO_CHROMA_MEDIAN_FILTER_TOP (V4L2_CID_MPEG_CX2341X_BASE+10)
+#define V4L2_CID_MPEG_CX2341X_STREAM_INSERT_NAV_PACKETS (V4L2_CID_MPEG_CX2341X_BASE+11)
+
+/* MPEG-class control IDs specific to the Samsung MFC 5.1 driver as defined by V4L2 */
+#define V4L2_CID_MPEG_MFC51_BASE (V4L2_CTRL_CLASS_MPEG | 0x1100)
+
+#define V4L2_CID_MPEG_MFC51_VIDEO_DECODER_H264_DISPLAY_DELAY (V4L2_CID_MPEG_MFC51_BASE+0)
+#define V4L2_CID_MPEG_MFC51_VIDEO_DECODER_H264_DISPLAY_DELAY_ENABLE (V4L2_CID_MPEG_MFC51_BASE+1)
+#define V4L2_CID_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE (V4L2_CID_MPEG_MFC51_BASE+2)
+enum v4l2_mpeg_mfc51_video_frame_skip_mode {
+ V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_DISABLED = 0,
+ V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_LEVEL_LIMIT = 1,
+ V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_BUF_LIMIT = 2,
+};
+#define V4L2_CID_MPEG_MFC51_VIDEO_FORCE_FRAME_TYPE (V4L2_CID_MPEG_MFC51_BASE+3)
+enum v4l2_mpeg_mfc51_video_force_frame_type {
+ V4L2_MPEG_MFC51_VIDEO_FORCE_FRAME_TYPE_DISABLED = 0,
+ V4L2_MPEG_MFC51_VIDEO_FORCE_FRAME_TYPE_I_FRAME = 1,
+ V4L2_MPEG_MFC51_VIDEO_FORCE_FRAME_TYPE_NOT_CODED = 2,
+};
+#define V4L2_CID_MPEG_MFC51_VIDEO_PADDING (V4L2_CID_MPEG_MFC51_BASE+4)
+#define V4L2_CID_MPEG_MFC51_VIDEO_PADDING_YUV (V4L2_CID_MPEG_MFC51_BASE+5)
+#define V4L2_CID_MPEG_MFC51_VIDEO_RC_FIXED_TARGET_BIT (V4L2_CID_MPEG_MFC51_BASE+6)
+#define V4L2_CID_MPEG_MFC51_VIDEO_RC_REACTION_COEFF (V4L2_CID_MPEG_MFC51_BASE+7)
+#define V4L2_CID_MPEG_MFC51_VIDEO_H264_ADAPTIVE_RC_ACTIVITY (V4L2_CID_MPEG_MFC51_BASE+50)
+#define V4L2_CID_MPEG_MFC51_VIDEO_H264_ADAPTIVE_RC_DARK (V4L2_CID_MPEG_MFC51_BASE+51)
+#define V4L2_CID_MPEG_MFC51_VIDEO_H264_ADAPTIVE_RC_SMOOTH (V4L2_CID_MPEG_MFC51_BASE+52)
+#define V4L2_CID_MPEG_MFC51_VIDEO_H264_ADAPTIVE_RC_STATIC (V4L2_CID_MPEG_MFC51_BASE+53)
+#define V4L2_CID_MPEG_MFC51_VIDEO_H264_NUM_REF_PIC_FOR_P (V4L2_CID_MPEG_MFC51_BASE+54)
+
+
+/* Camera class control IDs */
+
+#define V4L2_CID_CAMERA_CLASS_BASE (V4L2_CTRL_CLASS_CAMERA | 0x900)
+#define V4L2_CID_CAMERA_CLASS (V4L2_CTRL_CLASS_CAMERA | 1)
+
+#define V4L2_CID_EXPOSURE_AUTO (V4L2_CID_CAMERA_CLASS_BASE+1)
+enum v4l2_exposure_auto_type {
+ V4L2_EXPOSURE_AUTO = 0,
+ V4L2_EXPOSURE_MANUAL = 1,
+ V4L2_EXPOSURE_SHUTTER_PRIORITY = 2,
+ V4L2_EXPOSURE_APERTURE_PRIORITY = 3
+};
+#define V4L2_CID_EXPOSURE_ABSOLUTE (V4L2_CID_CAMERA_CLASS_BASE+2)
+#define V4L2_CID_EXPOSURE_AUTO_PRIORITY (V4L2_CID_CAMERA_CLASS_BASE+3)
+
+#define V4L2_CID_PAN_RELATIVE (V4L2_CID_CAMERA_CLASS_BASE+4)
+#define V4L2_CID_TILT_RELATIVE (V4L2_CID_CAMERA_CLASS_BASE+5)
+#define V4L2_CID_PAN_RESET (V4L2_CID_CAMERA_CLASS_BASE+6)
+#define V4L2_CID_TILT_RESET (V4L2_CID_CAMERA_CLASS_BASE+7)
+
+#define V4L2_CID_PAN_ABSOLUTE (V4L2_CID_CAMERA_CLASS_BASE+8)
+#define V4L2_CID_TILT_ABSOLUTE (V4L2_CID_CAMERA_CLASS_BASE+9)
+
+#define V4L2_CID_FOCUS_ABSOLUTE (V4L2_CID_CAMERA_CLASS_BASE+10)
+#define V4L2_CID_FOCUS_RELATIVE (V4L2_CID_CAMERA_CLASS_BASE+11)
+#define V4L2_CID_FOCUS_AUTO (V4L2_CID_CAMERA_CLASS_BASE+12)
+
+#define V4L2_CID_ZOOM_ABSOLUTE (V4L2_CID_CAMERA_CLASS_BASE+13)
+#define V4L2_CID_ZOOM_RELATIVE (V4L2_CID_CAMERA_CLASS_BASE+14)
+#define V4L2_CID_ZOOM_CONTINUOUS (V4L2_CID_CAMERA_CLASS_BASE+15)
+
+#define V4L2_CID_PRIVACY (V4L2_CID_CAMERA_CLASS_BASE+16)
+
+#define V4L2_CID_IRIS_ABSOLUTE (V4L2_CID_CAMERA_CLASS_BASE+17)
+#define V4L2_CID_IRIS_RELATIVE (V4L2_CID_CAMERA_CLASS_BASE+18)
+
+#define V4L2_CID_AUTO_EXPOSURE_BIAS (V4L2_CID_CAMERA_CLASS_BASE+19)
+
+#define V4L2_CID_AUTO_N_PRESET_WHITE_BALANCE (V4L2_CID_CAMERA_CLASS_BASE+20)
+enum v4l2_auto_n_preset_white_balance {
+ V4L2_WHITE_BALANCE_MANUAL = 0,
+ V4L2_WHITE_BALANCE_AUTO = 1,
+ V4L2_WHITE_BALANCE_INCANDESCENT = 2,
+ V4L2_WHITE_BALANCE_FLUORESCENT = 3,
+ V4L2_WHITE_BALANCE_FLUORESCENT_H = 4,
+ V4L2_WHITE_BALANCE_HORIZON = 5,
+ V4L2_WHITE_BALANCE_DAYLIGHT = 6,
+ V4L2_WHITE_BALANCE_FLASH = 7,
+ V4L2_WHITE_BALANCE_CLOUDY = 8,
+ V4L2_WHITE_BALANCE_SHADE = 9,
+};
+
+#define V4L2_CID_WIDE_DYNAMIC_RANGE (V4L2_CID_CAMERA_CLASS_BASE+21)
+#define V4L2_CID_IMAGE_STABILIZATION (V4L2_CID_CAMERA_CLASS_BASE+22)
+
+#define V4L2_CID_ISO_SENSITIVITY (V4L2_CID_CAMERA_CLASS_BASE+23)
+#define V4L2_CID_ISO_SENSITIVITY_AUTO (V4L2_CID_CAMERA_CLASS_BASE+24)
+enum v4l2_iso_sensitivity_auto_type {
+ V4L2_ISO_SENSITIVITY_MANUAL = 0,
+ V4L2_ISO_SENSITIVITY_AUTO = 1,
+};
+
+#define V4L2_CID_EXPOSURE_METERING (V4L2_CID_CAMERA_CLASS_BASE+25)
+enum v4l2_exposure_metering {
+ V4L2_EXPOSURE_METERING_AVERAGE = 0,
+ V4L2_EXPOSURE_METERING_CENTER_WEIGHTED = 1,
+ V4L2_EXPOSURE_METERING_SPOT = 2,
+ V4L2_EXPOSURE_METERING_MATRIX = 3,
+};
+
+#define V4L2_CID_SCENE_MODE (V4L2_CID_CAMERA_CLASS_BASE+26)
+enum v4l2_scene_mode {
+ V4L2_SCENE_MODE_NONE = 0,
+ V4L2_SCENE_MODE_BACKLIGHT = 1,
+ V4L2_SCENE_MODE_BEACH_SNOW = 2,
+ V4L2_SCENE_MODE_CANDLE_LIGHT = 3,
+ V4L2_SCENE_MODE_DAWN_DUSK = 4,
+ V4L2_SCENE_MODE_FALL_COLORS = 5,
+ V4L2_SCENE_MODE_FIREWORKS = 6,
+ V4L2_SCENE_MODE_LANDSCAPE = 7,
+ V4L2_SCENE_MODE_NIGHT = 8,
+ V4L2_SCENE_MODE_PARTY_INDOOR = 9,
+ V4L2_SCENE_MODE_PORTRAIT = 10,
+ V4L2_SCENE_MODE_SPORTS = 11,
+ V4L2_SCENE_MODE_SUNSET = 12,
+ V4L2_SCENE_MODE_TEXT = 13,
+};
+
+#define V4L2_CID_3A_LOCK (V4L2_CID_CAMERA_CLASS_BASE+27)
+#define V4L2_LOCK_EXPOSURE (1 << 0)
+#define V4L2_LOCK_WHITE_BALANCE (1 << 1)
+#define V4L2_LOCK_FOCUS (1 << 2)
+
+#define V4L2_CID_AUTO_FOCUS_START (V4L2_CID_CAMERA_CLASS_BASE+28)
+#define V4L2_CID_AUTO_FOCUS_STOP (V4L2_CID_CAMERA_CLASS_BASE+29)
+#define V4L2_CID_AUTO_FOCUS_STATUS (V4L2_CID_CAMERA_CLASS_BASE+30)
+#define V4L2_AUTO_FOCUS_STATUS_IDLE (0 << 0)
+#define V4L2_AUTO_FOCUS_STATUS_BUSY (1 << 0)
+#define V4L2_AUTO_FOCUS_STATUS_REACHED (1 << 1)
+#define V4L2_AUTO_FOCUS_STATUS_FAILED (1 << 2)
+
+#define V4L2_CID_AUTO_FOCUS_RANGE (V4L2_CID_CAMERA_CLASS_BASE+31)
+enum v4l2_auto_focus_range {
+ V4L2_AUTO_FOCUS_RANGE_AUTO = 0,
+ V4L2_AUTO_FOCUS_RANGE_NORMAL = 1,
+ V4L2_AUTO_FOCUS_RANGE_MACRO = 2,
+ V4L2_AUTO_FOCUS_RANGE_INFINITY = 3,
+};
+
+
+/* FM Modulator class control IDs */
+
+#define V4L2_CID_FM_TX_CLASS_BASE (V4L2_CTRL_CLASS_FM_TX | 0x900)
+#define V4L2_CID_FM_TX_CLASS (V4L2_CTRL_CLASS_FM_TX | 1)
+
+#define V4L2_CID_RDS_TX_DEVIATION (V4L2_CID_FM_TX_CLASS_BASE + 1)
+#define V4L2_CID_RDS_TX_PI (V4L2_CID_FM_TX_CLASS_BASE + 2)
+#define V4L2_CID_RDS_TX_PTY (V4L2_CID_FM_TX_CLASS_BASE + 3)
+#define V4L2_CID_RDS_TX_PS_NAME (V4L2_CID_FM_TX_CLASS_BASE + 5)
+#define V4L2_CID_RDS_TX_RADIO_TEXT (V4L2_CID_FM_TX_CLASS_BASE + 6)
+
+#define V4L2_CID_AUDIO_LIMITER_ENABLED (V4L2_CID_FM_TX_CLASS_BASE + 64)
+#define V4L2_CID_AUDIO_LIMITER_RELEASE_TIME (V4L2_CID_FM_TX_CLASS_BASE + 65)
+#define V4L2_CID_AUDIO_LIMITER_DEVIATION (V4L2_CID_FM_TX_CLASS_BASE + 66)
+
+#define V4L2_CID_AUDIO_COMPRESSION_ENABLED (V4L2_CID_FM_TX_CLASS_BASE + 80)
+#define V4L2_CID_AUDIO_COMPRESSION_GAIN (V4L2_CID_FM_TX_CLASS_BASE + 81)
+#define V4L2_CID_AUDIO_COMPRESSION_THRESHOLD (V4L2_CID_FM_TX_CLASS_BASE + 82)
+#define V4L2_CID_AUDIO_COMPRESSION_ATTACK_TIME (V4L2_CID_FM_TX_CLASS_BASE + 83)
+#define V4L2_CID_AUDIO_COMPRESSION_RELEASE_TIME (V4L2_CID_FM_TX_CLASS_BASE + 84)
+
+#define V4L2_CID_PILOT_TONE_ENABLED (V4L2_CID_FM_TX_CLASS_BASE + 96)
+#define V4L2_CID_PILOT_TONE_DEVIATION (V4L2_CID_FM_TX_CLASS_BASE + 97)
+#define V4L2_CID_PILOT_TONE_FREQUENCY (V4L2_CID_FM_TX_CLASS_BASE + 98)
+
+#define V4L2_CID_TUNE_PREEMPHASIS (V4L2_CID_FM_TX_CLASS_BASE + 112)
+enum v4l2_preemphasis {
+ V4L2_PREEMPHASIS_DISABLED = 0,
+ V4L2_PREEMPHASIS_50_uS = 1,
+ V4L2_PREEMPHASIS_75_uS = 2,
+};
+#define V4L2_CID_TUNE_POWER_LEVEL (V4L2_CID_FM_TX_CLASS_BASE + 113)
+#define V4L2_CID_TUNE_ANTENNA_CAPACITOR (V4L2_CID_FM_TX_CLASS_BASE + 114)
+
+
+/* Flash and privacy (indicator) light controls */
+
+#define V4L2_CID_FLASH_CLASS_BASE (V4L2_CTRL_CLASS_FLASH | 0x900)
+#define V4L2_CID_FLASH_CLASS (V4L2_CTRL_CLASS_FLASH | 1)
+
+#define V4L2_CID_FLASH_LED_MODE (V4L2_CID_FLASH_CLASS_BASE + 1)
+enum v4l2_flash_led_mode {
+ V4L2_FLASH_LED_MODE_NONE,
+ V4L2_FLASH_LED_MODE_FLASH,
+ V4L2_FLASH_LED_MODE_TORCH,
+};
+
+#define V4L2_CID_FLASH_STROBE_SOURCE (V4L2_CID_FLASH_CLASS_BASE + 2)
+enum v4l2_flash_strobe_source {
+ V4L2_FLASH_STROBE_SOURCE_SOFTWARE,
+ V4L2_FLASH_STROBE_SOURCE_EXTERNAL,
+};
+
+#define V4L2_CID_FLASH_STROBE (V4L2_CID_FLASH_CLASS_BASE + 3)
+#define V4L2_CID_FLASH_STROBE_STOP (V4L2_CID_FLASH_CLASS_BASE + 4)
+#define V4L2_CID_FLASH_STROBE_STATUS (V4L2_CID_FLASH_CLASS_BASE + 5)
+
+#define V4L2_CID_FLASH_TIMEOUT (V4L2_CID_FLASH_CLASS_BASE + 6)
+#define V4L2_CID_FLASH_INTENSITY (V4L2_CID_FLASH_CLASS_BASE + 7)
+#define V4L2_CID_FLASH_TORCH_INTENSITY (V4L2_CID_FLASH_CLASS_BASE + 8)
+#define V4L2_CID_FLASH_INDICATOR_INTENSITY (V4L2_CID_FLASH_CLASS_BASE + 9)
+
+#define V4L2_CID_FLASH_FAULT (V4L2_CID_FLASH_CLASS_BASE + 10)
+#define V4L2_FLASH_FAULT_OVER_VOLTAGE (1 << 0)
+#define V4L2_FLASH_FAULT_TIMEOUT (1 << 1)
+#define V4L2_FLASH_FAULT_OVER_TEMPERATURE (1 << 2)
+#define V4L2_FLASH_FAULT_SHORT_CIRCUIT (1 << 3)
+#define V4L2_FLASH_FAULT_OVER_CURRENT (1 << 4)
+#define V4L2_FLASH_FAULT_INDICATOR (1 << 5)
+
+#define V4L2_CID_FLASH_CHARGE (V4L2_CID_FLASH_CLASS_BASE + 11)
+#define V4L2_CID_FLASH_READY (V4L2_CID_FLASH_CLASS_BASE + 12)
+
+
+/* JPEG-class control IDs */
+
+#define V4L2_CID_JPEG_CLASS_BASE (V4L2_CTRL_CLASS_JPEG | 0x900)
+#define V4L2_CID_JPEG_CLASS (V4L2_CTRL_CLASS_JPEG | 1)
+
+#define V4L2_CID_JPEG_CHROMA_SUBSAMPLING (V4L2_CID_JPEG_CLASS_BASE + 1)
+enum v4l2_jpeg_chroma_subsampling {
+ V4L2_JPEG_CHROMA_SUBSAMPLING_444 = 0,
+ V4L2_JPEG_CHROMA_SUBSAMPLING_422 = 1,
+ V4L2_JPEG_CHROMA_SUBSAMPLING_420 = 2,
+ V4L2_JPEG_CHROMA_SUBSAMPLING_411 = 3,
+ V4L2_JPEG_CHROMA_SUBSAMPLING_410 = 4,
+ V4L2_JPEG_CHROMA_SUBSAMPLING_GRAY = 5,
+};
+#define V4L2_CID_JPEG_RESTART_INTERVAL (V4L2_CID_JPEG_CLASS_BASE + 2)
+#define V4L2_CID_JPEG_COMPRESSION_QUALITY (V4L2_CID_JPEG_CLASS_BASE + 3)
+
+#define V4L2_CID_JPEG_ACTIVE_MARKER (V4L2_CID_JPEG_CLASS_BASE + 4)
+#define V4L2_JPEG_ACTIVE_MARKER_APP0 (1 << 0)
+#define V4L2_JPEG_ACTIVE_MARKER_APP1 (1 << 1)
+#define V4L2_JPEG_ACTIVE_MARKER_COM (1 << 16)
+#define V4L2_JPEG_ACTIVE_MARKER_DQT (1 << 17)
+#define V4L2_JPEG_ACTIVE_MARKER_DHT (1 << 18)
+
+
+/* Image source controls */
+#define V4L2_CID_IMAGE_SOURCE_CLASS_BASE (V4L2_CTRL_CLASS_IMAGE_SOURCE | 0x900)
+#define V4L2_CID_IMAGE_SOURCE_CLASS (V4L2_CTRL_CLASS_IMAGE_SOURCE | 1)
+
+#define V4L2_CID_VBLANK (V4L2_CID_IMAGE_SOURCE_CLASS_BASE + 1)
+#define V4L2_CID_HBLANK (V4L2_CID_IMAGE_SOURCE_CLASS_BASE + 2)
+#define V4L2_CID_ANALOGUE_GAIN (V4L2_CID_IMAGE_SOURCE_CLASS_BASE + 3)
+
+
+/* Image processing controls */
+
+#define V4L2_CID_IMAGE_PROC_CLASS_BASE (V4L2_CTRL_CLASS_IMAGE_PROC | 0x900)
+#define V4L2_CID_IMAGE_PROC_CLASS (V4L2_CTRL_CLASS_IMAGE_PROC | 1)
+
+#define V4L2_CID_LINK_FREQ (V4L2_CID_IMAGE_PROC_CLASS_BASE + 1)
+#define V4L2_CID_PIXEL_RATE (V4L2_CID_IMAGE_PROC_CLASS_BASE + 2)
+#define V4L2_CID_TEST_PATTERN (V4L2_CID_IMAGE_PROC_CLASS_BASE + 3)
+
+
+/* DV-class control IDs defined by V4L2 */
+#define V4L2_CID_DV_CLASS_BASE (V4L2_CTRL_CLASS_DV | 0x900)
+#define V4L2_CID_DV_CLASS (V4L2_CTRL_CLASS_DV | 1)
+
+#define V4L2_CID_DV_TX_HOTPLUG (V4L2_CID_DV_CLASS_BASE + 1)
+#define V4L2_CID_DV_TX_RXSENSE (V4L2_CID_DV_CLASS_BASE + 2)
+#define V4L2_CID_DV_TX_EDID_PRESENT (V4L2_CID_DV_CLASS_BASE + 3)
+#define V4L2_CID_DV_TX_MODE (V4L2_CID_DV_CLASS_BASE + 4)
+enum v4l2_dv_tx_mode {
+ V4L2_DV_TX_MODE_DVI_D = 0,
+ V4L2_DV_TX_MODE_HDMI = 1,
+};
+#define V4L2_CID_DV_TX_RGB_RANGE (V4L2_CID_DV_CLASS_BASE + 5)
+enum v4l2_dv_rgb_range {
+ V4L2_DV_RGB_RANGE_AUTO = 0,
+ V4L2_DV_RGB_RANGE_LIMITED = 1,
+ V4L2_DV_RGB_RANGE_FULL = 2,
+};
+
+#define V4L2_CID_DV_RX_POWER_PRESENT (V4L2_CID_DV_CLASS_BASE + 100)
+#define V4L2_CID_DV_RX_RGB_RANGE (V4L2_CID_DV_CLASS_BASE + 101)
+
+#define V4L2_CID_FM_RX_CLASS_BASE (V4L2_CTRL_CLASS_FM_RX | 0x900)
+#define V4L2_CID_FM_RX_CLASS (V4L2_CTRL_CLASS_FM_RX | 1)
+
+#define V4L2_CID_TUNE_DEEMPHASIS (V4L2_CID_FM_RX_CLASS_BASE + 1)
+enum v4l2_deemphasis {
+ V4L2_DEEMPHASIS_DISABLED = V4L2_PREEMPHASIS_DISABLED,
+ V4L2_DEEMPHASIS_50_uS = V4L2_PREEMPHASIS_50_uS,
+ V4L2_DEEMPHASIS_75_uS = V4L2_PREEMPHASIS_75_uS,
+};
+
+#define V4L2_CID_RDS_RECEPTION (V4L2_CID_FM_RX_CLASS_BASE + 2)
+
+#endif
diff --git a/kernel-headers/uapi/linux/v4l2-dv-timings.h b/kernel-headers/uapi/linux/v4l2-dv-timings.h
new file mode 100644
index 0000000..be709fe
--- /dev/null
+++ b/kernel-headers/uapi/linux/v4l2-dv-timings.h
@@ -0,0 +1,826 @@
+/*
+ * V4L2 DV timings header.
+ *
+ * Copyright (C) 2012 Hans Verkuil <hans.verkuil@cisco.com>
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU General Public License
+ * version 2 as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful, but
+ * WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+ * 02110-1301 USA
+ */
+
+#ifndef _V4L2_DV_TIMINGS_H
+#define _V4L2_DV_TIMINGS_H
+
+#if __GNUC__ < 4 || (__GNUC__ == 4 && (__GNUC_MINOR__ < 6))
+/* Sadly gcc versions older than 4.6 have a bug in how they initialize
+ anonymous unions where they require additional curly brackets.
+ This violates the C1x standard. This workaround adds the curly brackets
+ if needed. */
+#define V4L2_INIT_BT_TIMINGS(_width, args...) \
+ { .bt = { _width , ## args } }
+#else
+#define V4L2_INIT_BT_TIMINGS(_width, args...) \
+ .bt = { _width , ## args }
+#endif
+
+/* CEA-861-E timings (i.e. standard HDTV timings) */
+
+#define V4L2_DV_BT_CEA_640X480P59_94 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(640, 480, 0, 0, \
+ 25175000, 16, 96, 48, 10, 2, 33, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CEA861, 0) \
+}
+
+/* Note: these are the nominal timings, for HDMI links this format is typically
+ * double-clocked to meet the minimum pixelclock requirements. */
+#define V4L2_DV_BT_CEA_720X480I59_94 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(720, 480, 1, 0, \
+ 13500000, 19, 62, 57, 4, 3, 15, 4, 3, 16, \
+ V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_HALF_LINE) \
+}
+
+#define V4L2_DV_BT_CEA_720X480P59_94 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(720, 480, 0, 0, \
+ 27000000, 16, 62, 60, 9, 6, 30, 0, 0, 0, \
+ V4L2_DV_BT_STD_CEA861, 0) \
+}
+
+/* Note: these are the nominal timings, for HDMI links this format is typically
+ * double-clocked to meet the minimum pixelclock requirements. */
+#define V4L2_DV_BT_CEA_720X576I50 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(720, 576, 1, 0, \
+ 13500000, 12, 63, 69, 2, 3, 19, 2, 3, 20, \
+ V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_HALF_LINE) \
+}
+
+#define V4L2_DV_BT_CEA_720X576P50 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(720, 576, 0, 0, \
+ 27000000, 12, 64, 68, 5, 5, 39, 0, 0, 0, \
+ V4L2_DV_BT_STD_CEA861, 0) \
+}
+
+#define V4L2_DV_BT_CEA_1280X720P24 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1280, 720, 0, \
+ V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, \
+ 59400000, 1760, 40, 220, 5, 5, 20, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CEA861, \
+ V4L2_DV_FL_CAN_REDUCE_FPS) \
+}
+
+#define V4L2_DV_BT_CEA_1280X720P25 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1280, 720, 0, \
+ V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, \
+ 74250000, 2420, 40, 220, 5, 5, 20, 0, 0, 0, \
+ V4L2_DV_BT_STD_CEA861, 0) \
+}
+
+#define V4L2_DV_BT_CEA_1280X720P30 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1280, 720, 0, \
+ V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, \
+ 74250000, 1760, 40, 220, 5, 5, 20, 0, 0, 0, \
+ V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_CAN_REDUCE_FPS) \
+}
+
+#define V4L2_DV_BT_CEA_1280X720P50 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1280, 720, 0, \
+ V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, \
+ 74250000, 440, 40, 220, 5, 5, 20, 0, 0, 0, \
+ V4L2_DV_BT_STD_CEA861, 0) \
+}
+
+#define V4L2_DV_BT_CEA_1280X720P60 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1280, 720, 0, \
+ V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, \
+ 74250000, 110, 40, 220, 5, 5, 20, 0, 0, 0, \
+ V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_CAN_REDUCE_FPS) \
+}
+
+#define V4L2_DV_BT_CEA_1920X1080P24 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1920, 1080, 0, \
+ V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, \
+ 74250000, 638, 44, 148, 4, 5, 36, 0, 0, 0, \
+ V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_CAN_REDUCE_FPS) \
+}
+
+#define V4L2_DV_BT_CEA_1920X1080P25 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1920, 1080, 0, \
+ V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, \
+ 74250000, 528, 44, 148, 4, 5, 36, 0, 0, 0, \
+ V4L2_DV_BT_STD_CEA861, 0) \
+}
+
+#define V4L2_DV_BT_CEA_1920X1080P30 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1920, 1080, 0, \
+ V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, \
+ 74250000, 88, 44, 148, 4, 5, 36, 0, 0, 0, \
+ V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_CAN_REDUCE_FPS) \
+}
+
+#define V4L2_DV_BT_CEA_1920X1080I50 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1920, 1080, 1, \
+ V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, \
+ 74250000, 528, 44, 148, 2, 5, 15, 2, 5, 16, \
+ V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_HALF_LINE) \
+}
+
+#define V4L2_DV_BT_CEA_1920X1080P50 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1920, 1080, 0, \
+ V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, \
+ 148500000, 528, 44, 148, 4, 5, 36, 0, 0, 0, \
+ V4L2_DV_BT_STD_CEA861, 0) \
+}
+
+#define V4L2_DV_BT_CEA_1920X1080I60 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1920, 1080, 1, \
+ V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, \
+ 74250000, 88, 44, 148, 2, 5, 15, 2, 5, 16, \
+ V4L2_DV_BT_STD_CEA861, \
+ V4L2_DV_FL_CAN_REDUCE_FPS | V4L2_DV_FL_HALF_LINE) \
+}
+
+#define V4L2_DV_BT_CEA_1920X1080P60 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1920, 1080, 0, \
+ V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, \
+ 148500000, 88, 44, 148, 4, 5, 36, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CEA861, \
+ V4L2_DV_FL_CAN_REDUCE_FPS) \
+}
+
+
+/* VESA Discrete Monitor Timings as per version 1.0, revision 12 */
+
+#define V4L2_DV_BT_DMT_640X350P85 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(640, 350, 0, V4L2_DV_HSYNC_POS_POL, \
+ 31500000, 32, 64, 96, 32, 3, 60, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_640X400P85 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(640, 400, 0, V4L2_DV_VSYNC_POS_POL, \
+ 31500000, 32, 64, 96, 1, 3, 41, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_720X400P85 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(720, 400, 0, V4L2_DV_VSYNC_POS_POL, \
+ 35500000, 36, 72, 108, 1, 3, 42, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT, 0) \
+}
+
+/* VGA resolutions */
+#define V4L2_DV_BT_DMT_640X480P60 V4L2_DV_BT_CEA_640X480P59_94
+
+#define V4L2_DV_BT_DMT_640X480P72 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(640, 480, 0, 0, \
+ 31500000, 24, 40, 128, 9, 3, 28, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_640X480P75 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(640, 480, 0, 0, \
+ 31500000, 16, 64, 120, 1, 3, 16, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_640X480P85 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(640, 480, 0, 0, \
+ 36000000, 56, 56, 80, 1, 3, 25, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT, 0) \
+}
+
+/* SVGA resolutions */
+#define V4L2_DV_BT_DMT_800X600P56 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(800, 600, 0, \
+ V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, \
+ 36000000, 24, 72, 128, 1, 2, 22, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_800X600P60 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(800, 600, 0, \
+ V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, \
+ 40000000, 40, 128, 88, 1, 4, 23, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_800X600P72 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(800, 600, 0, \
+ V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, \
+ 50000000, 56, 120, 64, 37, 6, 23, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_800X600P75 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(800, 600, 0, \
+ V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, \
+ 49500000, 16, 80, 160, 1, 3, 21, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_800X600P85 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(800, 600, 0, \
+ V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, \
+ 56250000, 32, 64, 152, 1, 3, 27, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_800X600P120_RB { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(800, 600, 0, V4L2_DV_HSYNC_POS_POL, \
+ 73250000, 48, 32, 80, 3, 4, 29, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, \
+ V4L2_DV_FL_REDUCED_BLANKING) \
+}
+
+#define V4L2_DV_BT_DMT_848X480P60 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(848, 480, 0, \
+ V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, \
+ 33750000, 16, 112, 112, 6, 8, 23, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_1024X768I43 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1024, 768, 1, \
+ V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, \
+ 44900000, 8, 176, 56, 0, 4, 20, 0, 4, 21, \
+ V4L2_DV_BT_STD_DMT, 0) \
+}
+
+/* XGA resolutions */
+#define V4L2_DV_BT_DMT_1024X768P60 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1024, 768, 0, 0, \
+ 65000000, 24, 136, 160, 3, 6, 29, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_1024X768P70 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1024, 768, 0, 0, \
+ 75000000, 24, 136, 144, 3, 6, 29, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_1024X768P75 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1024, 768, 0, \
+ V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, \
+ 78750000, 16, 96, 176, 1, 3, 28, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_1024X768P85 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1024, 768, 0, \
+ V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, \
+ 94500000, 48, 96, 208, 1, 3, 36, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_1024X768P120_RB { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1024, 768, 0, V4L2_DV_HSYNC_POS_POL, \
+ 115500000, 48, 32, 80, 3, 4, 38, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, \
+ V4L2_DV_FL_REDUCED_BLANKING) \
+}
+
+/* XGA+ resolution */
+#define V4L2_DV_BT_DMT_1152X864P75 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1152, 864, 0, \
+ V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, \
+ 108000000, 64, 128, 256, 1, 3, 32, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_1280X720P60 V4L2_DV_BT_CEA_1280X720P60
+
+/* WXGA resolutions */
+#define V4L2_DV_BT_DMT_1280X768P60_RB { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1280, 768, 0, V4L2_DV_HSYNC_POS_POL, \
+ 68250000, 48, 32, 80, 3, 7, 12, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, \
+ V4L2_DV_FL_REDUCED_BLANKING) \
+}
+
+#define V4L2_DV_BT_DMT_1280X768P60 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1280, 768, 0, V4L2_DV_VSYNC_POS_POL, \
+ 79500000, 64, 128, 192, 3, 7, 20, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_1280X768P75 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1280, 768, 0, V4L2_DV_VSYNC_POS_POL, \
+ 102250000, 80, 128, 208, 3, 7, 27, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_1280X768P85 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1280, 768, 0, V4L2_DV_VSYNC_POS_POL, \
+ 117500000, 80, 136, 216, 3, 7, 31, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_1280X768P120_RB { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1280, 768, 0, V4L2_DV_HSYNC_POS_POL, \
+ 140250000, 48, 32, 80, 3, 7, 35, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, \
+ V4L2_DV_FL_REDUCED_BLANKING) \
+}
+
+#define V4L2_DV_BT_DMT_1280X800P60_RB { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1280, 800, 0, V4L2_DV_HSYNC_POS_POL, \
+ 71000000, 48, 32, 80, 3, 6, 14, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, \
+ V4L2_DV_FL_REDUCED_BLANKING) \
+}
+
+#define V4L2_DV_BT_DMT_1280X800P60 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1280, 800, 0, V4L2_DV_VSYNC_POS_POL, \
+ 83500000, 72, 128, 200, 3, 6, 22, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_1280X800P75 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1280, 800, 0, V4L2_DV_VSYNC_POS_POL, \
+ 106500000, 80, 128, 208, 3, 6, 29, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_1280X800P85 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1280, 800, 0, V4L2_DV_VSYNC_POS_POL, \
+ 122500000, 80, 136, 216, 3, 6, 34, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_1280X800P120_RB { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1280, 800, 0, V4L2_DV_HSYNC_POS_POL, \
+ 146250000, 48, 32, 80, 3, 6, 38, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, \
+ V4L2_DV_FL_REDUCED_BLANKING) \
+}
+
+#define V4L2_DV_BT_DMT_1280X960P60 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1280, 960, 0, \
+ V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, \
+ 108000000, 96, 112, 312, 1, 3, 36, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_1280X960P85 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1280, 960, 0, \
+ V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, \
+ 148500000, 64, 160, 224, 1, 3, 47, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_1280X960P120_RB { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1280, 960, 0, V4L2_DV_HSYNC_POS_POL, \
+ 175500000, 48, 32, 80, 3, 4, 50, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, \
+ V4L2_DV_FL_REDUCED_BLANKING) \
+}
+
+/* SXGA resolutions */
+#define V4L2_DV_BT_DMT_1280X1024P60 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1280, 1024, 0, \
+ V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, \
+ 108000000, 48, 112, 248, 1, 3, 38, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_1280X1024P75 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1280, 1024, 0, \
+ V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, \
+ 135000000, 16, 144, 248, 1, 3, 38, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_1280X1024P85 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1280, 1024, 0, \
+ V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, \
+ 157500000, 64, 160, 224, 1, 3, 44, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_1280X1024P120_RB { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1280, 1024, 0, V4L2_DV_HSYNC_POS_POL, \
+ 187250000, 48, 32, 80, 3, 7, 50, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, \
+ V4L2_DV_FL_REDUCED_BLANKING) \
+}
+
+#define V4L2_DV_BT_DMT_1360X768P60 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1360, 768, 0, \
+ V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, \
+ 85500000, 64, 112, 256, 3, 6, 18, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_1360X768P120_RB { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1360, 768, 0, V4L2_DV_HSYNC_POS_POL, \
+ 148250000, 48, 32, 80, 3, 5, 37, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, \
+ V4L2_DV_FL_REDUCED_BLANKING) \
+}
+
+#define V4L2_DV_BT_DMT_1366X768P60 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1366, 768, 0, \
+ V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, \
+ 85500000, 70, 143, 213, 3, 3, 24, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_1366X768P60_RB { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1366, 768, 0, \
+ V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, \
+ 72000000, 14, 56, 64, 1, 3, 28, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT, V4L2_DV_FL_REDUCED_BLANKING) \
+}
+
+/* SXGA+ resolutions */
+#define V4L2_DV_BT_DMT_1400X1050P60_RB { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1400, 1050, 0, V4L2_DV_HSYNC_POS_POL, \
+ 101000000, 48, 32, 80, 3, 4, 23, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, \
+ V4L2_DV_FL_REDUCED_BLANKING) \
+}
+
+#define V4L2_DV_BT_DMT_1400X1050P60 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1400, 1050, 0, V4L2_DV_VSYNC_POS_POL, \
+ 121750000, 88, 144, 232, 3, 4, 32, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_1400X1050P75 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1400, 1050, 0, V4L2_DV_VSYNC_POS_POL, \
+ 156000000, 104, 144, 248, 3, 4, 42, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_1400X1050P85 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1400, 1050, 0, V4L2_DV_VSYNC_POS_POL, \
+ 179500000, 104, 152, 256, 3, 4, 48, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_1400X1050P120_RB { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1400, 1050, 0, V4L2_DV_HSYNC_POS_POL, \
+ 208000000, 48, 32, 80, 3, 4, 55, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, \
+ V4L2_DV_FL_REDUCED_BLANKING) \
+}
+
+/* WXGA+ resolutions */
+#define V4L2_DV_BT_DMT_1440X900P60_RB { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1440, 900, 0, V4L2_DV_HSYNC_POS_POL, \
+ 88750000, 48, 32, 80, 3, 6, 17, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, \
+ V4L2_DV_FL_REDUCED_BLANKING) \
+}
+
+#define V4L2_DV_BT_DMT_1440X900P60 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1440, 900, 0, V4L2_DV_VSYNC_POS_POL, \
+ 106500000, 80, 152, 232, 3, 6, 25, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_1440X900P75 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1440, 900, 0, V4L2_DV_VSYNC_POS_POL, \
+ 136750000, 96, 152, 248, 3, 6, 33, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_1440X900P85 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1440, 900, 0, V4L2_DV_VSYNC_POS_POL, \
+ 157000000, 104, 152, 256, 3, 6, 39, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_1440X900P120_RB { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1440, 900, 0, V4L2_DV_HSYNC_POS_POL, \
+ 182750000, 48, 32, 80, 3, 6, 44, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, \
+ V4L2_DV_FL_REDUCED_BLANKING) \
+}
+
+#define V4L2_DV_BT_DMT_1600X900P60_RB { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1600, 900, 0, \
+ V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, \
+ 108000000, 24, 80, 96, 1, 3, 96, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT, V4L2_DV_FL_REDUCED_BLANKING) \
+}
+
+/* UXGA resolutions */
+#define V4L2_DV_BT_DMT_1600X1200P60 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1600, 1200, 0, \
+ V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, \
+ 162000000, 64, 192, 304, 1, 3, 46, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_1600X1200P65 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1600, 1200, 0, \
+ V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, \
+ 175500000, 64, 192, 304, 1, 3, 46, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_1600X1200P70 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1600, 1200, 0, \
+ V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, \
+ 189000000, 64, 192, 304, 1, 3, 46, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_1600X1200P75 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1600, 1200, 0, \
+ V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, \
+ 202500000, 64, 192, 304, 1, 3, 46, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_1600X1200P85 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1600, 1200, 0, \
+ V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, \
+ 229500000, 64, 192, 304, 1, 3, 46, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_1600X1200P120_RB { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1600, 1200, 0, V4L2_DV_HSYNC_POS_POL, \
+ 268250000, 48, 32, 80, 3, 4, 64, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, \
+ V4L2_DV_FL_REDUCED_BLANKING) \
+}
+
+/* WSXGA+ resolutions */
+#define V4L2_DV_BT_DMT_1680X1050P60_RB { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1680, 1050, 0, V4L2_DV_HSYNC_POS_POL, \
+ 119000000, 48, 32, 80, 3, 6, 21, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, \
+ V4L2_DV_FL_REDUCED_BLANKING) \
+}
+
+#define V4L2_DV_BT_DMT_1680X1050P60 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1680, 1050, 0, V4L2_DV_VSYNC_POS_POL, \
+ 146250000, 104, 176, 280, 3, 6, 30, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_1680X1050P75 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1680, 1050, 0, V4L2_DV_VSYNC_POS_POL, \
+ 187000000, 120, 176, 296, 3, 6, 40, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_1680X1050P85 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1680, 1050, 0, V4L2_DV_VSYNC_POS_POL, \
+ 214750000, 128, 176, 304, 3, 6, 46, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_1680X1050P120_RB { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1680, 1050, 0, V4L2_DV_HSYNC_POS_POL, \
+ 245500000, 48, 32, 80, 3, 6, 53, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, \
+ V4L2_DV_FL_REDUCED_BLANKING) \
+}
+
+#define V4L2_DV_BT_DMT_1792X1344P60 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1792, 1344, 0, V4L2_DV_VSYNC_POS_POL, \
+ 204750000, 128, 200, 328, 1, 3, 46, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_1792X1344P75 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1792, 1344, 0, V4L2_DV_VSYNC_POS_POL, \
+ 261000000, 96, 216, 352, 1, 3, 69, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_1792X1344P120_RB { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1792, 1344, 0, V4L2_DV_HSYNC_POS_POL, \
+ 333250000, 48, 32, 80, 3, 4, 72, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, \
+ V4L2_DV_FL_REDUCED_BLANKING) \
+}
+
+#define V4L2_DV_BT_DMT_1856X1392P60 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1856, 1392, 0, V4L2_DV_VSYNC_POS_POL, \
+ 218250000, 96, 224, 352, 1, 3, 43, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_1856X1392P75 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1856, 1392, 0, V4L2_DV_VSYNC_POS_POL, \
+ 288000000, 128, 224, 352, 1, 3, 104, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_1856X1392P120_RB { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1856, 1392, 0, V4L2_DV_HSYNC_POS_POL, \
+ 356500000, 48, 32, 80, 3, 4, 75, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, \
+ V4L2_DV_FL_REDUCED_BLANKING) \
+}
+
+#define V4L2_DV_BT_DMT_1920X1080P60 V4L2_DV_BT_CEA_1920X1080P60
+
+/* WUXGA resolutions */
+#define V4L2_DV_BT_DMT_1920X1200P60_RB { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1920, 1200, 0, V4L2_DV_HSYNC_POS_POL, \
+ 154000000, 48, 32, 80, 3, 6, 26, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, \
+ V4L2_DV_FL_REDUCED_BLANKING) \
+}
+
+#define V4L2_DV_BT_DMT_1920X1200P60 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1920, 1200, 0, V4L2_DV_VSYNC_POS_POL, \
+ 193250000, 136, 200, 336, 3, 6, 36, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_1920X1200P75 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1920, 1200, 0, V4L2_DV_VSYNC_POS_POL, \
+ 245250000, 136, 208, 344, 3, 6, 46, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_1920X1200P85 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1920, 1200, 0, V4L2_DV_VSYNC_POS_POL, \
+ 281250000, 144, 208, 352, 3, 6, 53, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_1920X1200P120_RB { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1920, 1200, 0, V4L2_DV_HSYNC_POS_POL, \
+ 317000000, 48, 32, 80, 3, 6, 62, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, \
+ V4L2_DV_FL_REDUCED_BLANKING) \
+}
+
+#define V4L2_DV_BT_DMT_1920X1440P60 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1920, 1440, 0, V4L2_DV_VSYNC_POS_POL, \
+ 234000000, 128, 208, 344, 1, 3, 56, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_1920X1440P75 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1920, 1440, 0, V4L2_DV_VSYNC_POS_POL, \
+ 297000000, 144, 224, 352, 1, 3, 56, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_1920X1440P120_RB { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(1920, 1440, 0, V4L2_DV_HSYNC_POS_POL, \
+ 380500000, 48, 32, 80, 3, 4, 78, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, \
+ V4L2_DV_FL_REDUCED_BLANKING) \
+}
+
+#define V4L2_DV_BT_DMT_2048X1152P60_RB { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(2048, 1152, 0, \
+ V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, \
+ 162000000, 26, 80, 96, 1, 3, 44, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT, V4L2_DV_FL_REDUCED_BLANKING) \
+}
+
+/* WQXGA resolutions */
+#define V4L2_DV_BT_DMT_2560X1600P60_RB { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(2560, 1600, 0, V4L2_DV_HSYNC_POS_POL, \
+ 268500000, 48, 32, 80, 3, 6, 37, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, \
+ V4L2_DV_FL_REDUCED_BLANKING) \
+}
+
+#define V4L2_DV_BT_DMT_2560X1600P60 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(2560, 1600, 0, V4L2_DV_VSYNC_POS_POL, \
+ 348500000, 192, 280, 472, 3, 6, 49, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_2560X1600P75 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(2560, 1600, 0, V4L2_DV_VSYNC_POS_POL, \
+ 443250000, 208, 280, 488, 3, 6, 63, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_2560X1600P85 { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(2560, 1600, 0, V4L2_DV_VSYNC_POS_POL, \
+ 505250000, 208, 280, 488, 3, 6, 73, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, 0) \
+}
+
+#define V4L2_DV_BT_DMT_2560X1600P120_RB { \
+ .type = V4L2_DV_BT_656_1120, \
+ V4L2_INIT_BT_TIMINGS(2560, 1600, 0, V4L2_DV_HSYNC_POS_POL, \
+ 552750000, 48, 32, 80, 3, 6, 85, 0, 0, 0, \
+ V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, \
+ V4L2_DV_FL_REDUCED_BLANKING) \
+}
+
+#endif
diff --git a/kernel-headers/uapi/linux/v4l2-mediabus.h b/kernel-headers/uapi/linux/v4l2-mediabus.h
new file mode 100644
index 0000000..b5c3aab
--- /dev/null
+++ b/kernel-headers/uapi/linux/v4l2-mediabus.h
@@ -0,0 +1,135 @@
+/*
+ * Media Bus API header
+ *
+ * Copyright (C) 2009, Guennadi Liakhovetski <g.liakhovetski@gmx.de>
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License version 2 as
+ * published by the Free Software Foundation.
+ */
+
+#ifndef __LINUX_V4L2_MEDIABUS_H
+#define __LINUX_V4L2_MEDIABUS_H
+
+#include <linux/types.h>
+#include <linux/videodev2.h>
+
+/*
+ * These pixel codes uniquely identify data formats on the media bus. Mostly
+ * they correspond to similarly named V4L2_PIX_FMT_* formats, format 0 is
+ * reserved, V4L2_MBUS_FMT_FIXED shall be used by host-client pairs, where the
+ * data format is fixed. Additionally, "2X8" means that one pixel is transferred
+ * in two 8-bit samples, "BE" or "LE" specify in which order those samples are
+ * transferred over the bus: "LE" means that the least significant bits are
+ * transferred first, "BE" means that the most significant bits are transferred
+ * first, and "PADHI" and "PADLO" define which bits - low or high, in the
+ * incomplete high byte, are filled with padding bits.
+ *
+ * The pixel codes are grouped by type, bus_width, bits per component, samples
+ * per pixel and order of subsamples. Numerical values are sorted using generic
+ * numerical sort order (8 thus comes before 10).
+ *
+ * As their value can't change when a new pixel code is inserted in the
+ * enumeration, the pixel codes are explicitly given a numerical value. The next
+ * free values for each category are listed below, update them when inserting
+ * new pixel codes.
+ */
+enum v4l2_mbus_pixelcode {
+ V4L2_MBUS_FMT_FIXED = 0x0001,
+
+ /* RGB - next is 0x100e */
+ V4L2_MBUS_FMT_RGB444_2X8_PADHI_BE = 0x1001,
+ V4L2_MBUS_FMT_RGB444_2X8_PADHI_LE = 0x1002,
+ V4L2_MBUS_FMT_RGB555_2X8_PADHI_BE = 0x1003,
+ V4L2_MBUS_FMT_RGB555_2X8_PADHI_LE = 0x1004,
+ V4L2_MBUS_FMT_BGR565_2X8_BE = 0x1005,
+ V4L2_MBUS_FMT_BGR565_2X8_LE = 0x1006,
+ V4L2_MBUS_FMT_RGB565_2X8_BE = 0x1007,
+ V4L2_MBUS_FMT_RGB565_2X8_LE = 0x1008,
+ V4L2_MBUS_FMT_RGB666_1X18 = 0x1009,
+ V4L2_MBUS_FMT_RGB888_1X24 = 0x100a,
+ V4L2_MBUS_FMT_RGB888_2X12_BE = 0x100b,
+ V4L2_MBUS_FMT_RGB888_2X12_LE = 0x100c,
+ V4L2_MBUS_FMT_ARGB8888_1X32 = 0x100d,
+
+ /* YUV (including grey) - next is 0x2018 */
+ V4L2_MBUS_FMT_Y8_1X8 = 0x2001,
+ V4L2_MBUS_FMT_UV8_1X8 = 0x2015,
+ V4L2_MBUS_FMT_UYVY8_1_5X8 = 0x2002,
+ V4L2_MBUS_FMT_VYUY8_1_5X8 = 0x2003,
+ V4L2_MBUS_FMT_YUYV8_1_5X8 = 0x2004,
+ V4L2_MBUS_FMT_YVYU8_1_5X8 = 0x2005,
+ V4L2_MBUS_FMT_UYVY8_2X8 = 0x2006,
+ V4L2_MBUS_FMT_VYUY8_2X8 = 0x2007,
+ V4L2_MBUS_FMT_YUYV8_2X8 = 0x2008,
+ V4L2_MBUS_FMT_YVYU8_2X8 = 0x2009,
+ V4L2_MBUS_FMT_Y10_1X10 = 0x200a,
+ V4L2_MBUS_FMT_YUYV10_2X10 = 0x200b,
+ V4L2_MBUS_FMT_YVYU10_2X10 = 0x200c,
+ V4L2_MBUS_FMT_Y12_1X12 = 0x2013,
+ V4L2_MBUS_FMT_UYVY8_1X16 = 0x200f,
+ V4L2_MBUS_FMT_VYUY8_1X16 = 0x2010,
+ V4L2_MBUS_FMT_YUYV8_1X16 = 0x2011,
+ V4L2_MBUS_FMT_YVYU8_1X16 = 0x2012,
+ V4L2_MBUS_FMT_YDYUYDYV8_1X16 = 0x2014,
+ V4L2_MBUS_FMT_YUYV10_1X20 = 0x200d,
+ V4L2_MBUS_FMT_YVYU10_1X20 = 0x200e,
+ V4L2_MBUS_FMT_YUV10_1X30 = 0x2016,
+ V4L2_MBUS_FMT_AYUV8_1X32 = 0x2017,
+
+ /* Bayer - next is 0x3019 */
+ V4L2_MBUS_FMT_SBGGR8_1X8 = 0x3001,
+ V4L2_MBUS_FMT_SGBRG8_1X8 = 0x3013,
+ V4L2_MBUS_FMT_SGRBG8_1X8 = 0x3002,
+ V4L2_MBUS_FMT_SRGGB8_1X8 = 0x3014,
+ V4L2_MBUS_FMT_SBGGR10_ALAW8_1X8 = 0x3015,
+ V4L2_MBUS_FMT_SGBRG10_ALAW8_1X8 = 0x3016,
+ V4L2_MBUS_FMT_SGRBG10_ALAW8_1X8 = 0x3017,
+ V4L2_MBUS_FMT_SRGGB10_ALAW8_1X8 = 0x3018,
+ V4L2_MBUS_FMT_SBGGR10_DPCM8_1X8 = 0x300b,
+ V4L2_MBUS_FMT_SGBRG10_DPCM8_1X8 = 0x300c,
+ V4L2_MBUS_FMT_SGRBG10_DPCM8_1X8 = 0x3009,
+ V4L2_MBUS_FMT_SRGGB10_DPCM8_1X8 = 0x300d,
+ V4L2_MBUS_FMT_SBGGR10_2X8_PADHI_BE = 0x3003,
+ V4L2_MBUS_FMT_SBGGR10_2X8_PADHI_LE = 0x3004,
+ V4L2_MBUS_FMT_SBGGR10_2X8_PADLO_BE = 0x3005,
+ V4L2_MBUS_FMT_SBGGR10_2X8_PADLO_LE = 0x3006,
+ V4L2_MBUS_FMT_SBGGR10_1X10 = 0x3007,
+ V4L2_MBUS_FMT_SGBRG10_1X10 = 0x300e,
+ V4L2_MBUS_FMT_SGRBG10_1X10 = 0x300a,
+ V4L2_MBUS_FMT_SRGGB10_1X10 = 0x300f,
+ V4L2_MBUS_FMT_SBGGR12_1X12 = 0x3008,
+ V4L2_MBUS_FMT_SGBRG12_1X12 = 0x3010,
+ V4L2_MBUS_FMT_SGRBG12_1X12 = 0x3011,
+ V4L2_MBUS_FMT_SRGGB12_1X12 = 0x3012,
+
+ /* JPEG compressed formats - next is 0x4002 */
+ V4L2_MBUS_FMT_JPEG_1X8 = 0x4001,
+
+ /* Vendor specific formats - next is 0x5002 */
+
+ /* S5C73M3 sensor specific interleaved UYVY and JPEG */
+ V4L2_MBUS_FMT_S5C_UYVY_JPEG_1X8 = 0x5001,
+
+ /* HSV - next is 0x6002 */
+ V4L2_MBUS_FMT_AHSV8888_1X32 = 0x6001,
+};
+
+/**
+ * struct v4l2_mbus_framefmt - frame format on the media bus
+ * @width: frame width
+ * @height: frame height
+ * @code: data format code (from enum v4l2_mbus_pixelcode)
+ * @field: used interlacing type (from enum v4l2_field)
+ * @colorspace: colorspace of the data (from enum v4l2_colorspace)
+ */
+struct v4l2_mbus_framefmt {
+ __u32 width;
+ __u32 height;
+ __u32 code;
+ __u32 field;
+ __u32 colorspace;
+ __u32 reserved[7];
+};
+
+#endif
diff --git a/kernel-headers/uapi/linux/v4l2-subdev.h b/kernel-headers/uapi/linux/v4l2-subdev.h
new file mode 100644
index 0000000..a33c4da
--- /dev/null
+++ b/kernel-headers/uapi/linux/v4l2-subdev.h
@@ -0,0 +1,180 @@
+/*
+ * V4L2 subdev userspace API
+ *
+ * Copyright (C) 2010 Nokia Corporation
+ *
+ * Contacts: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
+ * Sakari Ailus <sakari.ailus@iki.fi>
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License version 2 as
+ * published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+ */
+
+#ifndef __LINUX_V4L2_SUBDEV_H
+#define __LINUX_V4L2_SUBDEV_H
+
+#include <linux/ioctl.h>
+#include <linux/types.h>
+#include <linux/v4l2-common.h>
+#include <linux/v4l2-mediabus.h>
+
+/**
+ * enum v4l2_subdev_format_whence - Media bus format type
+ * @V4L2_SUBDEV_FORMAT_TRY: try format, for negotiation only
+ * @V4L2_SUBDEV_FORMAT_ACTIVE: active format, applied to the device
+ */
+enum v4l2_subdev_format_whence {
+ V4L2_SUBDEV_FORMAT_TRY = 0,
+ V4L2_SUBDEV_FORMAT_ACTIVE = 1,
+};
+
+/**
+ * struct v4l2_subdev_format - Pad-level media bus format
+ * @which: format type (from enum v4l2_subdev_format_whence)
+ * @pad: pad number, as reported by the media API
+ * @format: media bus format (format code and frame size)
+ */
+struct v4l2_subdev_format {
+ __u32 which;
+ __u32 pad;
+ struct v4l2_mbus_framefmt format;
+ __u32 reserved[8];
+};
+
+/**
+ * struct v4l2_subdev_crop - Pad-level crop settings
+ * @which: format type (from enum v4l2_subdev_format_whence)
+ * @pad: pad number, as reported by the media API
+ * @rect: pad crop rectangle boundaries
+ */
+struct v4l2_subdev_crop {
+ __u32 which;
+ __u32 pad;
+ struct v4l2_rect rect;
+ __u32 reserved[8];
+};
+
+/**
+ * struct v4l2_subdev_mbus_code_enum - Media bus format enumeration
+ * @pad: pad number, as reported by the media API
+ * @index: format index during enumeration
+ * @code: format code (from enum v4l2_mbus_pixelcode)
+ */
+struct v4l2_subdev_mbus_code_enum {
+ __u32 pad;
+ __u32 index;
+ __u32 code;
+ __u32 reserved[9];
+};
+
+/**
+ * struct v4l2_subdev_frame_size_enum - Media bus format enumeration
+ * @pad: pad number, as reported by the media API
+ * @index: format index during enumeration
+ * @code: format code (from enum v4l2_mbus_pixelcode)
+ */
+struct v4l2_subdev_frame_size_enum {
+ __u32 index;
+ __u32 pad;
+ __u32 code;
+ __u32 min_width;
+ __u32 max_width;
+ __u32 min_height;
+ __u32 max_height;
+ __u32 reserved[9];
+};
+
+/**
+ * struct v4l2_subdev_frame_interval - Pad-level frame rate
+ * @pad: pad number, as reported by the media API
+ * @interval: frame interval in seconds
+ */
+struct v4l2_subdev_frame_interval {
+ __u32 pad;
+ struct v4l2_fract interval;
+ __u32 reserved[9];
+};
+
+/**
+ * struct v4l2_subdev_frame_interval_enum - Frame interval enumeration
+ * @pad: pad number, as reported by the media API
+ * @index: frame interval index during enumeration
+ * @code: format code (from enum v4l2_mbus_pixelcode)
+ * @width: frame width in pixels
+ * @height: frame height in pixels
+ * @interval: frame interval in seconds
+ */
+struct v4l2_subdev_frame_interval_enum {
+ __u32 index;
+ __u32 pad;
+ __u32 code;
+ __u32 width;
+ __u32 height;
+ struct v4l2_fract interval;
+ __u32 reserved[9];
+};
+
+/**
+ * struct v4l2_subdev_selection - selection info
+ *
+ * @which: either V4L2_SUBDEV_FORMAT_ACTIVE or V4L2_SUBDEV_FORMAT_TRY
+ * @pad: pad number, as reported by the media API
+ * @target: Selection target, used to choose one of possible rectangles,
+ * defined in v4l2-common.h; V4L2_SEL_TGT_* .
+ * @flags: constraint flags, defined in v4l2-common.h; V4L2_SEL_FLAG_*.
+ * @r: coordinates of the selection window
+ * @reserved: for future use, set to zero for now
+ *
+ * Hardware may use multiple helper windows to process a video stream.
+ * The structure is used to exchange this selection areas between
+ * an application and a driver.
+ */
+struct v4l2_subdev_selection {
+ __u32 which;
+ __u32 pad;
+ __u32 target;
+ __u32 flags;
+ struct v4l2_rect r;
+ __u32 reserved[8];
+};
+
+struct v4l2_subdev_edid {
+ __u32 pad;
+ __u32 start_block;
+ __u32 blocks;
+ __u32 reserved[5];
+ __u8 __user *edid;
+};
+
+#define VIDIOC_SUBDEV_G_FMT _IOWR('V', 4, struct v4l2_subdev_format)
+#define VIDIOC_SUBDEV_S_FMT _IOWR('V', 5, struct v4l2_subdev_format)
+#define VIDIOC_SUBDEV_G_FRAME_INTERVAL \
+ _IOWR('V', 21, struct v4l2_subdev_frame_interval)
+#define VIDIOC_SUBDEV_S_FRAME_INTERVAL \
+ _IOWR('V', 22, struct v4l2_subdev_frame_interval)
+#define VIDIOC_SUBDEV_ENUM_MBUS_CODE \
+ _IOWR('V', 2, struct v4l2_subdev_mbus_code_enum)
+#define VIDIOC_SUBDEV_ENUM_FRAME_SIZE \
+ _IOWR('V', 74, struct v4l2_subdev_frame_size_enum)
+#define VIDIOC_SUBDEV_ENUM_FRAME_INTERVAL \
+ _IOWR('V', 75, struct v4l2_subdev_frame_interval_enum)
+#define VIDIOC_SUBDEV_G_CROP _IOWR('V', 59, struct v4l2_subdev_crop)
+#define VIDIOC_SUBDEV_S_CROP _IOWR('V', 60, struct v4l2_subdev_crop)
+#define VIDIOC_SUBDEV_G_SELECTION \
+ _IOWR('V', 61, struct v4l2_subdev_selection)
+#define VIDIOC_SUBDEV_S_SELECTION \
+ _IOWR('V', 62, struct v4l2_subdev_selection)
+#define VIDIOC_SUBDEV_G_EDID _IOWR('V', 40, struct v4l2_subdev_edid)
+#define VIDIOC_SUBDEV_S_EDID _IOWR('V', 41, struct v4l2_subdev_edid)
+
+#endif
diff --git a/kernel-headers/uapi/linux/videodev2.h b/kernel-headers/uapi/linux/videodev2.h
new file mode 100644
index 0000000..6ae7bbe
--- /dev/null
+++ b/kernel-headers/uapi/linux/videodev2.h
@@ -0,0 +1,1966 @@
+/*
+ * Video for Linux Two header file
+ *
+ * Copyright (C) 1999-2012 the contributors
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * Alternatively you can redistribute this file under the terms of the
+ * BSD license as stated below:
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in
+ * the documentation and/or other materials provided with the
+ * distribution.
+ * 3. The names of its contributors may not be used to endorse or promote
+ * products derived from this software without specific prior written
+ * permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+ * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+ * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+ * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * Header file for v4l or V4L2 drivers and applications
+ * with public API.
+ * All kernel-specific stuff were moved to media/v4l2-dev.h, so
+ * no #if __KERNEL tests are allowed here
+ *
+ * See http://linuxtv.org for more info
+ *
+ * Author: Bill Dirks <bill@thedirks.org>
+ * Justin Schoeman
+ * Hans Verkuil <hverkuil@xs4all.nl>
+ * et al.
+ */
+#ifndef _UAPI__LINUX_VIDEODEV2_H
+#define _UAPI__LINUX_VIDEODEV2_H
+
+#ifndef __KERNEL__
+#include <sys/time.h>
+#endif
+#include <linux/compiler.h>
+#include <linux/ioctl.h>
+#include <linux/types.h>
+#include <linux/v4l2-common.h>
+#include <linux/v4l2-controls.h>
+
+/*
+ * Common stuff for both V4L1 and V4L2
+ * Moved from videodev.h
+ */
+#define VIDEO_MAX_FRAME 32
+#define VIDEO_MAX_PLANES 8
+
+/*
+ * M I S C E L L A N E O U S
+ */
+
+/* Four-character-code (FOURCC) */
+#define v4l2_fourcc(a, b, c, d)\
+ ((__u32)(a) | ((__u32)(b) << 8) | ((__u32)(c) << 16) | ((__u32)(d) << 24))
+
+/*
+ * E N U M S
+ */
+enum v4l2_field {
+ V4L2_FIELD_ANY = 0, /* driver can choose from none,
+ top, bottom, interlaced
+ depending on whatever it thinks
+ is approximate ... */
+ V4L2_FIELD_NONE = 1, /* this device has no fields ... */
+ V4L2_FIELD_TOP = 2, /* top field only */
+ V4L2_FIELD_BOTTOM = 3, /* bottom field only */
+ V4L2_FIELD_INTERLACED = 4, /* both fields interlaced */
+ V4L2_FIELD_SEQ_TB = 5, /* both fields sequential into one
+ buffer, top-bottom order */
+ V4L2_FIELD_SEQ_BT = 6, /* same as above + bottom-top order */
+ V4L2_FIELD_ALTERNATE = 7, /* both fields alternating into
+ separate buffers */
+ V4L2_FIELD_INTERLACED_TB = 8, /* both fields interlaced, top field
+ first and the top field is
+ transmitted first */
+ V4L2_FIELD_INTERLACED_BT = 9, /* both fields interlaced, top field
+ first and the bottom field is
+ transmitted first */
+};
+#define V4L2_FIELD_HAS_TOP(field) \
+ ((field) == V4L2_FIELD_TOP ||\
+ (field) == V4L2_FIELD_INTERLACED ||\
+ (field) == V4L2_FIELD_INTERLACED_TB ||\
+ (field) == V4L2_FIELD_INTERLACED_BT ||\
+ (field) == V4L2_FIELD_SEQ_TB ||\
+ (field) == V4L2_FIELD_SEQ_BT)
+#define V4L2_FIELD_HAS_BOTTOM(field) \
+ ((field) == V4L2_FIELD_BOTTOM ||\
+ (field) == V4L2_FIELD_INTERLACED ||\
+ (field) == V4L2_FIELD_INTERLACED_TB ||\
+ (field) == V4L2_FIELD_INTERLACED_BT ||\
+ (field) == V4L2_FIELD_SEQ_TB ||\
+ (field) == V4L2_FIELD_SEQ_BT)
+#define V4L2_FIELD_HAS_BOTH(field) \
+ ((field) == V4L2_FIELD_INTERLACED ||\
+ (field) == V4L2_FIELD_INTERLACED_TB ||\
+ (field) == V4L2_FIELD_INTERLACED_BT ||\
+ (field) == V4L2_FIELD_SEQ_TB ||\
+ (field) == V4L2_FIELD_SEQ_BT)
+
+enum v4l2_buf_type {
+ V4L2_BUF_TYPE_VIDEO_CAPTURE = 1,
+ V4L2_BUF_TYPE_VIDEO_OUTPUT = 2,
+ V4L2_BUF_TYPE_VIDEO_OVERLAY = 3,
+ V4L2_BUF_TYPE_VBI_CAPTURE = 4,
+ V4L2_BUF_TYPE_VBI_OUTPUT = 5,
+ V4L2_BUF_TYPE_SLICED_VBI_CAPTURE = 6,
+ V4L2_BUF_TYPE_SLICED_VBI_OUTPUT = 7,
+#if 1
+ /* Experimental */
+ V4L2_BUF_TYPE_VIDEO_OUTPUT_OVERLAY = 8,
+#endif
+ V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE = 9,
+ V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE = 10,
+ /* Deprecated, do not use */
+ V4L2_BUF_TYPE_PRIVATE = 0x80,
+};
+
+#define V4L2_TYPE_IS_MULTIPLANAR(type) \
+ ((type) == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE \
+ || (type) == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)
+
+#define V4L2_TYPE_IS_OUTPUT(type) \
+ ((type) == V4L2_BUF_TYPE_VIDEO_OUTPUT \
+ || (type) == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE \
+ || (type) == V4L2_BUF_TYPE_VIDEO_OVERLAY \
+ || (type) == V4L2_BUF_TYPE_VIDEO_OUTPUT_OVERLAY \
+ || (type) == V4L2_BUF_TYPE_VBI_OUTPUT \
+ || (type) == V4L2_BUF_TYPE_SLICED_VBI_OUTPUT)
+
+enum v4l2_tuner_type {
+ V4L2_TUNER_RADIO = 1,
+ V4L2_TUNER_ANALOG_TV = 2,
+ V4L2_TUNER_DIGITAL_TV = 3,
+};
+
+enum v4l2_memory {
+ V4L2_MEMORY_MMAP = 1,
+ V4L2_MEMORY_USERPTR = 2,
+ V4L2_MEMORY_OVERLAY = 3,
+ V4L2_MEMORY_DMABUF = 4,
+};
+
+/* see also http://vektor.theorem.ca/graphics/ycbcr/ */
+enum v4l2_colorspace {
+ /* ITU-R 601 -- broadcast NTSC/PAL */
+ V4L2_COLORSPACE_SMPTE170M = 1,
+
+ /* 1125-Line (US) HDTV */
+ V4L2_COLORSPACE_SMPTE240M = 2,
+
+ /* HD and modern captures. */
+ V4L2_COLORSPACE_REC709 = 3,
+
+ /* broken BT878 extents (601, luma range 16-253 instead of 16-235) */
+ V4L2_COLORSPACE_BT878 = 4,
+
+ /* These should be useful. Assume 601 extents. */
+ V4L2_COLORSPACE_470_SYSTEM_M = 5,
+ V4L2_COLORSPACE_470_SYSTEM_BG = 6,
+
+ /* I know there will be cameras that send this. So, this is
+ * unspecified chromaticities and full 0-255 on each of the
+ * Y'CbCr components
+ */
+ V4L2_COLORSPACE_JPEG = 7,
+
+ /* For RGB colourspaces, this is probably a good start. */
+ V4L2_COLORSPACE_SRGB = 8,
+};
+
+enum v4l2_priority {
+ V4L2_PRIORITY_UNSET = 0, /* not initialized */
+ V4L2_PRIORITY_BACKGROUND = 1,
+ V4L2_PRIORITY_INTERACTIVE = 2,
+ V4L2_PRIORITY_RECORD = 3,
+ V4L2_PRIORITY_DEFAULT = V4L2_PRIORITY_INTERACTIVE,
+};
+
+struct v4l2_rect {
+ __s32 left;
+ __s32 top;
+ __u32 width;
+ __u32 height;
+};
+
+struct v4l2_fract {
+ __u32 numerator;
+ __u32 denominator;
+};
+
+/**
+ * struct v4l2_capability - Describes V4L2 device caps returned by VIDIOC_QUERYCAP
+ *
+ * @driver: name of the driver module (e.g. "bttv")
+ * @card: name of the card (e.g. "Hauppauge WinTV")
+ * @bus_info: name of the bus (e.g. "PCI:" + pci_name(pci_dev) )
+ * @version: KERNEL_VERSION
+ * @capabilities: capabilities of the physical device as a whole
+ * @device_caps: capabilities accessed via this particular device (node)
+ * @reserved: reserved fields for future extensions
+ */
+struct v4l2_capability {
+ __u8 driver[16];
+ __u8 card[32];
+ __u8 bus_info[32];
+ __u32 version;
+ __u32 capabilities;
+ __u32 device_caps;
+ __u32 reserved[3];
+};
+
+/* Values for 'capabilities' field */
+#define V4L2_CAP_VIDEO_CAPTURE 0x00000001 /* Is a video capture device */
+#define V4L2_CAP_VIDEO_OUTPUT 0x00000002 /* Is a video output device */
+#define V4L2_CAP_VIDEO_OVERLAY 0x00000004 /* Can do video overlay */
+#define V4L2_CAP_VBI_CAPTURE 0x00000010 /* Is a raw VBI capture device */
+#define V4L2_CAP_VBI_OUTPUT 0x00000020 /* Is a raw VBI output device */
+#define V4L2_CAP_SLICED_VBI_CAPTURE 0x00000040 /* Is a sliced VBI capture device */
+#define V4L2_CAP_SLICED_VBI_OUTPUT 0x00000080 /* Is a sliced VBI output device */
+#define V4L2_CAP_RDS_CAPTURE 0x00000100 /* RDS data capture */
+#define V4L2_CAP_VIDEO_OUTPUT_OVERLAY 0x00000200 /* Can do video output overlay */
+#define V4L2_CAP_HW_FREQ_SEEK 0x00000400 /* Can do hardware frequency seek */
+#define V4L2_CAP_RDS_OUTPUT 0x00000800 /* Is an RDS encoder */
+
+/* Is a video capture device that supports multiplanar formats */
+#define V4L2_CAP_VIDEO_CAPTURE_MPLANE 0x00001000
+/* Is a video output device that supports multiplanar formats */
+#define V4L2_CAP_VIDEO_OUTPUT_MPLANE 0x00002000
+/* Is a video mem-to-mem device that supports multiplanar formats */
+#define V4L2_CAP_VIDEO_M2M_MPLANE 0x00004000
+/* Is a video mem-to-mem device */
+#define V4L2_CAP_VIDEO_M2M 0x00008000
+
+#define V4L2_CAP_TUNER 0x00010000 /* has a tuner */
+#define V4L2_CAP_AUDIO 0x00020000 /* has audio support */
+#define V4L2_CAP_RADIO 0x00040000 /* is a radio device */
+#define V4L2_CAP_MODULATOR 0x00080000 /* has a modulator */
+
+#define V4L2_CAP_READWRITE 0x01000000 /* read/write systemcalls */
+#define V4L2_CAP_ASYNCIO 0x02000000 /* async I/O */
+#define V4L2_CAP_STREAMING 0x04000000 /* streaming I/O ioctls */
+
+#define V4L2_CAP_DEVICE_CAPS 0x80000000 /* sets device capabilities field */
+
+/*
+ * V I D E O I M A G E F O R M A T
+ */
+struct v4l2_pix_format {
+ __u32 width;
+ __u32 height;
+ __u32 pixelformat;
+ __u32 field; /* enum v4l2_field */
+ __u32 bytesperline; /* for padding, zero if unused */
+ __u32 sizeimage;
+ __u32 colorspace; /* enum v4l2_colorspace */
+ __u32 priv; /* private data, depends on pixelformat */
+};
+
+/* Pixel format FOURCC depth Description */
+
+/* RGB formats */
+#define V4L2_PIX_FMT_RGB332 v4l2_fourcc('R', 'G', 'B', '1') /* 8 RGB-3-3-2 */
+#define V4L2_PIX_FMT_RGB444 v4l2_fourcc('R', '4', '4', '4') /* 16 xxxxrrrr ggggbbbb */
+#define V4L2_PIX_FMT_RGB555 v4l2_fourcc('R', 'G', 'B', 'O') /* 16 RGB-5-5-5 */
+#define V4L2_PIX_FMT_RGB565 v4l2_fourcc('R', 'G', 'B', 'P') /* 16 RGB-5-6-5 */
+#define V4L2_PIX_FMT_RGB555X v4l2_fourcc('R', 'G', 'B', 'Q') /* 16 RGB-5-5-5 BE */
+#define V4L2_PIX_FMT_RGB565X v4l2_fourcc('R', 'G', 'B', 'R') /* 16 RGB-5-6-5 BE */
+#define V4L2_PIX_FMT_BGR666 v4l2_fourcc('B', 'G', 'R', 'H') /* 18 BGR-6-6-6 */
+#define V4L2_PIX_FMT_BGR24 v4l2_fourcc('B', 'G', 'R', '3') /* 24 BGR-8-8-8 */
+#define V4L2_PIX_FMT_RGB24 v4l2_fourcc('R', 'G', 'B', '3') /* 24 RGB-8-8-8 */
+#define V4L2_PIX_FMT_BGR32 v4l2_fourcc('B', 'G', 'R', '4') /* 32 BGR-8-8-8-8 */
+#define V4L2_PIX_FMT_RGB32 v4l2_fourcc('R', 'G', 'B', '4') /* 32 RGB-8-8-8-8 */
+
+/* Grey formats */
+#define V4L2_PIX_FMT_GREY v4l2_fourcc('G', 'R', 'E', 'Y') /* 8 Greyscale */
+#define V4L2_PIX_FMT_Y4 v4l2_fourcc('Y', '0', '4', ' ') /* 4 Greyscale */
+#define V4L2_PIX_FMT_Y6 v4l2_fourcc('Y', '0', '6', ' ') /* 6 Greyscale */
+#define V4L2_PIX_FMT_Y10 v4l2_fourcc('Y', '1', '0', ' ') /* 10 Greyscale */
+#define V4L2_PIX_FMT_Y12 v4l2_fourcc('Y', '1', '2', ' ') /* 12 Greyscale */
+#define V4L2_PIX_FMT_Y16 v4l2_fourcc('Y', '1', '6', ' ') /* 16 Greyscale */
+
+/* Grey bit-packed formats */
+#define V4L2_PIX_FMT_Y10BPACK v4l2_fourcc('Y', '1', '0', 'B') /* 10 Greyscale bit-packed */
+
+/* Palette formats */
+#define V4L2_PIX_FMT_PAL8 v4l2_fourcc('P', 'A', 'L', '8') /* 8 8-bit palette */
+
+/* Chrominance formats */
+#define V4L2_PIX_FMT_UV8 v4l2_fourcc('U', 'V', '8', ' ') /* 8 UV 4:4 */
+
+/* Luminance+Chrominance formats */
+#define V4L2_PIX_FMT_YVU410 v4l2_fourcc('Y', 'V', 'U', '9') /* 9 YVU 4:1:0 */
+#define V4L2_PIX_FMT_YVU420 v4l2_fourcc('Y', 'V', '1', '2') /* 12 YVU 4:2:0 */
+#define V4L2_PIX_FMT_YUYV v4l2_fourcc('Y', 'U', 'Y', 'V') /* 16 YUV 4:2:2 */
+#define V4L2_PIX_FMT_YYUV v4l2_fourcc('Y', 'Y', 'U', 'V') /* 16 YUV 4:2:2 */
+#define V4L2_PIX_FMT_YVYU v4l2_fourcc('Y', 'V', 'Y', 'U') /* 16 YVU 4:2:2 */
+#define V4L2_PIX_FMT_UYVY v4l2_fourcc('U', 'Y', 'V', 'Y') /* 16 YUV 4:2:2 */
+#define V4L2_PIX_FMT_VYUY v4l2_fourcc('V', 'Y', 'U', 'Y') /* 16 YUV 4:2:2 */
+#define V4L2_PIX_FMT_YUV422P v4l2_fourcc('4', '2', '2', 'P') /* 16 YVU422 planar */
+#define V4L2_PIX_FMT_YUV411P v4l2_fourcc('4', '1', '1', 'P') /* 16 YVU411 planar */
+#define V4L2_PIX_FMT_Y41P v4l2_fourcc('Y', '4', '1', 'P') /* 12 YUV 4:1:1 */
+#define V4L2_PIX_FMT_YUV444 v4l2_fourcc('Y', '4', '4', '4') /* 16 xxxxyyyy uuuuvvvv */
+#define V4L2_PIX_FMT_YUV555 v4l2_fourcc('Y', 'U', 'V', 'O') /* 16 YUV-5-5-5 */
+#define V4L2_PIX_FMT_YUV565 v4l2_fourcc('Y', 'U', 'V', 'P') /* 16 YUV-5-6-5 */
+#define V4L2_PIX_FMT_YUV32 v4l2_fourcc('Y', 'U', 'V', '4') /* 32 YUV-8-8-8-8 */
+#define V4L2_PIX_FMT_YUV410 v4l2_fourcc('Y', 'U', 'V', '9') /* 9 YUV 4:1:0 */
+#define V4L2_PIX_FMT_YUV420 v4l2_fourcc('Y', 'U', '1', '2') /* 12 YUV 4:2:0 */
+#define V4L2_PIX_FMT_HI240 v4l2_fourcc('H', 'I', '2', '4') /* 8 8-bit color */
+#define V4L2_PIX_FMT_HM12 v4l2_fourcc('H', 'M', '1', '2') /* 8 YUV 4:2:0 16x16 macroblocks */
+#define V4L2_PIX_FMT_M420 v4l2_fourcc('M', '4', '2', '0') /* 12 YUV 4:2:0 2 lines y, 1 line uv interleaved */
+
+/* two planes -- one Y, one Cr + Cb interleaved */
+#define V4L2_PIX_FMT_NV12 v4l2_fourcc('N', 'V', '1', '2') /* 12 Y/CbCr 4:2:0 */
+#define V4L2_PIX_FMT_NV21 v4l2_fourcc('N', 'V', '2', '1') /* 12 Y/CrCb 4:2:0 */
+#define V4L2_PIX_FMT_NV16 v4l2_fourcc('N', 'V', '1', '6') /* 16 Y/CbCr 4:2:2 */
+#define V4L2_PIX_FMT_NV61 v4l2_fourcc('N', 'V', '6', '1') /* 16 Y/CrCb 4:2:2 */
+#define V4L2_PIX_FMT_NV24 v4l2_fourcc('N', 'V', '2', '4') /* 24 Y/CbCr 4:4:4 */
+#define V4L2_PIX_FMT_NV42 v4l2_fourcc('N', 'V', '4', '2') /* 24 Y/CrCb 4:4:4 */
+
+/* two non contiguous planes - one Y, one Cr + Cb interleaved */
+#define V4L2_PIX_FMT_NV12M v4l2_fourcc('N', 'M', '1', '2') /* 12 Y/CbCr 4:2:0 */
+#define V4L2_PIX_FMT_NV21M v4l2_fourcc('N', 'M', '2', '1') /* 21 Y/CrCb 4:2:0 */
+#define V4L2_PIX_FMT_NV16M v4l2_fourcc('N', 'M', '1', '6') /* 16 Y/CbCr 4:2:2 */
+#define V4L2_PIX_FMT_NV61M v4l2_fourcc('N', 'M', '6', '1') /* 16 Y/CrCb 4:2:2 */
+#define V4L2_PIX_FMT_NV12MT v4l2_fourcc('T', 'M', '1', '2') /* 12 Y/CbCr 4:2:0 64x32 macroblocks */
+#define V4L2_PIX_FMT_NV12MT_16X16 v4l2_fourcc('V', 'M', '1', '2') /* 12 Y/CbCr 4:2:0 16x16 macroblocks */
+
+/* three non contiguous planes - Y, Cb, Cr */
+#define V4L2_PIX_FMT_YUV420M v4l2_fourcc('Y', 'M', '1', '2') /* 12 YUV420 planar */
+#define V4L2_PIX_FMT_YVU420M v4l2_fourcc('Y', 'M', '2', '1') /* 12 YVU420 planar */
+
+/* Bayer formats - see http://www.siliconimaging.com/RGB%20Bayer.htm */
+#define V4L2_PIX_FMT_SBGGR8 v4l2_fourcc('B', 'A', '8', '1') /* 8 BGBG.. GRGR.. */
+#define V4L2_PIX_FMT_SGBRG8 v4l2_fourcc('G', 'B', 'R', 'G') /* 8 GBGB.. RGRG.. */
+#define V4L2_PIX_FMT_SGRBG8 v4l2_fourcc('G', 'R', 'B', 'G') /* 8 GRGR.. BGBG.. */
+#define V4L2_PIX_FMT_SRGGB8 v4l2_fourcc('R', 'G', 'G', 'B') /* 8 RGRG.. GBGB.. */
+#define V4L2_PIX_FMT_SBGGR10 v4l2_fourcc('B', 'G', '1', '0') /* 10 BGBG.. GRGR.. */
+#define V4L2_PIX_FMT_SGBRG10 v4l2_fourcc('G', 'B', '1', '0') /* 10 GBGB.. RGRG.. */
+#define V4L2_PIX_FMT_SGRBG10 v4l2_fourcc('B', 'A', '1', '0') /* 10 GRGR.. BGBG.. */
+#define V4L2_PIX_FMT_SRGGB10 v4l2_fourcc('R', 'G', '1', '0') /* 10 RGRG.. GBGB.. */
+#define V4L2_PIX_FMT_SBGGR12 v4l2_fourcc('B', 'G', '1', '2') /* 12 BGBG.. GRGR.. */
+#define V4L2_PIX_FMT_SGBRG12 v4l2_fourcc('G', 'B', '1', '2') /* 12 GBGB.. RGRG.. */
+#define V4L2_PIX_FMT_SGRBG12 v4l2_fourcc('B', 'A', '1', '2') /* 12 GRGR.. BGBG.. */
+#define V4L2_PIX_FMT_SRGGB12 v4l2_fourcc('R', 'G', '1', '2') /* 12 RGRG.. GBGB.. */
+ /* 10bit raw bayer a-law compressed to 8 bits */
+#define V4L2_PIX_FMT_SBGGR10ALAW8 v4l2_fourcc('a', 'B', 'A', '8')
+#define V4L2_PIX_FMT_SGBRG10ALAW8 v4l2_fourcc('a', 'G', 'A', '8')
+#define V4L2_PIX_FMT_SGRBG10ALAW8 v4l2_fourcc('a', 'g', 'A', '8')
+#define V4L2_PIX_FMT_SRGGB10ALAW8 v4l2_fourcc('a', 'R', 'A', '8')
+ /* 10bit raw bayer DPCM compressed to 8 bits */
+#define V4L2_PIX_FMT_SBGGR10DPCM8 v4l2_fourcc('b', 'B', 'A', '8')
+#define V4L2_PIX_FMT_SGBRG10DPCM8 v4l2_fourcc('b', 'G', 'A', '8')
+#define V4L2_PIX_FMT_SGRBG10DPCM8 v4l2_fourcc('B', 'D', '1', '0')
+#define V4L2_PIX_FMT_SRGGB10DPCM8 v4l2_fourcc('b', 'R', 'A', '8')
+ /*
+ * 10bit raw bayer, expanded to 16 bits
+ * xxxxrrrrrrrrrrxxxxgggggggggg xxxxggggggggggxxxxbbbbbbbbbb...
+ */
+#define V4L2_PIX_FMT_SBGGR16 v4l2_fourcc('B', 'Y', 'R', '2') /* 16 BGBG.. GRGR.. */
+
+/* compressed formats */
+#define V4L2_PIX_FMT_MJPEG v4l2_fourcc('M', 'J', 'P', 'G') /* Motion-JPEG */
+#define V4L2_PIX_FMT_JPEG v4l2_fourcc('J', 'P', 'E', 'G') /* JFIF JPEG */
+#define V4L2_PIX_FMT_DV v4l2_fourcc('d', 'v', 's', 'd') /* 1394 */
+#define V4L2_PIX_FMT_MPEG v4l2_fourcc('M', 'P', 'E', 'G') /* MPEG-1/2/4 Multiplexed */
+#define V4L2_PIX_FMT_H264 v4l2_fourcc('H', '2', '6', '4') /* H264 with start codes */
+#define V4L2_PIX_FMT_H264_NO_SC v4l2_fourcc('A', 'V', 'C', '1') /* H264 without start codes */
+#define V4L2_PIX_FMT_H264_MVC v4l2_fourcc('M', '2', '6', '4') /* H264 MVC */
+#define V4L2_PIX_FMT_H263 v4l2_fourcc('H', '2', '6', '3') /* H263 */
+#define V4L2_PIX_FMT_MPEG1 v4l2_fourcc('M', 'P', 'G', '1') /* MPEG-1 ES */
+#define V4L2_PIX_FMT_MPEG2 v4l2_fourcc('M', 'P', 'G', '2') /* MPEG-2 ES */
+#define V4L2_PIX_FMT_MPEG4 v4l2_fourcc('M', 'P', 'G', '4') /* MPEG-4 part 2 ES */
+#define V4L2_PIX_FMT_XVID v4l2_fourcc('X', 'V', 'I', 'D') /* Xvid */
+#define V4L2_PIX_FMT_VC1_ANNEX_G v4l2_fourcc('V', 'C', '1', 'G') /* SMPTE 421M Annex G compliant stream */
+#define V4L2_PIX_FMT_VC1_ANNEX_L v4l2_fourcc('V', 'C', '1', 'L') /* SMPTE 421M Annex L compliant stream */
+#define V4L2_PIX_FMT_VP8 v4l2_fourcc('V', 'P', '8', '0') /* VP8 */
+
+/* Vendor-specific formats */
+#define V4L2_PIX_FMT_CPIA1 v4l2_fourcc('C', 'P', 'I', 'A') /* cpia1 YUV */
+#define V4L2_PIX_FMT_WNVA v4l2_fourcc('W', 'N', 'V', 'A') /* Winnov hw compress */
+#define V4L2_PIX_FMT_SN9C10X v4l2_fourcc('S', '9', '1', '0') /* SN9C10x compression */
+#define V4L2_PIX_FMT_SN9C20X_I420 v4l2_fourcc('S', '9', '2', '0') /* SN9C20x YUV 4:2:0 */
+#define V4L2_PIX_FMT_PWC1 v4l2_fourcc('P', 'W', 'C', '1') /* pwc older webcam */
+#define V4L2_PIX_FMT_PWC2 v4l2_fourcc('P', 'W', 'C', '2') /* pwc newer webcam */
+#define V4L2_PIX_FMT_ET61X251 v4l2_fourcc('E', '6', '2', '5') /* ET61X251 compression */
+#define V4L2_PIX_FMT_SPCA501 v4l2_fourcc('S', '5', '0', '1') /* YUYV per line */
+#define V4L2_PIX_FMT_SPCA505 v4l2_fourcc('S', '5', '0', '5') /* YYUV per line */
+#define V4L2_PIX_FMT_SPCA508 v4l2_fourcc('S', '5', '0', '8') /* YUVY per line */
+#define V4L2_PIX_FMT_SPCA561 v4l2_fourcc('S', '5', '6', '1') /* compressed GBRG bayer */
+#define V4L2_PIX_FMT_PAC207 v4l2_fourcc('P', '2', '0', '7') /* compressed BGGR bayer */
+#define V4L2_PIX_FMT_MR97310A v4l2_fourcc('M', '3', '1', '0') /* compressed BGGR bayer */
+#define V4L2_PIX_FMT_JL2005BCD v4l2_fourcc('J', 'L', '2', '0') /* compressed RGGB bayer */
+#define V4L2_PIX_FMT_SN9C2028 v4l2_fourcc('S', 'O', 'N', 'X') /* compressed GBRG bayer */
+#define V4L2_PIX_FMT_SQ905C v4l2_fourcc('9', '0', '5', 'C') /* compressed RGGB bayer */
+#define V4L2_PIX_FMT_PJPG v4l2_fourcc('P', 'J', 'P', 'G') /* Pixart 73xx JPEG */
+#define V4L2_PIX_FMT_OV511 v4l2_fourcc('O', '5', '1', '1') /* ov511 JPEG */
+#define V4L2_PIX_FMT_OV518 v4l2_fourcc('O', '5', '1', '8') /* ov518 JPEG */
+#define V4L2_PIX_FMT_STV0680 v4l2_fourcc('S', '6', '8', '0') /* stv0680 bayer */
+#define V4L2_PIX_FMT_TM6000 v4l2_fourcc('T', 'M', '6', '0') /* tm5600/tm60x0 */
+#define V4L2_PIX_FMT_CIT_YYVYUY v4l2_fourcc('C', 'I', 'T', 'V') /* one line of Y then 1 line of VYUY */
+#define V4L2_PIX_FMT_KONICA420 v4l2_fourcc('K', 'O', 'N', 'I') /* YUV420 planar in blocks of 256 pixels */
+#define V4L2_PIX_FMT_JPGL v4l2_fourcc('J', 'P', 'G', 'L') /* JPEG-Lite */
+#define V4L2_PIX_FMT_SE401 v4l2_fourcc('S', '4', '0', '1') /* se401 janggu compressed rgb */
+#define V4L2_PIX_FMT_S5C_UYVY_JPG v4l2_fourcc('S', '5', 'C', 'I') /* S5C73M3 interleaved UYVY/JPEG */
+
+/*
+ * F O R M A T E N U M E R A T I O N
+ */
+struct v4l2_fmtdesc {
+ __u32 index; /* Format number */
+ __u32 type; /* enum v4l2_buf_type */
+ __u32 flags;
+ __u8 description[32]; /* Description string */
+ __u32 pixelformat; /* Format fourcc */
+ __u32 reserved[4];
+};
+
+#define V4L2_FMT_FLAG_COMPRESSED 0x0001
+#define V4L2_FMT_FLAG_EMULATED 0x0002
+
+#if 1
+ /* Experimental Frame Size and frame rate enumeration */
+/*
+ * F R A M E S I Z E E N U M E R A T I O N
+ */
+enum v4l2_frmsizetypes {
+ V4L2_FRMSIZE_TYPE_DISCRETE = 1,
+ V4L2_FRMSIZE_TYPE_CONTINUOUS = 2,
+ V4L2_FRMSIZE_TYPE_STEPWISE = 3,
+};
+
+struct v4l2_frmsize_discrete {
+ __u32 width; /* Frame width [pixel] */
+ __u32 height; /* Frame height [pixel] */
+};
+
+struct v4l2_frmsize_stepwise {
+ __u32 min_width; /* Minimum frame width [pixel] */
+ __u32 max_width; /* Maximum frame width [pixel] */
+ __u32 step_width; /* Frame width step size [pixel] */
+ __u32 min_height; /* Minimum frame height [pixel] */
+ __u32 max_height; /* Maximum frame height [pixel] */
+ __u32 step_height; /* Frame height step size [pixel] */
+};
+
+struct v4l2_frmsizeenum {
+ __u32 index; /* Frame size number */
+ __u32 pixel_format; /* Pixel format */
+ __u32 type; /* Frame size type the device supports. */
+
+ union { /* Frame size */
+ struct v4l2_frmsize_discrete discrete;
+ struct v4l2_frmsize_stepwise stepwise;
+ };
+
+ __u32 reserved[2]; /* Reserved space for future use */
+};
+
+/*
+ * F R A M E R A T E E N U M E R A T I O N
+ */
+enum v4l2_frmivaltypes {
+ V4L2_FRMIVAL_TYPE_DISCRETE = 1,
+ V4L2_FRMIVAL_TYPE_CONTINUOUS = 2,
+ V4L2_FRMIVAL_TYPE_STEPWISE = 3,
+};
+
+struct v4l2_frmival_stepwise {
+ struct v4l2_fract min; /* Minimum frame interval [s] */
+ struct v4l2_fract max; /* Maximum frame interval [s] */
+ struct v4l2_fract step; /* Frame interval step size [s] */
+};
+
+struct v4l2_frmivalenum {
+ __u32 index; /* Frame format index */
+ __u32 pixel_format; /* Pixel format */
+ __u32 width; /* Frame width */
+ __u32 height; /* Frame height */
+ __u32 type; /* Frame interval type the device supports. */
+
+ union { /* Frame interval */
+ struct v4l2_fract discrete;
+ struct v4l2_frmival_stepwise stepwise;
+ };
+
+ __u32 reserved[2]; /* Reserved space for future use */
+};
+#endif
+
+/*
+ * T I M E C O D E
+ */
+struct v4l2_timecode {
+ __u32 type;
+ __u32 flags;
+ __u8 frames;
+ __u8 seconds;
+ __u8 minutes;
+ __u8 hours;
+ __u8 userbits[4];
+};
+
+/* Type */
+#define V4L2_TC_TYPE_24FPS 1
+#define V4L2_TC_TYPE_25FPS 2
+#define V4L2_TC_TYPE_30FPS 3
+#define V4L2_TC_TYPE_50FPS 4
+#define V4L2_TC_TYPE_60FPS 5
+
+/* Flags */
+#define V4L2_TC_FLAG_DROPFRAME 0x0001 /* "drop-frame" mode */
+#define V4L2_TC_FLAG_COLORFRAME 0x0002
+#define V4L2_TC_USERBITS_field 0x000C
+#define V4L2_TC_USERBITS_USERDEFINED 0x0000
+#define V4L2_TC_USERBITS_8BITCHARS 0x0008
+/* The above is based on SMPTE timecodes */
+
+struct v4l2_jpegcompression {
+ int quality;
+
+ int APPn; /* Number of APP segment to be written,
+ * must be 0..15 */
+ int APP_len; /* Length of data in JPEG APPn segment */
+ char APP_data[60]; /* Data in the JPEG APPn segment. */
+
+ int COM_len; /* Length of data in JPEG COM segment */
+ char COM_data[60]; /* Data in JPEG COM segment */
+
+ __u32 jpeg_markers; /* Which markers should go into the JPEG
+ * output. Unless you exactly know what
+ * you do, leave them untouched.
+ * Including less markers will make the
+ * resulting code smaller, but there will
+ * be fewer applications which can read it.
+ * The presence of the APP and COM marker
+ * is influenced by APP_len and COM_len
+ * ONLY, not by this property! */
+
+#define V4L2_JPEG_MARKER_DHT (1<<3) /* Define Huffman Tables */
+#define V4L2_JPEG_MARKER_DQT (1<<4) /* Define Quantization Tables */
+#define V4L2_JPEG_MARKER_DRI (1<<5) /* Define Restart Interval */
+#define V4L2_JPEG_MARKER_COM (1<<6) /* Comment segment */
+#define V4L2_JPEG_MARKER_APP (1<<7) /* App segment, driver will
+ * always use APP0 */
+};
+
+/*
+ * M E M O R Y - M A P P I N G B U F F E R S
+ */
+struct v4l2_requestbuffers {
+ __u32 count;
+ __u32 type; /* enum v4l2_buf_type */
+ __u32 memory; /* enum v4l2_memory */
+ __u32 reserved[2];
+};
+
+/**
+ * struct v4l2_plane - plane info for multi-planar buffers
+ * @bytesused: number of bytes occupied by data in the plane (payload)
+ * @length: size of this plane (NOT the payload) in bytes
+ * @mem_offset: when memory in the associated struct v4l2_buffer is
+ * V4L2_MEMORY_MMAP, equals the offset from the start of
+ * the device memory for this plane (or is a "cookie" that
+ * should be passed to mmap() called on the video node)
+ * @userptr: when memory is V4L2_MEMORY_USERPTR, a userspace pointer
+ * pointing to this plane
+ * @fd: when memory is V4L2_MEMORY_DMABUF, a userspace file
+ * descriptor associated with this plane
+ * @data_offset: offset in the plane to the start of data; usually 0,
+ * unless there is a header in front of the data
+ *
+ * Multi-planar buffers consist of one or more planes, e.g. an YCbCr buffer
+ * with two planes can have one plane for Y, and another for interleaved CbCr
+ * components. Each plane can reside in a separate memory buffer, or even in
+ * a completely separate memory node (e.g. in embedded devices).
+ */
+struct v4l2_plane {
+ __u32 bytesused;
+ __u32 length;
+ union {
+ __u32 mem_offset;
+ unsigned long userptr;
+ __s32 fd;
+ } m;
+ __u32 data_offset;
+ __u32 reserved[11];
+};
+
+/**
+ * struct v4l2_buffer - video buffer info
+ * @index: id number of the buffer
+ * @type: enum v4l2_buf_type; buffer type (type == *_MPLANE for
+ * multiplanar buffers);
+ * @bytesused: number of bytes occupied by data in the buffer (payload);
+ * unused (set to 0) for multiplanar buffers
+ * @flags: buffer informational flags
+ * @field: enum v4l2_field; field order of the image in the buffer
+ * @timestamp: frame timestamp
+ * @timecode: frame timecode
+ * @sequence: sequence count of this frame
+ * @memory: enum v4l2_memory; the method, in which the actual video data is
+ * passed
+ * @offset: for non-multiplanar buffers with memory == V4L2_MEMORY_MMAP;
+ * offset from the start of the device memory for this plane,
+ * (or a "cookie" that should be passed to mmap() as offset)
+ * @userptr: for non-multiplanar buffers with memory == V4L2_MEMORY_USERPTR;
+ * a userspace pointer pointing to this buffer
+ * @fd: for non-multiplanar buffers with memory == V4L2_MEMORY_DMABUF;
+ * a userspace file descriptor associated with this buffer
+ * @planes: for multiplanar buffers; userspace pointer to the array of plane
+ * info structs for this buffer
+ * @length: size in bytes of the buffer (NOT its payload) for single-plane
+ * buffers (when type != *_MPLANE); number of elements in the
+ * planes array for multi-plane buffers
+ * @input: input number from which the video data has has been captured
+ *
+ * Contains data exchanged by application and driver using one of the Streaming
+ * I/O methods.
+ */
+struct v4l2_buffer {
+ __u32 index;
+ __u32 type;
+ __u32 bytesused;
+ __u32 flags;
+ __u32 field;
+ struct timeval timestamp;
+ struct v4l2_timecode timecode;
+ __u32 sequence;
+
+ /* memory location */
+ __u32 memory;
+ union {
+ __u32 offset;
+ unsigned long userptr;
+ struct v4l2_plane *planes;
+ __s32 fd;
+ } m;
+ __u32 length;
+ __u32 reserved2;
+ __u32 reserved;
+};
+
+/* Flags for 'flags' field */
+#define V4L2_BUF_FLAG_MAPPED 0x0001 /* Buffer is mapped (flag) */
+#define V4L2_BUF_FLAG_QUEUED 0x0002 /* Buffer is queued for processing */
+#define V4L2_BUF_FLAG_DONE 0x0004 /* Buffer is ready */
+#define V4L2_BUF_FLAG_KEYFRAME 0x0008 /* Image is a keyframe (I-frame) */
+#define V4L2_BUF_FLAG_PFRAME 0x0010 /* Image is a P-frame */
+#define V4L2_BUF_FLAG_BFRAME 0x0020 /* Image is a B-frame */
+/* Buffer is ready, but the data contained within is corrupted. */
+#define V4L2_BUF_FLAG_ERROR 0x0040
+#define V4L2_BUF_FLAG_TIMECODE 0x0100 /* timecode field is valid */
+#define V4L2_BUF_FLAG_PREPARED 0x0400 /* Buffer is prepared for queuing */
+/* Cache handling flags */
+#define V4L2_BUF_FLAG_NO_CACHE_INVALIDATE 0x0800
+#define V4L2_BUF_FLAG_NO_CACHE_CLEAN 0x1000
+/* Timestamp type */
+#define V4L2_BUF_FLAG_TIMESTAMP_MASK 0xe000
+#define V4L2_BUF_FLAG_TIMESTAMP_UNKNOWN 0x0000
+#define V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC 0x2000
+#define V4L2_BUF_FLAG_TIMESTAMP_COPY 0x4000
+
+/**
+ * struct v4l2_exportbuffer - export of video buffer as DMABUF file descriptor
+ *
+ * @index: id number of the buffer
+ * @type: enum v4l2_buf_type; buffer type (type == *_MPLANE for
+ * multiplanar buffers);
+ * @plane: index of the plane to be exported, 0 for single plane queues
+ * @flags: flags for newly created file, currently only O_CLOEXEC is
+ * supported, refer to manual of open syscall for more details
+ * @fd: file descriptor associated with DMABUF (set by driver)
+ *
+ * Contains data used for exporting a video buffer as DMABUF file descriptor.
+ * The buffer is identified by a 'cookie' returned by VIDIOC_QUERYBUF
+ * (identical to the cookie used to mmap() the buffer to userspace). All
+ * reserved fields must be set to zero. The field reserved0 is expected to
+ * become a structure 'type' allowing an alternative layout of the structure
+ * content. Therefore this field should not be used for any other extensions.
+ */
+struct v4l2_exportbuffer {
+ __u32 type; /* enum v4l2_buf_type */
+ __u32 index;
+ __u32 plane;
+ __u32 flags;
+ __s32 fd;
+ __u32 reserved[11];
+};
+
+/*
+ * O V E R L A Y P R E V I E W
+ */
+struct v4l2_framebuffer {
+ __u32 capability;
+ __u32 flags;
+/* FIXME: in theory we should pass something like PCI device + memory
+ * region + offset instead of some physical address */
+ void *base;
+ struct v4l2_pix_format fmt;
+};
+/* Flags for the 'capability' field. Read only */
+#define V4L2_FBUF_CAP_EXTERNOVERLAY 0x0001
+#define V4L2_FBUF_CAP_CHROMAKEY 0x0002
+#define V4L2_FBUF_CAP_LIST_CLIPPING 0x0004
+#define V4L2_FBUF_CAP_BITMAP_CLIPPING 0x0008
+#define V4L2_FBUF_CAP_LOCAL_ALPHA 0x0010
+#define V4L2_FBUF_CAP_GLOBAL_ALPHA 0x0020
+#define V4L2_FBUF_CAP_LOCAL_INV_ALPHA 0x0040
+#define V4L2_FBUF_CAP_SRC_CHROMAKEY 0x0080
+/* Flags for the 'flags' field. */
+#define V4L2_FBUF_FLAG_PRIMARY 0x0001
+#define V4L2_FBUF_FLAG_OVERLAY 0x0002
+#define V4L2_FBUF_FLAG_CHROMAKEY 0x0004
+#define V4L2_FBUF_FLAG_LOCAL_ALPHA 0x0008
+#define V4L2_FBUF_FLAG_GLOBAL_ALPHA 0x0010
+#define V4L2_FBUF_FLAG_LOCAL_INV_ALPHA 0x0020
+#define V4L2_FBUF_FLAG_SRC_CHROMAKEY 0x0040
+
+struct v4l2_clip {
+ struct v4l2_rect c;
+ struct v4l2_clip __user *next;
+};
+
+struct v4l2_window {
+ struct v4l2_rect w;
+ __u32 field; /* enum v4l2_field */
+ __u32 chromakey;
+ struct v4l2_clip __user *clips;
+ __u32 clipcount;
+ void __user *bitmap;
+ __u8 global_alpha;
+};
+
+/*
+ * C A P T U R E P A R A M E T E R S
+ */
+struct v4l2_captureparm {
+ __u32 capability; /* Supported modes */
+ __u32 capturemode; /* Current mode */
+ struct v4l2_fract timeperframe; /* Time per frame in seconds */
+ __u32 extendedmode; /* Driver-specific extensions */
+ __u32 readbuffers; /* # of buffers for read */
+ __u32 reserved[4];
+};
+
+/* Flags for 'capability' and 'capturemode' fields */
+#define V4L2_MODE_HIGHQUALITY 0x0001 /* High quality imaging mode */
+#define V4L2_CAP_TIMEPERFRAME 0x1000 /* timeperframe field is supported */
+
+struct v4l2_outputparm {
+ __u32 capability; /* Supported modes */
+ __u32 outputmode; /* Current mode */
+ struct v4l2_fract timeperframe; /* Time per frame in seconds */
+ __u32 extendedmode; /* Driver-specific extensions */
+ __u32 writebuffers; /* # of buffers for write */
+ __u32 reserved[4];
+};
+
+/*
+ * I N P U T I M A G E C R O P P I N G
+ */
+struct v4l2_cropcap {
+ __u32 type; /* enum v4l2_buf_type */
+ struct v4l2_rect bounds;
+ struct v4l2_rect defrect;
+ struct v4l2_fract pixelaspect;
+};
+
+struct v4l2_crop {
+ __u32 type; /* enum v4l2_buf_type */
+ struct v4l2_rect c;
+};
+
+/**
+ * struct v4l2_selection - selection info
+ * @type: buffer type (do not use *_MPLANE types)
+ * @target: Selection target, used to choose one of possible rectangles;
+ * defined in v4l2-common.h; V4L2_SEL_TGT_* .
+ * @flags: constraints flags, defined in v4l2-common.h; V4L2_SEL_FLAG_*.
+ * @r: coordinates of selection window
+ * @reserved: for future use, rounds structure size to 64 bytes, set to zero
+ *
+ * Hardware may use multiple helper windows to process a video stream.
+ * The structure is used to exchange this selection areas between
+ * an application and a driver.
+ */
+struct v4l2_selection {
+ __u32 type;
+ __u32 target;
+ __u32 flags;
+ struct v4l2_rect r;
+ __u32 reserved[9];
+};
+
+
+/*
+ * A N A L O G V I D E O S T A N D A R D
+ */
+
+typedef __u64 v4l2_std_id;
+
+/* one bit for each */
+#define V4L2_STD_PAL_B ((v4l2_std_id)0x00000001)
+#define V4L2_STD_PAL_B1 ((v4l2_std_id)0x00000002)
+#define V4L2_STD_PAL_G ((v4l2_std_id)0x00000004)
+#define V4L2_STD_PAL_H ((v4l2_std_id)0x00000008)
+#define V4L2_STD_PAL_I ((v4l2_std_id)0x00000010)
+#define V4L2_STD_PAL_D ((v4l2_std_id)0x00000020)
+#define V4L2_STD_PAL_D1 ((v4l2_std_id)0x00000040)
+#define V4L2_STD_PAL_K ((v4l2_std_id)0x00000080)
+
+#define V4L2_STD_PAL_M ((v4l2_std_id)0x00000100)
+#define V4L2_STD_PAL_N ((v4l2_std_id)0x00000200)
+#define V4L2_STD_PAL_Nc ((v4l2_std_id)0x00000400)
+#define V4L2_STD_PAL_60 ((v4l2_std_id)0x00000800)
+
+#define V4L2_STD_NTSC_M ((v4l2_std_id)0x00001000) /* BTSC */
+#define V4L2_STD_NTSC_M_JP ((v4l2_std_id)0x00002000) /* EIA-J */
+#define V4L2_STD_NTSC_443 ((v4l2_std_id)0x00004000)
+#define V4L2_STD_NTSC_M_KR ((v4l2_std_id)0x00008000) /* FM A2 */
+
+#define V4L2_STD_SECAM_B ((v4l2_std_id)0x00010000)
+#define V4L2_STD_SECAM_D ((v4l2_std_id)0x00020000)
+#define V4L2_STD_SECAM_G ((v4l2_std_id)0x00040000)
+#define V4L2_STD_SECAM_H ((v4l2_std_id)0x00080000)
+#define V4L2_STD_SECAM_K ((v4l2_std_id)0x00100000)
+#define V4L2_STD_SECAM_K1 ((v4l2_std_id)0x00200000)
+#define V4L2_STD_SECAM_L ((v4l2_std_id)0x00400000)
+#define V4L2_STD_SECAM_LC ((v4l2_std_id)0x00800000)
+
+/* ATSC/HDTV */
+#define V4L2_STD_ATSC_8_VSB ((v4l2_std_id)0x01000000)
+#define V4L2_STD_ATSC_16_VSB ((v4l2_std_id)0x02000000)
+
+/* FIXME:
+ Although std_id is 64 bits, there is an issue on PPC32 architecture that
+ makes switch(__u64) to break. So, there's a hack on v4l2-common.c rounding
+ this value to 32 bits.
+ As, currently, the max value is for V4L2_STD_ATSC_16_VSB (30 bits wide),
+ it should work fine. However, if needed to add more than two standards,
+ v4l2-common.c should be fixed.
+ */
+
+/*
+ * Some macros to merge video standards in order to make live easier for the
+ * drivers and V4L2 applications
+ */
+
+/*
+ * "Common" NTSC/M - It should be noticed that V4L2_STD_NTSC_443 is
+ * Missing here.
+ */
+#define V4L2_STD_NTSC (V4L2_STD_NTSC_M |\
+ V4L2_STD_NTSC_M_JP |\
+ V4L2_STD_NTSC_M_KR)
+/* Secam macros */
+#define V4L2_STD_SECAM_DK (V4L2_STD_SECAM_D |\
+ V4L2_STD_SECAM_K |\
+ V4L2_STD_SECAM_K1)
+/* All Secam Standards */
+#define V4L2_STD_SECAM (V4L2_STD_SECAM_B |\
+ V4L2_STD_SECAM_G |\
+ V4L2_STD_SECAM_H |\
+ V4L2_STD_SECAM_DK |\
+ V4L2_STD_SECAM_L |\
+ V4L2_STD_SECAM_LC)
+/* PAL macros */
+#define V4L2_STD_PAL_BG (V4L2_STD_PAL_B |\
+ V4L2_STD_PAL_B1 |\
+ V4L2_STD_PAL_G)
+#define V4L2_STD_PAL_DK (V4L2_STD_PAL_D |\
+ V4L2_STD_PAL_D1 |\
+ V4L2_STD_PAL_K)
+/*
+ * "Common" PAL - This macro is there to be compatible with the old
+ * V4L1 concept of "PAL": /BGDKHI.
+ * Several PAL standards are missing here: /M, /N and /Nc
+ */
+#define V4L2_STD_PAL (V4L2_STD_PAL_BG |\
+ V4L2_STD_PAL_DK |\
+ V4L2_STD_PAL_H |\
+ V4L2_STD_PAL_I)
+/* Chroma "agnostic" standards */
+#define V4L2_STD_B (V4L2_STD_PAL_B |\
+ V4L2_STD_PAL_B1 |\
+ V4L2_STD_SECAM_B)
+#define V4L2_STD_G (V4L2_STD_PAL_G |\
+ V4L2_STD_SECAM_G)
+#define V4L2_STD_H (V4L2_STD_PAL_H |\
+ V4L2_STD_SECAM_H)
+#define V4L2_STD_L (V4L2_STD_SECAM_L |\
+ V4L2_STD_SECAM_LC)
+#define V4L2_STD_GH (V4L2_STD_G |\
+ V4L2_STD_H)
+#define V4L2_STD_DK (V4L2_STD_PAL_DK |\
+ V4L2_STD_SECAM_DK)
+#define V4L2_STD_BG (V4L2_STD_B |\
+ V4L2_STD_G)
+#define V4L2_STD_MN (V4L2_STD_PAL_M |\
+ V4L2_STD_PAL_N |\
+ V4L2_STD_PAL_Nc |\
+ V4L2_STD_NTSC)
+
+/* Standards where MTS/BTSC stereo could be found */
+#define V4L2_STD_MTS (V4L2_STD_NTSC_M |\
+ V4L2_STD_PAL_M |\
+ V4L2_STD_PAL_N |\
+ V4L2_STD_PAL_Nc)
+
+/* Standards for Countries with 60Hz Line frequency */
+#define V4L2_STD_525_60 (V4L2_STD_PAL_M |\
+ V4L2_STD_PAL_60 |\
+ V4L2_STD_NTSC |\
+ V4L2_STD_NTSC_443)
+/* Standards for Countries with 50Hz Line frequency */
+#define V4L2_STD_625_50 (V4L2_STD_PAL |\
+ V4L2_STD_PAL_N |\
+ V4L2_STD_PAL_Nc |\
+ V4L2_STD_SECAM)
+
+#define V4L2_STD_ATSC (V4L2_STD_ATSC_8_VSB |\
+ V4L2_STD_ATSC_16_VSB)
+/* Macros with none and all analog standards */
+#define V4L2_STD_UNKNOWN 0
+#define V4L2_STD_ALL (V4L2_STD_525_60 |\
+ V4L2_STD_625_50)
+
+struct v4l2_standard {
+ __u32 index;
+ v4l2_std_id id;
+ __u8 name[24];
+ struct v4l2_fract frameperiod; /* Frames, not fields */
+ __u32 framelines;
+ __u32 reserved[4];
+};
+
+/*
+ * D V B T T I M I N G S
+ */
+
+/** struct v4l2_bt_timings - BT.656/BT.1120 timing data
+ * @width: total width of the active video in pixels
+ * @height: total height of the active video in lines
+ * @interlaced: Interlaced or progressive
+ * @polarities: Positive or negative polarities
+ * @pixelclock: Pixel clock in HZ. Ex. 74.25MHz->74250000
+ * @hfrontporch:Horizontal front porch in pixels
+ * @hsync: Horizontal Sync length in pixels
+ * @hbackporch: Horizontal back porch in pixels
+ * @vfrontporch:Vertical front porch in lines
+ * @vsync: Vertical Sync length in lines
+ * @vbackporch: Vertical back porch in lines
+ * @il_vfrontporch:Vertical front porch for the even field
+ * (aka field 2) of interlaced field formats
+ * @il_vsync: Vertical Sync length for the even field
+ * (aka field 2) of interlaced field formats
+ * @il_vbackporch:Vertical back porch for the even field
+ * (aka field 2) of interlaced field formats
+ * @standards: Standards the timing belongs to
+ * @flags: Flags
+ * @reserved: Reserved fields, must be zeroed.
+ *
+ * A note regarding vertical interlaced timings: height refers to the total
+ * height of the active video frame (= two fields). The blanking timings refer
+ * to the blanking of each field. So the height of the total frame is
+ * calculated as follows:
+ *
+ * tot_height = height + vfrontporch + vsync + vbackporch +
+ * il_vfrontporch + il_vsync + il_vbackporch
+ *
+ * The active height of each field is height / 2.
+ */
+struct v4l2_bt_timings {
+ __u32 width;
+ __u32 height;
+ __u32 interlaced;
+ __u32 polarities;
+ __u64 pixelclock;
+ __u32 hfrontporch;
+ __u32 hsync;
+ __u32 hbackporch;
+ __u32 vfrontporch;
+ __u32 vsync;
+ __u32 vbackporch;
+ __u32 il_vfrontporch;
+ __u32 il_vsync;
+ __u32 il_vbackporch;
+ __u32 standards;
+ __u32 flags;
+ __u32 reserved[14];
+} __attribute__ ((packed));
+
+/* Interlaced or progressive format */
+#define V4L2_DV_PROGRESSIVE 0
+#define V4L2_DV_INTERLACED 1
+
+/* Polarities. If bit is not set, it is assumed to be negative polarity */
+#define V4L2_DV_VSYNC_POS_POL 0x00000001
+#define V4L2_DV_HSYNC_POS_POL 0x00000002
+
+/* Timings standards */
+#define V4L2_DV_BT_STD_CEA861 (1 << 0) /* CEA-861 Digital TV Profile */
+#define V4L2_DV_BT_STD_DMT (1 << 1) /* VESA Discrete Monitor Timings */
+#define V4L2_DV_BT_STD_CVT (1 << 2) /* VESA Coordinated Video Timings */
+#define V4L2_DV_BT_STD_GTF (1 << 3) /* VESA Generalized Timings Formula */
+
+/* Flags */
+
+/* CVT/GTF specific: timing uses reduced blanking (CVT) or the 'Secondary
+ GTF' curve (GTF). In both cases the horizontal and/or vertical blanking
+ intervals are reduced, allowing a higher resolution over the same
+ bandwidth. This is a read-only flag. */
+#define V4L2_DV_FL_REDUCED_BLANKING (1 << 0)
+/* CEA-861 specific: set for CEA-861 formats with a framerate of a multiple
+ of six. These formats can be optionally played at 1 / 1.001 speed.
+ This is a read-only flag. */
+#define V4L2_DV_FL_CAN_REDUCE_FPS (1 << 1)
+/* CEA-861 specific: only valid for video transmitters, the flag is cleared
+ by receivers.
+ If the framerate of the format is a multiple of six, then the pixelclock
+ used to set up the transmitter is divided by 1.001 to make it compatible
+ with 60 Hz based standards such as NTSC and PAL-M that use a framerate of
+ 29.97 Hz. Otherwise this flag is cleared. If the transmitter can't generate
+ such frequencies, then the flag will also be cleared. */
+#define V4L2_DV_FL_REDUCED_FPS (1 << 2)
+/* Specific to interlaced formats: if set, then field 1 is really one half-line
+ longer and field 2 is really one half-line shorter, so each field has
+ exactly the same number of half-lines. Whether half-lines can be detected
+ or used depends on the hardware. */
+#define V4L2_DV_FL_HALF_LINE (1 << 3)
+
+/* A few useful defines to calculate the total blanking and frame sizes */
+#define V4L2_DV_BT_BLANKING_WIDTH(bt) \
+ (bt->hfrontporch + bt->hsync + bt->hbackporch)
+#define V4L2_DV_BT_FRAME_WIDTH(bt) \
+ (bt->width + V4L2_DV_BT_BLANKING_WIDTH(bt))
+#define V4L2_DV_BT_BLANKING_HEIGHT(bt) \
+ (bt->vfrontporch + bt->vsync + bt->vbackporch + \
+ bt->il_vfrontporch + bt->il_vsync + bt->il_vbackporch)
+#define V4L2_DV_BT_FRAME_HEIGHT(bt) \
+ (bt->height + V4L2_DV_BT_BLANKING_HEIGHT(bt))
+
+/** struct v4l2_dv_timings - DV timings
+ * @type: the type of the timings
+ * @bt: BT656/1120 timings
+ */
+struct v4l2_dv_timings {
+ __u32 type;
+ union {
+ struct v4l2_bt_timings bt;
+ __u32 reserved[32];
+ };
+} __attribute__ ((packed));
+
+/* Values for the type field */
+#define V4L2_DV_BT_656_1120 0 /* BT.656/1120 timing type */
+
+
+/** struct v4l2_enum_dv_timings - DV timings enumeration
+ * @index: enumeration index
+ * @reserved: must be zeroed
+ * @timings: the timings for the given index
+ */
+struct v4l2_enum_dv_timings {
+ __u32 index;
+ __u32 reserved[3];
+ struct v4l2_dv_timings timings;
+};
+
+/** struct v4l2_bt_timings_cap - BT.656/BT.1120 timing capabilities
+ * @min_width: width in pixels
+ * @max_width: width in pixels
+ * @min_height: height in lines
+ * @max_height: height in lines
+ * @min_pixelclock: Pixel clock in HZ. Ex. 74.25MHz->74250000
+ * @max_pixelclock: Pixel clock in HZ. Ex. 74.25MHz->74250000
+ * @standards: Supported standards
+ * @capabilities: Supported capabilities
+ * @reserved: Must be zeroed
+ */
+struct v4l2_bt_timings_cap {
+ __u32 min_width;
+ __u32 max_width;
+ __u32 min_height;
+ __u32 max_height;
+ __u64 min_pixelclock;
+ __u64 max_pixelclock;
+ __u32 standards;
+ __u32 capabilities;
+ __u32 reserved[16];
+} __attribute__ ((packed));
+
+/* Supports interlaced formats */
+#define V4L2_DV_BT_CAP_INTERLACED (1 << 0)
+/* Supports progressive formats */
+#define V4L2_DV_BT_CAP_PROGRESSIVE (1 << 1)
+/* Supports CVT/GTF reduced blanking */
+#define V4L2_DV_BT_CAP_REDUCED_BLANKING (1 << 2)
+/* Supports custom formats */
+#define V4L2_DV_BT_CAP_CUSTOM (1 << 3)
+
+/** struct v4l2_dv_timings_cap - DV timings capabilities
+ * @type: the type of the timings (same as in struct v4l2_dv_timings)
+ * @bt: the BT656/1120 timings capabilities
+ */
+struct v4l2_dv_timings_cap {
+ __u32 type;
+ __u32 reserved[3];
+ union {
+ struct v4l2_bt_timings_cap bt;
+ __u32 raw_data[32];
+ };
+};
+
+
+/*
+ * V I D E O I N P U T S
+ */
+struct v4l2_input {
+ __u32 index; /* Which input */
+ __u8 name[32]; /* Label */
+ __u32 type; /* Type of input */
+ __u32 audioset; /* Associated audios (bitfield) */
+ __u32 tuner; /* enum v4l2_tuner_type */
+ v4l2_std_id std;
+ __u32 status;
+ __u32 capabilities;
+ __u32 reserved[3];
+};
+
+/* Values for the 'type' field */
+#define V4L2_INPUT_TYPE_TUNER 1
+#define V4L2_INPUT_TYPE_CAMERA 2
+
+/* field 'status' - general */
+#define V4L2_IN_ST_NO_POWER 0x00000001 /* Attached device is off */
+#define V4L2_IN_ST_NO_SIGNAL 0x00000002
+#define V4L2_IN_ST_NO_COLOR 0x00000004
+
+/* field 'status' - sensor orientation */
+/* If sensor is mounted upside down set both bits */
+#define V4L2_IN_ST_HFLIP 0x00000010 /* Frames are flipped horizontally */
+#define V4L2_IN_ST_VFLIP 0x00000020 /* Frames are flipped vertically */
+
+/* field 'status' - analog */
+#define V4L2_IN_ST_NO_H_LOCK 0x00000100 /* No horizontal sync lock */
+#define V4L2_IN_ST_COLOR_KILL 0x00000200 /* Color killer is active */
+
+/* field 'status' - digital */
+#define V4L2_IN_ST_NO_SYNC 0x00010000 /* No synchronization lock */
+#define V4L2_IN_ST_NO_EQU 0x00020000 /* No equalizer lock */
+#define V4L2_IN_ST_NO_CARRIER 0x00040000 /* Carrier recovery failed */
+
+/* field 'status' - VCR and set-top box */
+#define V4L2_IN_ST_MACROVISION 0x01000000 /* Macrovision detected */
+#define V4L2_IN_ST_NO_ACCESS 0x02000000 /* Conditional access denied */
+#define V4L2_IN_ST_VTR 0x04000000 /* VTR time constant */
+
+/* capabilities flags */
+#define V4L2_IN_CAP_DV_TIMINGS 0x00000002 /* Supports S_DV_TIMINGS */
+#define V4L2_IN_CAP_CUSTOM_TIMINGS V4L2_IN_CAP_DV_TIMINGS /* For compatibility */
+#define V4L2_IN_CAP_STD 0x00000004 /* Supports S_STD */
+
+/*
+ * V I D E O O U T P U T S
+ */
+struct v4l2_output {
+ __u32 index; /* Which output */
+ __u8 name[32]; /* Label */
+ __u32 type; /* Type of output */
+ __u32 audioset; /* Associated audios (bitfield) */
+ __u32 modulator; /* Associated modulator */
+ v4l2_std_id std;
+ __u32 capabilities;
+ __u32 reserved[3];
+};
+/* Values for the 'type' field */
+#define V4L2_OUTPUT_TYPE_MODULATOR 1
+#define V4L2_OUTPUT_TYPE_ANALOG 2
+#define V4L2_OUTPUT_TYPE_ANALOGVGAOVERLAY 3
+
+/* capabilities flags */
+#define V4L2_OUT_CAP_DV_TIMINGS 0x00000002 /* Supports S_DV_TIMINGS */
+#define V4L2_OUT_CAP_CUSTOM_TIMINGS V4L2_OUT_CAP_DV_TIMINGS /* For compatibility */
+#define V4L2_OUT_CAP_STD 0x00000004 /* Supports S_STD */
+
+/*
+ * C O N T R O L S
+ */
+struct v4l2_control {
+ __u32 id;
+ __s32 value;
+};
+
+struct v4l2_ext_control {
+ __u32 id;
+ __u32 size;
+ __u32 reserved2[1];
+ union {
+ __s32 value;
+ __s64 value64;
+ char *string;
+ };
+} __attribute__ ((packed));
+
+struct v4l2_ext_controls {
+ __u32 ctrl_class;
+ __u32 count;
+ __u32 error_idx;
+ __u32 reserved[2];
+ struct v4l2_ext_control *controls;
+};
+
+#define V4L2_CTRL_ID_MASK (0x0fffffff)
+#define V4L2_CTRL_ID2CLASS(id) ((id) & 0x0fff0000UL)
+#define V4L2_CTRL_DRIVER_PRIV(id) (((id) & 0xffff) >= 0x1000)
+
+enum v4l2_ctrl_type {
+ V4L2_CTRL_TYPE_INTEGER = 1,
+ V4L2_CTRL_TYPE_BOOLEAN = 2,
+ V4L2_CTRL_TYPE_MENU = 3,
+ V4L2_CTRL_TYPE_BUTTON = 4,
+ V4L2_CTRL_TYPE_INTEGER64 = 5,
+ V4L2_CTRL_TYPE_CTRL_CLASS = 6,
+ V4L2_CTRL_TYPE_STRING = 7,
+ V4L2_CTRL_TYPE_BITMASK = 8,
+ V4L2_CTRL_TYPE_INTEGER_MENU = 9,
+};
+
+/* Used in the VIDIOC_QUERYCTRL ioctl for querying controls */
+struct v4l2_queryctrl {
+ __u32 id;
+ __u32 type; /* enum v4l2_ctrl_type */
+ __u8 name[32]; /* Whatever */
+ __s32 minimum; /* Note signedness */
+ __s32 maximum;
+ __s32 step;
+ __s32 default_value;
+ __u32 flags;
+ __u32 reserved[2];
+};
+
+/* Used in the VIDIOC_QUERYMENU ioctl for querying menu items */
+struct v4l2_querymenu {
+ __u32 id;
+ __u32 index;
+ union {
+ __u8 name[32]; /* Whatever */
+ __s64 value;
+ };
+ __u32 reserved;
+} __attribute__ ((packed));
+
+/* Control flags */
+#define V4L2_CTRL_FLAG_DISABLED 0x0001
+#define V4L2_CTRL_FLAG_GRABBED 0x0002
+#define V4L2_CTRL_FLAG_READ_ONLY 0x0004
+#define V4L2_CTRL_FLAG_UPDATE 0x0008
+#define V4L2_CTRL_FLAG_INACTIVE 0x0010
+#define V4L2_CTRL_FLAG_SLIDER 0x0020
+#define V4L2_CTRL_FLAG_WRITE_ONLY 0x0040
+#define V4L2_CTRL_FLAG_VOLATILE 0x0080
+
+/* Query flag, to be ORed with the control ID */
+#define V4L2_CTRL_FLAG_NEXT_CTRL 0x80000000
+
+/* User-class control IDs defined by V4L2 */
+#define V4L2_CID_MAX_CTRLS 1024
+/* IDs reserved for driver specific controls */
+#define V4L2_CID_PRIVATE_BASE 0x08000000
+
+
+/*
+ * T U N I N G
+ */
+struct v4l2_tuner {
+ __u32 index;
+ __u8 name[32];
+ __u32 type; /* enum v4l2_tuner_type */
+ __u32 capability;
+ __u32 rangelow;
+ __u32 rangehigh;
+ __u32 rxsubchans;
+ __u32 audmode;
+ __s32 signal;
+ __s32 afc;
+ __u32 reserved[4];
+};
+
+struct v4l2_modulator {
+ __u32 index;
+ __u8 name[32];
+ __u32 capability;
+ __u32 rangelow;
+ __u32 rangehigh;
+ __u32 txsubchans;
+ __u32 reserved[4];
+};
+
+/* Flags for the 'capability' field */
+#define V4L2_TUNER_CAP_LOW 0x0001
+#define V4L2_TUNER_CAP_NORM 0x0002
+#define V4L2_TUNER_CAP_HWSEEK_BOUNDED 0x0004
+#define V4L2_TUNER_CAP_HWSEEK_WRAP 0x0008
+#define V4L2_TUNER_CAP_STEREO 0x0010
+#define V4L2_TUNER_CAP_LANG2 0x0020
+#define V4L2_TUNER_CAP_SAP 0x0020
+#define V4L2_TUNER_CAP_LANG1 0x0040
+#define V4L2_TUNER_CAP_RDS 0x0080
+#define V4L2_TUNER_CAP_RDS_BLOCK_IO 0x0100
+#define V4L2_TUNER_CAP_RDS_CONTROLS 0x0200
+#define V4L2_TUNER_CAP_FREQ_BANDS 0x0400
+#define V4L2_TUNER_CAP_HWSEEK_PROG_LIM 0x0800
+
+/* Flags for the 'rxsubchans' field */
+#define V4L2_TUNER_SUB_MONO 0x0001
+#define V4L2_TUNER_SUB_STEREO 0x0002
+#define V4L2_TUNER_SUB_LANG2 0x0004
+#define V4L2_TUNER_SUB_SAP 0x0004
+#define V4L2_TUNER_SUB_LANG1 0x0008
+#define V4L2_TUNER_SUB_RDS 0x0010
+
+/* Values for the 'audmode' field */
+#define V4L2_TUNER_MODE_MONO 0x0000
+#define V4L2_TUNER_MODE_STEREO 0x0001
+#define V4L2_TUNER_MODE_LANG2 0x0002
+#define V4L2_TUNER_MODE_SAP 0x0002
+#define V4L2_TUNER_MODE_LANG1 0x0003
+#define V4L2_TUNER_MODE_LANG1_LANG2 0x0004
+
+struct v4l2_frequency {
+ __u32 tuner;
+ __u32 type; /* enum v4l2_tuner_type */
+ __u32 frequency;
+ __u32 reserved[8];
+};
+
+#define V4L2_BAND_MODULATION_VSB (1 << 1)
+#define V4L2_BAND_MODULATION_FM (1 << 2)
+#define V4L2_BAND_MODULATION_AM (1 << 3)
+
+struct v4l2_frequency_band {
+ __u32 tuner;
+ __u32 type; /* enum v4l2_tuner_type */
+ __u32 index;
+ __u32 capability;
+ __u32 rangelow;
+ __u32 rangehigh;
+ __u32 modulation;
+ __u32 reserved[9];
+};
+
+struct v4l2_hw_freq_seek {
+ __u32 tuner;
+ __u32 type; /* enum v4l2_tuner_type */
+ __u32 seek_upward;
+ __u32 wrap_around;
+ __u32 spacing;
+ __u32 rangelow;
+ __u32 rangehigh;
+ __u32 reserved[5];
+};
+
+/*
+ * R D S
+ */
+
+struct v4l2_rds_data {
+ __u8 lsb;
+ __u8 msb;
+ __u8 block;
+} __attribute__ ((packed));
+
+#define V4L2_RDS_BLOCK_MSK 0x7
+#define V4L2_RDS_BLOCK_A 0
+#define V4L2_RDS_BLOCK_B 1
+#define V4L2_RDS_BLOCK_C 2
+#define V4L2_RDS_BLOCK_D 3
+#define V4L2_RDS_BLOCK_C_ALT 4
+#define V4L2_RDS_BLOCK_INVALID 7
+
+#define V4L2_RDS_BLOCK_CORRECTED 0x40
+#define V4L2_RDS_BLOCK_ERROR 0x80
+
+/*
+ * A U D I O
+ */
+struct v4l2_audio {
+ __u32 index;
+ __u8 name[32];
+ __u32 capability;
+ __u32 mode;
+ __u32 reserved[2];
+};
+
+/* Flags for the 'capability' field */
+#define V4L2_AUDCAP_STEREO 0x00001
+#define V4L2_AUDCAP_AVL 0x00002
+
+/* Flags for the 'mode' field */
+#define V4L2_AUDMODE_AVL 0x00001
+
+struct v4l2_audioout {
+ __u32 index;
+ __u8 name[32];
+ __u32 capability;
+ __u32 mode;
+ __u32 reserved[2];
+};
+
+/*
+ * M P E G S E R V I C E S
+ *
+ * NOTE: EXPERIMENTAL API
+ */
+#if 1
+#define V4L2_ENC_IDX_FRAME_I (0)
+#define V4L2_ENC_IDX_FRAME_P (1)
+#define V4L2_ENC_IDX_FRAME_B (2)
+#define V4L2_ENC_IDX_FRAME_MASK (0xf)
+
+struct v4l2_enc_idx_entry {
+ __u64 offset;
+ __u64 pts;
+ __u32 length;
+ __u32 flags;
+ __u32 reserved[2];
+};
+
+#define V4L2_ENC_IDX_ENTRIES (64)
+struct v4l2_enc_idx {
+ __u32 entries;
+ __u32 entries_cap;
+ __u32 reserved[4];
+ struct v4l2_enc_idx_entry entry[V4L2_ENC_IDX_ENTRIES];
+};
+
+
+#define V4L2_ENC_CMD_START (0)
+#define V4L2_ENC_CMD_STOP (1)
+#define V4L2_ENC_CMD_PAUSE (2)
+#define V4L2_ENC_CMD_RESUME (3)
+
+/* Flags for V4L2_ENC_CMD_STOP */
+#define V4L2_ENC_CMD_STOP_AT_GOP_END (1 << 0)
+
+struct v4l2_encoder_cmd {
+ __u32 cmd;
+ __u32 flags;
+ union {
+ struct {
+ __u32 data[8];
+ } raw;
+ };
+};
+
+/* Decoder commands */
+#define V4L2_DEC_CMD_START (0)
+#define V4L2_DEC_CMD_STOP (1)
+#define V4L2_DEC_CMD_PAUSE (2)
+#define V4L2_DEC_CMD_RESUME (3)
+
+/* Flags for V4L2_DEC_CMD_START */
+#define V4L2_DEC_CMD_START_MUTE_AUDIO (1 << 0)
+
+/* Flags for V4L2_DEC_CMD_PAUSE */
+#define V4L2_DEC_CMD_PAUSE_TO_BLACK (1 << 0)
+
+/* Flags for V4L2_DEC_CMD_STOP */
+#define V4L2_DEC_CMD_STOP_TO_BLACK (1 << 0)
+#define V4L2_DEC_CMD_STOP_IMMEDIATELY (1 << 1)
+
+/* Play format requirements (returned by the driver): */
+
+/* The decoder has no special format requirements */
+#define V4L2_DEC_START_FMT_NONE (0)
+/* The decoder requires full GOPs */
+#define V4L2_DEC_START_FMT_GOP (1)
+
+/* The structure must be zeroed before use by the application
+ This ensures it can be extended safely in the future. */
+struct v4l2_decoder_cmd {
+ __u32 cmd;
+ __u32 flags;
+ union {
+ struct {
+ __u64 pts;
+ } stop;
+
+ struct {
+ /* 0 or 1000 specifies normal speed,
+ 1 specifies forward single stepping,
+ -1 specifies backward single stepping,
+ >1: playback at speed/1000 of the normal speed,
+ <-1: reverse playback at (-speed/1000) of the normal speed. */
+ __s32 speed;
+ __u32 format;
+ } start;
+
+ struct {
+ __u32 data[16];
+ } raw;
+ };
+};
+#endif
+
+
+/*
+ * D A T A S E R V I C E S ( V B I )
+ *
+ * Data services API by Michael Schimek
+ */
+
+/* Raw VBI */
+struct v4l2_vbi_format {
+ __u32 sampling_rate; /* in 1 Hz */
+ __u32 offset;
+ __u32 samples_per_line;
+ __u32 sample_format; /* V4L2_PIX_FMT_* */
+ __s32 start[2];
+ __u32 count[2];
+ __u32 flags; /* V4L2_VBI_* */
+ __u32 reserved[2]; /* must be zero */
+};
+
+/* VBI flags */
+#define V4L2_VBI_UNSYNC (1 << 0)
+#define V4L2_VBI_INTERLACED (1 << 1)
+
+/* Sliced VBI
+ *
+ * This implements is a proposal V4L2 API to allow SLICED VBI
+ * required for some hardware encoders. It should change without
+ * notice in the definitive implementation.
+ */
+
+struct v4l2_sliced_vbi_format {
+ __u16 service_set;
+ /* service_lines[0][...] specifies lines 0-23 (1-23 used) of the first field
+ service_lines[1][...] specifies lines 0-23 (1-23 used) of the second field
+ (equals frame lines 313-336 for 625 line video
+ standards, 263-286 for 525 line standards) */
+ __u16 service_lines[2][24];
+ __u32 io_size;
+ __u32 reserved[2]; /* must be zero */
+};
+
+/* Teletext World System Teletext
+ (WST), defined on ITU-R BT.653-2 */
+#define V4L2_SLICED_TELETEXT_B (0x0001)
+/* Video Program System, defined on ETS 300 231*/
+#define V4L2_SLICED_VPS (0x0400)
+/* Closed Caption, defined on EIA-608 */
+#define V4L2_SLICED_CAPTION_525 (0x1000)
+/* Wide Screen System, defined on ITU-R BT1119.1 */
+#define V4L2_SLICED_WSS_625 (0x4000)
+
+#define V4L2_SLICED_VBI_525 (V4L2_SLICED_CAPTION_525)
+#define V4L2_SLICED_VBI_625 (V4L2_SLICED_TELETEXT_B | V4L2_SLICED_VPS | V4L2_SLICED_WSS_625)
+
+struct v4l2_sliced_vbi_cap {
+ __u16 service_set;
+ /* service_lines[0][...] specifies lines 0-23 (1-23 used) of the first field
+ service_lines[1][...] specifies lines 0-23 (1-23 used) of the second field
+ (equals frame lines 313-336 for 625 line video
+ standards, 263-286 for 525 line standards) */
+ __u16 service_lines[2][24];
+ __u32 type; /* enum v4l2_buf_type */
+ __u32 reserved[3]; /* must be 0 */
+};
+
+struct v4l2_sliced_vbi_data {
+ __u32 id;
+ __u32 field; /* 0: first field, 1: second field */
+ __u32 line; /* 1-23 */
+ __u32 reserved; /* must be 0 */
+ __u8 data[48];
+};
+
+/*
+ * Sliced VBI data inserted into MPEG Streams
+ */
+
+/*
+ * V4L2_MPEG_STREAM_VBI_FMT_IVTV:
+ *
+ * Structure of payload contained in an MPEG 2 Private Stream 1 PES Packet in an
+ * MPEG-2 Program Pack that contains V4L2_MPEG_STREAM_VBI_FMT_IVTV Sliced VBI
+ * data
+ *
+ * Note, the MPEG-2 Program Pack and Private Stream 1 PES packet header
+ * definitions are not included here. See the MPEG-2 specifications for details
+ * on these headers.
+ */
+
+/* Line type IDs */
+#define V4L2_MPEG_VBI_IVTV_TELETEXT_B (1)
+#define V4L2_MPEG_VBI_IVTV_CAPTION_525 (4)
+#define V4L2_MPEG_VBI_IVTV_WSS_625 (5)
+#define V4L2_MPEG_VBI_IVTV_VPS (7)
+
+struct v4l2_mpeg_vbi_itv0_line {
+ __u8 id; /* One of V4L2_MPEG_VBI_IVTV_* above */
+ __u8 data[42]; /* Sliced VBI data for the line */
+} __attribute__ ((packed));
+
+struct v4l2_mpeg_vbi_itv0 {
+ __le32 linemask[2]; /* Bitmasks of VBI service lines present */
+ struct v4l2_mpeg_vbi_itv0_line line[35];
+} __attribute__ ((packed));
+
+struct v4l2_mpeg_vbi_ITV0 {
+ struct v4l2_mpeg_vbi_itv0_line line[36];
+} __attribute__ ((packed));
+
+#define V4L2_MPEG_VBI_IVTV_MAGIC0 "itv0"
+#define V4L2_MPEG_VBI_IVTV_MAGIC1 "ITV0"
+
+struct v4l2_mpeg_vbi_fmt_ivtv {
+ __u8 magic[4];
+ union {
+ struct v4l2_mpeg_vbi_itv0 itv0;
+ struct v4l2_mpeg_vbi_ITV0 ITV0;
+ };
+} __attribute__ ((packed));
+
+/*
+ * A G G R E G A T E S T R U C T U R E S
+ */
+
+/**
+ * struct v4l2_plane_pix_format - additional, per-plane format definition
+ * @sizeimage: maximum size in bytes required for data, for which
+ * this plane will be used
+ * @bytesperline: distance in bytes between the leftmost pixels in two
+ * adjacent lines
+ */
+struct v4l2_plane_pix_format {
+ __u32 sizeimage;
+ __u16 bytesperline;
+ __u16 reserved[7];
+} __attribute__ ((packed));
+
+/**
+ * struct v4l2_pix_format_mplane - multiplanar format definition
+ * @width: image width in pixels
+ * @height: image height in pixels
+ * @pixelformat: little endian four character code (fourcc)
+ * @field: enum v4l2_field; field order (for interlaced video)
+ * @colorspace: enum v4l2_colorspace; supplemental to pixelformat
+ * @plane_fmt: per-plane information
+ * @num_planes: number of planes for this format
+ */
+struct v4l2_pix_format_mplane {
+ __u32 width;
+ __u32 height;
+ __u32 pixelformat;
+ __u32 field;
+ __u32 colorspace;
+
+ struct v4l2_plane_pix_format plane_fmt[VIDEO_MAX_PLANES];
+ __u8 num_planes;
+ __u8 reserved[11];
+} __attribute__ ((packed));
+
+/**
+ * struct v4l2_format - stream data format
+ * @type: enum v4l2_buf_type; type of the data stream
+ * @pix: definition of an image format
+ * @pix_mp: definition of a multiplanar image format
+ * @win: definition of an overlaid image
+ * @vbi: raw VBI capture or output parameters
+ * @sliced: sliced VBI capture or output parameters
+ * @raw_data: placeholder for future extensions and custom formats
+ */
+struct v4l2_format {
+ __u32 type;
+ union {
+ struct v4l2_pix_format pix; /* V4L2_BUF_TYPE_VIDEO_CAPTURE */
+ struct v4l2_pix_format_mplane pix_mp; /* V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE */
+ struct v4l2_window win; /* V4L2_BUF_TYPE_VIDEO_OVERLAY */
+ struct v4l2_vbi_format vbi; /* V4L2_BUF_TYPE_VBI_CAPTURE */
+ struct v4l2_sliced_vbi_format sliced; /* V4L2_BUF_TYPE_SLICED_VBI_CAPTURE */
+ __u8 raw_data[200]; /* user-defined */
+ } fmt;
+};
+
+/* Stream type-dependent parameters
+ */
+struct v4l2_streamparm {
+ __u32 type; /* enum v4l2_buf_type */
+ union {
+ struct v4l2_captureparm capture;
+ struct v4l2_outputparm output;
+ __u8 raw_data[200]; /* user-defined */
+ } parm;
+};
+
+/*
+ * E V E N T S
+ */
+
+#define V4L2_EVENT_ALL 0
+#define V4L2_EVENT_VSYNC 1
+#define V4L2_EVENT_EOS 2
+#define V4L2_EVENT_CTRL 3
+#define V4L2_EVENT_FRAME_SYNC 4
+#define V4L2_EVENT_PRIVATE_START 0x08000000
+
+/* Payload for V4L2_EVENT_VSYNC */
+struct v4l2_event_vsync {
+ /* Can be V4L2_FIELD_ANY, _NONE, _TOP or _BOTTOM */
+ __u8 field;
+} __attribute__ ((packed));
+
+/* Payload for V4L2_EVENT_CTRL */
+#define V4L2_EVENT_CTRL_CH_VALUE (1 << 0)
+#define V4L2_EVENT_CTRL_CH_FLAGS (1 << 1)
+#define V4L2_EVENT_CTRL_CH_RANGE (1 << 2)
+
+struct v4l2_event_ctrl {
+ __u32 changes;
+ __u32 type;
+ union {
+ __s32 value;
+ __s64 value64;
+ };
+ __u32 flags;
+ __s32 minimum;
+ __s32 maximum;
+ __s32 step;
+ __s32 default_value;
+};
+
+struct v4l2_event_frame_sync {
+ __u32 frame_sequence;
+};
+
+struct v4l2_event {
+ __u32 type;
+ union {
+ struct v4l2_event_vsync vsync;
+ struct v4l2_event_ctrl ctrl;
+ struct v4l2_event_frame_sync frame_sync;
+ __u8 data[64];
+ } u;
+ __u32 pending;
+ __u32 sequence;
+ struct timespec timestamp;
+ __u32 id;
+ __u32 reserved[8];
+};
+
+#define V4L2_EVENT_SUB_FL_SEND_INITIAL (1 << 0)
+#define V4L2_EVENT_SUB_FL_ALLOW_FEEDBACK (1 << 1)
+
+struct v4l2_event_subscription {
+ __u32 type;
+ __u32 id;
+ __u32 flags;
+ __u32 reserved[5];
+};
+
+/*
+ * A D V A N C E D D E B U G G I N G
+ *
+ * NOTE: EXPERIMENTAL API, NEVER RELY ON THIS IN APPLICATIONS!
+ * FOR DEBUGGING, TESTING AND INTERNAL USE ONLY!
+ */
+
+/* VIDIOC_DBG_G_REGISTER and VIDIOC_DBG_S_REGISTER */
+
+#define V4L2_CHIP_MATCH_BRIDGE 0 /* Match against chip ID on the bridge (0 for the bridge) */
+#define V4L2_CHIP_MATCH_SUBDEV 4 /* Match against subdev index */
+
+/* The following four defines are no longer in use */
+#define V4L2_CHIP_MATCH_HOST V4L2_CHIP_MATCH_BRIDGE
+#define V4L2_CHIP_MATCH_I2C_DRIVER 1 /* Match against I2C driver name */
+#define V4L2_CHIP_MATCH_I2C_ADDR 2 /* Match against I2C 7-bit address */
+#define V4L2_CHIP_MATCH_AC97 3 /* Match against ancillary AC97 chip */
+
+struct v4l2_dbg_match {
+ __u32 type; /* Match type */
+ union { /* Match this chip, meaning determined by type */
+ __u32 addr;
+ char name[32];
+ };
+} __attribute__ ((packed));
+
+struct v4l2_dbg_register {
+ struct v4l2_dbg_match match;
+ __u32 size; /* register size in bytes */
+ __u64 reg;
+ __u64 val;
+} __attribute__ ((packed));
+
+#define V4L2_CHIP_FL_READABLE (1 << 0)
+#define V4L2_CHIP_FL_WRITABLE (1 << 1)
+
+/* VIDIOC_DBG_G_CHIP_INFO */
+struct v4l2_dbg_chip_info {
+ struct v4l2_dbg_match match;
+ char name[32];
+ __u32 flags;
+ __u32 reserved[32];
+} __attribute__ ((packed));
+
+/**
+ * struct v4l2_create_buffers - VIDIOC_CREATE_BUFS argument
+ * @index: on return, index of the first created buffer
+ * @count: entry: number of requested buffers,
+ * return: number of created buffers
+ * @memory: enum v4l2_memory; buffer memory type
+ * @format: frame format, for which buffers are requested
+ * @reserved: future extensions
+ */
+struct v4l2_create_buffers {
+ __u32 index;
+ __u32 count;
+ __u32 memory;
+ struct v4l2_format format;
+ __u32 reserved[8];
+};
+
+/*
+ * I O C T L C O D E S F O R V I D E O D E V I C E S
+ *
+ */
+#define VIDIOC_QUERYCAP _IOR('V', 0, struct v4l2_capability)
+#define VIDIOC_RESERVED _IO('V', 1)
+#define VIDIOC_ENUM_FMT _IOWR('V', 2, struct v4l2_fmtdesc)
+#define VIDIOC_G_FMT _IOWR('V', 4, struct v4l2_format)
+#define VIDIOC_S_FMT _IOWR('V', 5, struct v4l2_format)
+#define VIDIOC_REQBUFS _IOWR('V', 8, struct v4l2_requestbuffers)
+#define VIDIOC_QUERYBUF _IOWR('V', 9, struct v4l2_buffer)
+#define VIDIOC_G_FBUF _IOR('V', 10, struct v4l2_framebuffer)
+#define VIDIOC_S_FBUF _IOW('V', 11, struct v4l2_framebuffer)
+#define VIDIOC_OVERLAY _IOW('V', 14, int)
+#define VIDIOC_QBUF _IOWR('V', 15, struct v4l2_buffer)
+#define VIDIOC_EXPBUF _IOWR('V', 16, struct v4l2_exportbuffer)
+#define VIDIOC_DQBUF _IOWR('V', 17, struct v4l2_buffer)
+#define VIDIOC_STREAMON _IOW('V', 18, int)
+#define VIDIOC_STREAMOFF _IOW('V', 19, int)
+#define VIDIOC_G_PARM _IOWR('V', 21, struct v4l2_streamparm)
+#define VIDIOC_S_PARM _IOWR('V', 22, struct v4l2_streamparm)
+#define VIDIOC_G_STD _IOR('V', 23, v4l2_std_id)
+#define VIDIOC_S_STD _IOW('V', 24, v4l2_std_id)
+#define VIDIOC_ENUMSTD _IOWR('V', 25, struct v4l2_standard)
+#define VIDIOC_ENUMINPUT _IOWR('V', 26, struct v4l2_input)
+#define VIDIOC_G_CTRL _IOWR('V', 27, struct v4l2_control)
+#define VIDIOC_S_CTRL _IOWR('V', 28, struct v4l2_control)
+#define VIDIOC_G_TUNER _IOWR('V', 29, struct v4l2_tuner)
+#define VIDIOC_S_TUNER _IOW('V', 30, struct v4l2_tuner)
+#define VIDIOC_G_AUDIO _IOR('V', 33, struct v4l2_audio)
+#define VIDIOC_S_AUDIO _IOW('V', 34, struct v4l2_audio)
+#define VIDIOC_QUERYCTRL _IOWR('V', 36, struct v4l2_queryctrl)
+#define VIDIOC_QUERYMENU _IOWR('V', 37, struct v4l2_querymenu)
+#define VIDIOC_G_INPUT _IOR('V', 38, int)
+#define VIDIOC_S_INPUT _IOWR('V', 39, int)
+#define VIDIOC_G_OUTPUT _IOR('V', 46, int)
+#define VIDIOC_S_OUTPUT _IOWR('V', 47, int)
+#define VIDIOC_ENUMOUTPUT _IOWR('V', 48, struct v4l2_output)
+#define VIDIOC_G_AUDOUT _IOR('V', 49, struct v4l2_audioout)
+#define VIDIOC_S_AUDOUT _IOW('V', 50, struct v4l2_audioout)
+#define VIDIOC_G_MODULATOR _IOWR('V', 54, struct v4l2_modulator)
+#define VIDIOC_S_MODULATOR _IOW('V', 55, struct v4l2_modulator)
+#define VIDIOC_G_FREQUENCY _IOWR('V', 56, struct v4l2_frequency)
+#define VIDIOC_S_FREQUENCY _IOW('V', 57, struct v4l2_frequency)
+#define VIDIOC_CROPCAP _IOWR('V', 58, struct v4l2_cropcap)
+#define VIDIOC_G_CROP _IOWR('V', 59, struct v4l2_crop)
+#define VIDIOC_S_CROP _IOW('V', 60, struct v4l2_crop)
+#define VIDIOC_G_JPEGCOMP _IOR('V', 61, struct v4l2_jpegcompression)
+#define VIDIOC_S_JPEGCOMP _IOW('V', 62, struct v4l2_jpegcompression)
+#define VIDIOC_QUERYSTD _IOR('V', 63, v4l2_std_id)
+#define VIDIOC_TRY_FMT _IOWR('V', 64, struct v4l2_format)
+#define VIDIOC_ENUMAUDIO _IOWR('V', 65, struct v4l2_audio)
+#define VIDIOC_ENUMAUDOUT _IOWR('V', 66, struct v4l2_audioout)
+#define VIDIOC_G_PRIORITY _IOR('V', 67, __u32) /* enum v4l2_priority */
+#define VIDIOC_S_PRIORITY _IOW('V', 68, __u32) /* enum v4l2_priority */
+#define VIDIOC_G_SLICED_VBI_CAP _IOWR('V', 69, struct v4l2_sliced_vbi_cap)
+#define VIDIOC_LOG_STATUS _IO('V', 70)
+#define VIDIOC_G_EXT_CTRLS _IOWR('V', 71, struct v4l2_ext_controls)
+#define VIDIOC_S_EXT_CTRLS _IOWR('V', 72, struct v4l2_ext_controls)
+#define VIDIOC_TRY_EXT_CTRLS _IOWR('V', 73, struct v4l2_ext_controls)
+#define VIDIOC_ENUM_FRAMESIZES _IOWR('V', 74, struct v4l2_frmsizeenum)
+#define VIDIOC_ENUM_FRAMEINTERVALS _IOWR('V', 75, struct v4l2_frmivalenum)
+#define VIDIOC_G_ENC_INDEX _IOR('V', 76, struct v4l2_enc_idx)
+#define VIDIOC_ENCODER_CMD _IOWR('V', 77, struct v4l2_encoder_cmd)
+#define VIDIOC_TRY_ENCODER_CMD _IOWR('V', 78, struct v4l2_encoder_cmd)
+
+/* Experimental, meant for debugging, testing and internal use.
+ Only implemented if CONFIG_VIDEO_ADV_DEBUG is defined.
+ You must be root to use these ioctls. Never use these in applications! */
+#define VIDIOC_DBG_S_REGISTER _IOW('V', 79, struct v4l2_dbg_register)
+#define VIDIOC_DBG_G_REGISTER _IOWR('V', 80, struct v4l2_dbg_register)
+
+#define VIDIOC_S_HW_FREQ_SEEK _IOW('V', 82, struct v4l2_hw_freq_seek)
+
+#define VIDIOC_S_DV_TIMINGS _IOWR('V', 87, struct v4l2_dv_timings)
+#define VIDIOC_G_DV_TIMINGS _IOWR('V', 88, struct v4l2_dv_timings)
+#define VIDIOC_DQEVENT _IOR('V', 89, struct v4l2_event)
+#define VIDIOC_SUBSCRIBE_EVENT _IOW('V', 90, struct v4l2_event_subscription)
+#define VIDIOC_UNSUBSCRIBE_EVENT _IOW('V', 91, struct v4l2_event_subscription)
+
+/* Experimental, the below two ioctls may change over the next couple of kernel
+ versions */
+#define VIDIOC_CREATE_BUFS _IOWR('V', 92, struct v4l2_create_buffers)
+#define VIDIOC_PREPARE_BUF _IOWR('V', 93, struct v4l2_buffer)
+
+/* Experimental selection API */
+#define VIDIOC_G_SELECTION _IOWR('V', 94, struct v4l2_selection)
+#define VIDIOC_S_SELECTION _IOWR('V', 95, struct v4l2_selection)
+
+/* Experimental, these two ioctls may change over the next couple of kernel
+ versions. */
+#define VIDIOC_DECODER_CMD _IOWR('V', 96, struct v4l2_decoder_cmd)
+#define VIDIOC_TRY_DECODER_CMD _IOWR('V', 97, struct v4l2_decoder_cmd)
+
+/* Experimental, these three ioctls may change over the next couple of kernel
+ versions. */
+#define VIDIOC_ENUM_DV_TIMINGS _IOWR('V', 98, struct v4l2_enum_dv_timings)
+#define VIDIOC_QUERY_DV_TIMINGS _IOR('V', 99, struct v4l2_dv_timings)
+#define VIDIOC_DV_TIMINGS_CAP _IOWR('V', 100, struct v4l2_dv_timings_cap)
+
+/* Experimental, this ioctl may change over the next couple of kernel
+ versions. */
+#define VIDIOC_ENUM_FREQ_BANDS _IOWR('V', 101, struct v4l2_frequency_band)
+
+/* Experimental, meant for debugging, testing and internal use.
+ Never use these in applications! */
+#define VIDIOC_DBG_G_CHIP_INFO _IOWR('V', 102, struct v4l2_dbg_chip_info)
+
+/* Reminder: when adding new ioctls please add support for them to
+ drivers/media/video/v4l2-compat-ioctl32.c as well! */
+
+#define BASE_VIDIOC_PRIVATE 192 /* 192-255 are private */
+
+#endif /* _UAPI__LINUX_VIDEODEV2_H */
diff --git a/libtiutils/Android.mk b/libtiutils/Android.mk
new file mode 100644
index 0000000..768a66e
--- /dev/null
+++ b/libtiutils/Android.mk
@@ -0,0 +1,49 @@
+################################################
+
+LOCAL_PATH:= $(call my-dir)
+
+include $(CLEAR_VARS)
+
+LOCAL_PRELINK_MODULE := false
+
+LOCAL_SRC_FILES:= \
+ DebugUtils.cpp \
+ MessageQueue.cpp \
+ Semaphore.cpp \
+ ErrorUtils.cpp
+
+LOCAL_SHARED_LIBRARIES:= \
+ libdl \
+ libui \
+ libbinder \
+ libutils \
+ libcutils
+
+ifdef ANDROID_API_JB_OR_LATER
+LOCAL_C_INCLUDES += \
+ frameworks/native/include \
+ frameworks/native/include/media/openmax
+else
+LOCAL_C_INCLUDES += \
+ frameworks/base/include
+endif
+
+LOCAL_C_INCLUDES += \
+ bionic/libc/include
+
+LOCAL_CFLAGS += -fno-short-enums $(ANDROID_API_CFLAGS)
+
+ifdef TI_UTILS_MESSAGE_QUEUE_DEBUG_ENABLED
+ # Enable debug logs
+ LOCAL_CFLAGS += -DMSGQ_DEBUG
+endif
+
+ifdef TI_UTILS_MESSAGE_QUEUE_DEBUG_FUNCTION_NAMES
+ # Enable function enter/exit logging
+ LOCAL_CFLAGS += -DTI_UTILS_FUNCTION_LOGGER_ENABLE
+endif
+
+LOCAL_MODULE:= libtiutils
+LOCAL_MODULE_TAGS:= optional
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/libtiutils/DebugUtils.cpp b/libtiutils/DebugUtils.cpp
new file mode 100644
index 0000000..60ad0c8
--- /dev/null
+++ b/libtiutils/DebugUtils.cpp
@@ -0,0 +1,96 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "DebugUtils.h"
+
+#include "utils/Debug.h"
+
+
+
+
+namespace Ti {
+
+
+
+
+// shared const buffer with spaces for indentation string
+extern const char sIndentStringBuffer[] =
+ " "
+ " ";
+template class android::CompileTimeAssert<sizeof(sIndentStringBuffer) - 1 == kIndentStringMaxLength>;
+
+
+
+
+static const int kDebugThreadInfoGrowSize = 16;
+
+
+
+
+Debug Debug::sInstance;
+
+
+
+
+Debug::Debug()
+{
+ grow();
+}
+
+
+void Debug::grow()
+{
+ android::AutoMutex locker(mMutex);
+ (void)locker;
+
+ const int size = kDebugThreadInfoGrowSize;
+
+ const int newSize = (mData.get() ? mData->threads.size() : 0) + size;
+
+ Data * const newData = new Data;
+ newData->threads.setCapacity(newSize);
+
+ // insert previous thread info pointers
+ if ( mData.get() )
+ newData->threads.insertVectorAt(mData->threads, 0);
+
+ // populate data with new thread infos
+ for ( int i = 0; i < size; ++i )
+ newData->threads.add(new ThreadInfo);
+
+ // replace old data with new one
+ mData = newData;
+}
+
+
+Debug::ThreadInfo * Debug::registerThread(Data * const data, const int32_t threadId)
+{
+ const int size = data->threads.size();
+ for ( int i = 0; i < size; ++i )
+ {
+ ThreadInfo * const threadInfo = data->threads.itemAt(i);
+ if ( android_atomic_acquire_cas(0, threadId, &threadInfo->threadId) == 0 )
+ return threadInfo;
+ }
+
+ // failed to find empty slot for thread
+ return 0;
+}
+
+
+
+
+} // namespace Ti
diff --git a/libtiutils/DebugUtils.h b/libtiutils/DebugUtils.h
new file mode 100644
index 0000000..a05ba8d
--- /dev/null
+++ b/libtiutils/DebugUtils.h
@@ -0,0 +1,397 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef DEBUG_UTILS_H
+#define DEBUG_UTILS_H
+
+#include <android/log.h>
+#include <utils/threads.h>
+#include <utils/Vector.h>
+
+
+
+
+namespace Ti {
+
+
+
+
+// use 2 space characters for call stack indent
+static const int kFunctionLoggerIndentSize = 2;
+
+
+
+
+template <int Size = kFunctionLoggerIndentSize>
+class IndentString
+{
+public:
+ IndentString(int length);
+
+ const char * string() const;
+
+private:
+ int calculateOffset(int length) const;
+
+private:
+ const int mOffset;
+};
+
+
+
+
+class Debug
+{
+public:
+ static Debug * instance();
+
+ int offsetForCurrentThread();
+ void log(int priority, const char * format, ...);
+
+private:
+ class ThreadInfo
+ {
+ public:
+ ThreadInfo() :
+ threadId(0), callOffset(0)
+ {}
+
+ volatile int32_t threadId;
+ int callOffset;
+ };
+
+ class Data : public android::RefBase
+ {
+ public:
+ android::Vector<ThreadInfo*> threads;
+ };
+
+private:
+ // called from FunctionLogger
+ void increaseOffsetForCurrentThread();
+ void decreaseOffsetForCurrentThread();
+
+private:
+ Debug();
+
+ void grow();
+ ThreadInfo * registerThread(Data * data, int32_t threadId);
+ ThreadInfo * findCurrentThreadInfo();
+ void addOffsetForCurrentThread(int offset);
+
+private:
+ static Debug sInstance;
+
+ mutable android::Mutex mMutex;
+ android::sp<Data> mData;
+
+ friend class FunctionLogger;
+};
+
+
+
+
+class FunctionLogger
+{
+public:
+ FunctionLogger(const char * file, int line, const char * function);
+ ~FunctionLogger();
+
+ void setExitLine(int line);
+
+private:
+ const char * const mFile;
+ const int mLine;
+ const char * const mFunction;
+ const void * const mThreadId;
+ int mExitLine;
+};
+
+
+
+
+#ifdef TI_UTILS_FUNCTION_LOGGER_ENABLE
+# define LOG_FUNCTION_NAME Ti::FunctionLogger __function_logger_instance(__FILE__, __LINE__, __FUNCTION__);
+# define LOG_FUNCTION_NAME_EXIT __function_logger_instance.setExitLine(__LINE__);
+#else
+# define LOG_FUNCTION_NAME int __function_logger_instance;
+# define LOG_FUNCTION_NAME_EXIT (void*)__function_logger_instance;
+#endif
+
+#ifdef TI_UTILS_DEBUG_USE_TIMESTAMPS
+ // truncate timestamp to 1000 seconds to fit into 6 characters
+# define TI_UTILS_DEBUG_TIMESTAMP_TOKEN "[%06d] "
+# define TI_UTILS_DEBUG_TIMESTAMP_VARIABLE static_cast<int>(nanoseconds_to_milliseconds(systemTime()) % 1000000),
+#else
+# define TI_UTILS_DEBUG_TIMESTAMP_TOKEN
+# define TI_UTILS_DEBUG_TIMESTAMP_VARIABLE
+#endif
+
+
+
+
+#define DBGUTILS_LOGV_FULL(priority, file, line, function, format, ...) \
+ do \
+ { \
+ Ti::Debug * const debug = Ti::Debug::instance(); \
+ debug->log(priority, format, \
+ TI_UTILS_DEBUG_TIMESTAMP_VARIABLE \
+ reinterpret_cast<int>(androidGetThreadId()), \
+ Ti::IndentString<>(debug->offsetForCurrentThread()).string(), \
+ file, line, function, __VA_ARGS__); \
+ } while (0)
+
+#define DBGUTILS_LOGV(...) DBGUTILS_LOGV_FULL(ANDROID_LOG_VERBOSE, __FILE__, __LINE__, __FUNCTION__, TI_UTILS_DEBUG_TIMESTAMP_TOKEN "(%x) %s %s:%d %s - " __VA_ARGS__, "")
+#define DBGUTILS_LOGD(...) DBGUTILS_LOGV_FULL(ANDROID_LOG_DEBUG, __FILE__, __LINE__, __FUNCTION__, TI_UTILS_DEBUG_TIMESTAMP_TOKEN "(%x) %s %s:%d %s - " __VA_ARGS__, "")
+#define DBGUTILS_LOGI(...) DBGUTILS_LOGV_FULL(ANDROID_LOG_INFO, __FILE__, __LINE__, __FUNCTION__, TI_UTILS_DEBUG_TIMESTAMP_TOKEN "(%x) %s %s:%d %s - " __VA_ARGS__, "")
+#define DBGUTILS_LOGW(...) DBGUTILS_LOGV_FULL(ANDROID_LOG_WARN, __FILE__, __LINE__, __FUNCTION__, TI_UTILS_DEBUG_TIMESTAMP_TOKEN "(%x) %s %s:%d %s - " __VA_ARGS__, "")
+#define DBGUTILS_LOGE(...) DBGUTILS_LOGV_FULL(ANDROID_LOG_ERROR, __FILE__, __LINE__, __FUNCTION__, TI_UTILS_DEBUG_TIMESTAMP_TOKEN "(%x) %s %s:%d %s - " __VA_ARGS__, "")
+#define DBGUTILS_LOGF(...) DBGUTILS_LOGV_FULL(ANDROID_LOG_FATAL, __FILE__, __LINE__, __FUNCTION__, TI_UTILS_DEBUG_TIMESTAMP_TOKEN "(%x) %s %s:%d %s - " __VA_ARGS__, "")
+
+#define DBGUTILS_LOGVA DBGUTILS_LOGV
+#define DBGUTILS_LOGVB DBGUTILS_LOGV
+
+#define DBGUTILS_LOGDA DBGUTILS_LOGD
+#define DBGUTILS_LOGDB DBGUTILS_LOGD
+
+#define DBGUTILS_LOGEA DBGUTILS_LOGE
+#define DBGUTILS_LOGEB DBGUTILS_LOGE
+
+// asserts
+#define _DBGUTILS_PLAIN_ASSERT(condition) \
+ do \
+ { \
+ if ( !(condition) ) \
+ { \
+ __android_log_print(ANDROID_LOG_FATAL, "Ti::Debug", \
+ "Condition failed: " #condition); \
+ __android_log_print(ANDROID_LOG_FATAL, "Ti::Debug", \
+ "Aborting process..."); \
+ abort(); \
+ } \
+ } while (0)
+
+#define _DBGUTILS_PLAIN_ASSERT_X(condition, ...) \
+ do \
+ { \
+ if ( !(condition) ) \
+ { \
+ __android_log_print(ANDROID_LOG_FATAL, "Ti::Debug", \
+ "Condition failed: " #condition ": " __VA_ARGS__); \
+ __android_log_print(ANDROID_LOG_FATAL, "Ti::Debug", \
+ "Aborting process..."); \
+ abort(); \
+ } \
+ } while (0)
+
+#define DBGUTILS_ASSERT(condition) \
+ do \
+ { \
+ if ( !(condition) ) \
+ { \
+ DBGUTILS_LOGF("Condition failed: " #condition); \
+ DBGUTILS_LOGF("Aborting process..."); \
+ abort(); \
+ } \
+ } while (0)
+
+#define DBGUTILS_ASSERT_X(condition, ...) \
+ do \
+ { \
+ if ( !(condition) ) \
+ { \
+ DBGUTILS_LOGF("Condition failed: " #condition ": " __VA_ARGS__); \
+ DBGUTILS_LOGF("Aborting process..."); \
+ abort(); \
+ } \
+ } while (0)
+
+
+
+
+static const int kIndentStringMaxLength = 128;
+
+template <int Size>
+inline int IndentString<Size>::calculateOffset(const int length) const
+{
+ const int offset = kIndentStringMaxLength - length*Size;
+ return offset < 0 ? 0 : offset;
+}
+
+template <int Size>
+inline IndentString<Size>::IndentString(const int length) :
+ mOffset(calculateOffset(length))
+{}
+
+template <int Size>
+inline const char * IndentString<Size>::string() const
+{
+ extern const char sIndentStringBuffer[];
+ return sIndentStringBuffer + mOffset;
+}
+
+
+
+
+inline Debug * Debug::instance()
+{ return &sInstance; }
+
+
+inline Debug::ThreadInfo * Debug::findCurrentThreadInfo()
+{
+ // retain reference to threads data
+ android::sp<Data> data = mData;
+
+ // iterate over threads to locate thread id,
+ // this is safe from race conditions because each thread
+    // is able to modify only its own ThreadInfo structure
+ const int32_t threadId = reinterpret_cast<int32_t>(androidGetThreadId());
+ const int size = int(data->threads.size());
+ for ( int i = 0; i < size; ++i )
+ {
+ ThreadInfo * const threadInfo = data->threads.itemAt(i);
+ if ( threadInfo->threadId == threadId )
+ return threadInfo;
+ }
+
+ // this thread has not been registered yet,
+    // try to find an empty thread info slot
+ while ( true )
+ {
+ ThreadInfo * const threadInfo = registerThread(data.get(), threadId);
+ if ( threadInfo )
+ return threadInfo;
+
+ // failed registering thread, because all slots are occupied
+ // grow the data and try again
+ grow();
+
+ data = mData;
+ }
+
+ // should never reach here
+ _DBGUTILS_PLAIN_ASSERT(false);
+ return 0;
+}
+
+
+inline void Debug::addOffsetForCurrentThread(const int offset)
+{
+ if ( offset == 0 )
+ return;
+
+ ThreadInfo * const threadInfo = findCurrentThreadInfo();
+ _DBGUTILS_PLAIN_ASSERT(threadInfo);
+
+ threadInfo->callOffset += offset;
+
+ if ( threadInfo->callOffset == 0 )
+ {
+ // thread call stack has dropped to zero, unregister it
+ android_atomic_acquire_store(0, &threadInfo->threadId);
+ }
+}
+
+
+inline int Debug::offsetForCurrentThread()
+{
+#ifdef TI_UTILS_FUNCTION_LOGGER_ENABLE
+ ThreadInfo * const threadInfo = findCurrentThreadInfo();
+ _DBGUTILS_PLAIN_ASSERT(threadInfo);
+
+ return threadInfo->callOffset;
+#else
+ return 0;
+#endif
+}
+
+
+inline void Debug::increaseOffsetForCurrentThread()
+{
+#ifdef TI_UTILS_FUNCTION_LOGGER_ENABLE
+ addOffsetForCurrentThread(1);
+#endif
+}
+
+
+inline void Debug::decreaseOffsetForCurrentThread()
+{
+#ifdef TI_UTILS_FUNCTION_LOGGER_ENABLE
+ addOffsetForCurrentThread(-1);
+#endif
+}
+
+
+inline void Debug::log(const int priority, const char * const format, ...)
+{
+ va_list args;
+ va_start(args, format);
+ __android_log_vprint(priority, LOG_TAG, format, args);
+ va_end(args);
+}
+
+
+
+
+inline FunctionLogger::FunctionLogger(const char * const file, const int line, const char * const function) :
+ mFile(file), mLine(line), mFunction(function), mThreadId(androidGetThreadId()), mExitLine(-1)
+{
+ Debug * const debug = Debug::instance();
+ debug->increaseOffsetForCurrentThread();
+ android_printLog(ANDROID_LOG_DEBUG, LOG_TAG,
+ TI_UTILS_DEBUG_TIMESTAMP_TOKEN "(%x) %s+ %s:%d %s - ENTER",
+ TI_UTILS_DEBUG_TIMESTAMP_VARIABLE
+ (int)mThreadId, IndentString<>(debug->offsetForCurrentThread()).string(),
+ mFile, mLine, mFunction);
+}
+
+
+inline FunctionLogger::~FunctionLogger()
+{
+ Debug * const debug = Debug::instance();
+ android_printLog(ANDROID_LOG_DEBUG, LOG_TAG,
+ TI_UTILS_DEBUG_TIMESTAMP_TOKEN "(%x) %s- %s:%d %s - EXIT",
+ TI_UTILS_DEBUG_TIMESTAMP_VARIABLE
+ (int)mThreadId, IndentString<>(debug->offsetForCurrentThread()).string(),
+ mFile, mExitLine == -1 ? mLine : mExitLine, mFunction);
+ debug->decreaseOffsetForCurrentThread();
+}
+
+
+inline void FunctionLogger::setExitLine(const int line)
+{
+ if ( mExitLine != -1 )
+ {
+ Debug * const debug = Debug::instance();
+ android_printLog(ANDROID_LOG_DEBUG, LOG_TAG,
+ TI_UTILS_DEBUG_TIMESTAMP_TOKEN "(%x) %s %s:%d %s - Double function exit trace detected. Previous: %d",
+ TI_UTILS_DEBUG_TIMESTAMP_VARIABLE
+ (int)mThreadId, IndentString<>(debug->offsetForCurrentThread()).string(),
+ mFile, line, mFunction, mExitLine);
+ }
+
+ mExitLine = line;
+}
+
+
+
+
+} // namespace Ti
+
+
+
+
+#endif //DEBUG_UTILS_H
diff --git a/libtiutils/ErrorUtils.cpp b/libtiutils/ErrorUtils.cpp
new file mode 100644
index 0000000..7aa074f
--- /dev/null
+++ b/libtiutils/ErrorUtils.cpp
@@ -0,0 +1,141 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+#include "ErrorUtils.h"
+
+namespace Ti {
+namespace Utils {
+
+/**
+ @brief Method to convert from POSIX to Android errors
+
+ @param error Any of the standard POSIX error codes (defined in bionic/libc/kernel/common/asm-generic/errno.h)
+ @return Any of the standard Android error code (defined in frameworks/base/include/utils/Errors.h)
+ */
+status_t ErrorUtils::posixToAndroidError(int error)
+{
+ switch(error)
+ {
+ case 0:
+ return NO_ERROR;
+ case EINVAL:
+ case EFBIG:
+ case EMSGSIZE:
+ case E2BIG:
+ case EFAULT:
+ case EILSEQ:
+ return BAD_VALUE;
+ case ENOSYS:
+ return INVALID_OPERATION;
+ case EACCES:
+ case EPERM:
+ return PERMISSION_DENIED;
+ case EADDRINUSE:
+ case EAGAIN:
+ case EALREADY:
+ case EBUSY:
+ case EEXIST:
+ case EINPROGRESS:
+ return ALREADY_EXISTS;
+ case ENOMEM:
+ return NO_MEMORY;
+ default:
+ return UNKNOWN_ERROR;
+ };
+
+ return NO_ERROR;
+}
+
+
+/**
+ @brief Method to convert from TI OSAL to Android errors
+
+ @param error Any of the standard TI OSAL error codes (defined in
+ hardware/ti/omx/ducati/domx/system/mm_osal/inc/timm_osal_error.h)
+ @return Any of the standard Android error code (defined in frameworks/base/include/utils/Errors.h)
+ */
+status_t ErrorUtils::osalToAndroidError(int error)
+{
+ switch(error)
+ {
+ case 0:
+ return NO_ERROR;
+ case -1:
+ return NO_MEMORY;
+ default:
+ return UNKNOWN_ERROR;
+ }
+
+ return NO_ERROR;
+}
+
+/**
+ @brief Method to convert from OMX to Android errors
+
+ @param error Any of the standard OMX error codes (defined in hardware/ti/omx/ducati/domx/system/omx_core/inc/OMX_Core.h)
+ @return Any of the standard Android error code (defined in frameworks/base/include/utils/Errors.h)
+ */
+/*
+status_t ErrorUtils::omxToAndroidError(OMX_ERRORTYPE error)
+{
+ switch(error)
+ {
+ case OMX_ErrorNone:
+ return NO_ERROR;
+ case OMX_ErrorBadParameter:
+ case OMX_ErrorInvalidComponentName:
+ case OMX_ErrorUndefined:
+ case OMX_ErrorInvalidState:
+ case OMX_ErrorStreamCorrupt:
+ case OMX_ErrorPortsNotCompatible:
+ case OMX_ErrorVersionMismatch:
+ case OMX_ErrorMbErrorsInFrame:
+ return BAD_VALUE;
+ case OMX_ErrorInsufficientResources:
+ return NO_MEMORY;
+ case OMX_ErrorComponentNotFound:
+ case OMX_ErrorNotImplemented:
+ case OMX_ErrorFormatNotDetected:
+ case OMX_ErrorUnsupportedSetting:
+ return NAME_NOT_FOUND;
+ case OMX_ErrorUnderflow:
+ case OMX_ErrorOverflow:
+ case OMX_ErrorUnsupportedIndex:
+ case OMX_ErrorBadPortIndex:
+ return BAD_INDEX;
+ case OMX_ErrorHardware:
+ case OMX_ErrorContentPipeCreationFailed:
+ case OMX_ErrorContentPipeOpenFailed:
+ return FAILED_TRANSACTION;
+ case OMX_ErrorTimeout:
+ return TIMED_OUT;
+ case OMX_ErrorSameState:
+ case OMX_ErrorIncorrectStateTransition:
+ case OMX_ErrorIncorrectStateOperation:
+ return PERMISSION_DENIED;
+ case OMX_ErrorTunnelingUnsupported:
+ return INVALID_OPERATION;
+ default:
+ return UNKNOWN_ERROR;
+ }
+
+ return NO_ERROR;
+}
+*/
+
+} // namespace Utils
+} // namespace Ti
diff --git a/libtiutils/ErrorUtils.h b/libtiutils/ErrorUtils.h
new file mode 100644
index 0000000..709d33d
--- /dev/null
+++ b/libtiutils/ErrorUtils.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ERROR_UTILS_H
+#define ERROR_UTILS_H
+
+///Header file where all the android error codes are defined
+#include <utils/Errors.h>
+
+///Header file where all the OMX error codes are defined
+//#include <OMX_Core.h>
+
+#include "Status.h"
+
+namespace Ti {
+namespace Utils {
+
///Generic class with static methods that translate error codes from other
///domains (POSIX errno values, TI OSAL return codes) into the Android
///status_t error space, so callers can propagate a single error type.
class ErrorUtils
{
public:
    ///Method to convert from POSIX to Android errors.
    ///@param error errno-style POSIX error code (0 means success)
    ///@return the equivalent android status code
    static status_t posixToAndroidError(int error);

    ///Method to convert from TI OSAL to Android errors.
    ///@param error OSAL return code (0 success, -1 out of memory)
    ///@return the equivalent android status code
    static status_t osalToAndroidError(int error);

    ///Method to convert from OMX to Android errors
    ///(commented out: the OMX_Core.h include is disabled in this tree)
//    static status_t omxToAndroidError(OMX_ERRORTYPE error);

};
+
+} // namespace Utils
+} // namespace Ti
+
+#endif /// ERROR_UTILS_H
diff --git a/libtiutils/MessageQueue.cpp b/libtiutils/MessageQueue.cpp
new file mode 100644
index 0000000..13b1d53
--- /dev/null
+++ b/libtiutils/MessageQueue.cpp
@@ -0,0 +1,419 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+#include <errno.h>
+#include <string.h>
+#include <sys/types.h>
+#include <sys/poll.h>
+#include <unistd.h>
+#include <utils/Errors.h>
+
+
+
+#define LOG_TAG "MessageQueue"
+#include <utils/Log.h>
+
+#include "MessageQueue.h"
+
+namespace Ti {
+namespace Utils {
+
+/**
+ @brief Constructor for the message queue class
+
+ @param none
+ @return none
+ */
+MessageQueue::MessageQueue()
+{
+ LOG_FUNCTION_NAME;
+
+ int fds[2] = {-1,-1};
+ android::status_t stat;
+
+ stat = pipe(fds);
+
+ if ( 0 > stat )
+ {
+ MSGQ_LOGEB("Error while openning pipe: %s", strerror(stat) );
+ this->fd_read = 0;
+ this->fd_write = 0;
+ mHasMsg = false;
+ }
+ else
+ {
+ this->fd_read = fds[0];
+ this->fd_write = fds[1];
+
+ mHasMsg = false;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ @brief Destructor for the semaphore class
+
+ @param none
+ @return none
+ */
+MessageQueue::~MessageQueue()
+{
+ LOG_FUNCTION_NAME;
+
+ if(this->fd_read >= 0)
+ {
+ close(this->fd_read);
+ }
+
+ if(this->fd_write >= 0)
+ {
+ close(this->fd_write);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ @brief Get a message from the queue
+
+ @param msg Message structure to hold the message to be retrieved
+ @return android::NO_ERROR On success
+ @return android::BAD_VALUE if the message pointer is NULL
+ @return android::NO_INIT If the file read descriptor is not set
+ @return android::UNKNOWN_ERROR if the read operation fromthe file read descriptor fails
+ */
+android::status_t MessageQueue::get(Message* msg)
+{
+ LOG_FUNCTION_NAME;
+
+ if(!msg)
+ {
+ MSGQ_LOGEA("msg is NULL");
+ LOG_FUNCTION_NAME_EXIT;
+ return android::BAD_VALUE;
+ }
+
+ if(!this->fd_read)
+ {
+ MSGQ_LOGEA("read descriptor not initialized for message queue");
+ LOG_FUNCTION_NAME_EXIT;
+ return android::NO_INIT;
+ }
+
+ char* p = (char*) msg;
+ size_t read_bytes = 0;
+
+ while( read_bytes < sizeof(*msg) )
+ {
+ int err = read(this->fd_read, p, sizeof(*msg) - read_bytes);
+
+ if( err < 0 )
+ {
+ MSGQ_LOGEB("read() error: %s", strerror(errno));
+ return android::UNKNOWN_ERROR;
+ }
+ else
+ {
+ read_bytes += err;
+ }
+ }
+
+ MSGQ_LOGDB("MQ.get(%d,%p,%p,%p,%p)", msg->command, msg->arg1,msg->arg2,msg->arg3,msg->arg4);
+
+ mHasMsg = false;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return 0;
+}
+
/**
   @brief Get the input file descriptor of the message queue

   @param none
   @return file read descriptor (0 when the queue failed to initialize,
           see the constructor)
 */

int MessageQueue::getInFd()
{
    return this->fd_read;
}
+
/**
   @brief Set the input (read) file descriptor of the message queue,
          closing any previously owned read descriptor first.
          (The original comment wrongly described this as a constructor.)

   @param fd file read descriptor; the queue takes ownership of it
   @return none
 */

void MessageQueue::setInFd(int fd)
{
    LOG_FUNCTION_NAME;

    // NOTE(review): the rest of this class uses 0 (not -1) as the
    // "uninitialized" marker, so this guard can close fd 0 (stdin) --
    // confirm the intended invalid-fd convention.
    if ( -1 != this->fd_read )
    {
        close(this->fd_read);
    }

    this->fd_read = fd;

    LOG_FUNCTION_NAME_EXIT;
}
+
+/**
+ @brief Queue a message
+
+ @param msg Message structure to hold the message to be retrieved
+ @return android::NO_ERROR On success
+ @return android::BAD_VALUE if the message pointer is NULL
+ @return android::NO_INIT If the file write descriptor is not set
+ @return android::UNKNOWN_ERROR if the write operation fromthe file write descriptor fails
+ */
+
+android::status_t MessageQueue::put(Message* msg)
+{
+ LOG_FUNCTION_NAME;
+
+ char* p = (char*) msg;
+ size_t bytes = 0;
+
+ if(!msg)
+ {
+ MSGQ_LOGEA("msg is NULL");
+ LOG_FUNCTION_NAME_EXIT;
+ return android::BAD_VALUE;
+ }
+
+ if(!this->fd_write)
+ {
+ MSGQ_LOGEA("write descriptor not initialized for message queue");
+ LOG_FUNCTION_NAME_EXIT;
+ return android::NO_INIT;
+ }
+
+
+ MSGQ_LOGDB("MQ.put(%d,%p,%p,%p,%p)", msg->command, msg->arg1,msg->arg2,msg->arg3,msg->arg4);
+
+ while( bytes < sizeof(msg) )
+ {
+ int err = write(this->fd_write, p, sizeof(*msg) - bytes);
+
+ if( err < 0 )
+ {
+ MSGQ_LOGEB("write() error: %s", strerror(errno));
+ LOG_FUNCTION_NAME_EXIT;
+ return android::UNKNOWN_ERROR;
+ }
+ else
+ {
+ bytes += err;
+ }
+ }
+
+ MSGQ_LOGDA("MessageQueue::put EXIT");
+
+ LOG_FUNCTION_NAME_EXIT;
+ return 0;
+}
+
+
+/**
+ @brief Returns if the message queue is empty or not
+
+ @param none
+ @return true If the queue is empty
+ @return false If the queue has at least one message
+ */
+bool MessageQueue::isEmpty()
+{
+ LOG_FUNCTION_NAME;
+
+ struct pollfd pfd;
+
+ pfd.fd = this->fd_read;
+ pfd.events = POLLIN;
+ pfd.revents = 0;
+
+ if(!this->fd_read)
+ {
+ MSGQ_LOGEA("read descriptor not initialized for message queue");
+ LOG_FUNCTION_NAME_EXIT;
+ return android::NO_INIT;
+ }
+
+
+ if( -1 == poll(&pfd,1,0) )
+ {
+ MSGQ_LOGEB("poll() error: %s", strerror(errno));
+ LOG_FUNCTION_NAME_EXIT;
+ return false;
+ }
+
+ if(pfd.revents & POLLIN)
+ {
+ mHasMsg = true;
+ }
+ else
+ {
+ mHasMsg = false;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+ return !mHasMsg;
+}
+
+void MessageQueue::clear()
+{
+ LOG_FUNCTION_NAME;
+
+ if(!this->fd_read)
+ {
+ MSGQ_LOGEA("read descriptor not initialized for message queue");
+ LOG_FUNCTION_NAME_EXIT;
+ return;
+ }
+
+ Message msg;
+ while(!isEmpty())
+ {
+ get(&msg);
+ }
+
+}
+
+
+/**
+ @brief Force whether the message queue has message or not
+
+ @param hasMsg Whether the queue has a message or not
+ @return none
+ */
+void MessageQueue::setMsg(bool hasMsg)
+ {
+ mHasMsg = hasMsg;
+ }
+
+
+/**
+ @briefWait for message in maximum three different queues with a timeout
+
+ @param queue1 First queue. At least this should be set to a valid queue pointer
+ @param queue2 Second queue. Optional.
+ @param queue3 Third queue. Optional.
+ @param timeout The timeout value (in micro secs) to wait for a message in any of the queues
+ @return android::NO_ERROR On success
+ @return android::BAD_VALUE If queue1 is NULL
+ @return android::NO_INIT If the file read descriptor of any of the provided queues is not set
+ */
+android::status_t MessageQueue::waitForMsg(MessageQueue *queue1, MessageQueue *queue2, MessageQueue *queue3, int timeout)
+ {
+ LOG_FUNCTION_NAME;
+
+ int n =1;
+ struct pollfd pfd[3];
+
+ if(!queue1)
+ {
+ MSGQ_LOGEA("queue1 pointer is NULL");
+ LOG_FUNCTION_NAME_EXIT;
+ return android::BAD_VALUE;
+ }
+
+ pfd[0].fd = queue1->getInFd();
+ if(!pfd[0].fd)
+ {
+ MSGQ_LOGEA("read descriptor not initialized for message queue1");
+ LOG_FUNCTION_NAME_EXIT;
+ return android::NO_INIT;
+ }
+ pfd[0].events = POLLIN;
+ pfd[0].revents = 0;
+ if(queue2)
+ {
+ MSGQ_LOGDA("queue2 not-null");
+ pfd[1].fd = queue2->getInFd();
+ if(!pfd[1].fd)
+ {
+ MSGQ_LOGEA("read descriptor not initialized for message queue2");
+ LOG_FUNCTION_NAME_EXIT;
+ return android::NO_INIT;
+ }
+
+ pfd[1].events = POLLIN;
+ pfd[1].revents = 0;
+ n++;
+ }
+
+ if(queue3)
+ {
+ MSGQ_LOGDA("queue3 not-null");
+ pfd[2].fd = queue3->getInFd();
+ if(!pfd[2].fd)
+ {
+ MSGQ_LOGEA("read descriptor not initialized for message queue3");
+ LOG_FUNCTION_NAME_EXIT;
+ return android::NO_INIT;
+ }
+
+ pfd[2].events = POLLIN;
+ pfd[2].revents = 0;
+ n++;
+ }
+
+
+ int ret = poll(pfd, n, timeout);
+ if(ret==0)
+ {
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+ }
+
+ if(ret<android::NO_ERROR)
+ {
+ MSGQ_LOGEB("Message queue returned error %d", ret);
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+ }
+
+ if (pfd[0].revents & POLLIN)
+ {
+ queue1->setMsg(true);
+ }
+
+ if(queue2)
+ {
+ if (pfd[1].revents & POLLIN)
+ {
+ queue2->setMsg(true);
+ }
+ }
+
+ if(queue3)
+ {
+ if (pfd[2].revents & POLLIN)
+ {
+ queue3->setMsg(true);
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+ }
+
+} // namespace Utils
+} // namespace Ti
diff --git a/libtiutils/MessageQueue.h b/libtiutils/MessageQueue.h
new file mode 100644
index 0000000..68943b7
--- /dev/null
+++ b/libtiutils/MessageQueue.h
@@ -0,0 +1,98 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+
+#ifndef __MESSAGEQUEUE_H__
+#define __MESSAGEQUEUE_H__
+
+#include "DebugUtils.h"
+#include <stdint.h>
+
+#ifdef MSGQ_DEBUG
+# define MSGQ_LOGDA DBGUTILS_LOGDA
+# define MSGQ_LOGDB DBGUTILS_LOGDB
+#else
+# define MSGQ_LOGDA(str)
+# define MSGQ_LOGDB(str, ...)
+#endif
+
+#define MSGQ_LOGEA DBGUTILS_LOGEA
+#define MSGQ_LOGEB DBGUTILS_LOGEB
+
+namespace Ti {
+namespace Utils {
+
///Message type
///Fixed-size record exchanged through MessageQueue.  The meaning of
///command and of the four opaque argument pointers is defined by the
///code on each side of the queue.
struct Message
{
    unsigned int command;
    void* arg1;
    void* arg2;
    void* arg3;
    void* arg4;
    int64_t id;
};

///Message queue implementation
///Messages are transported as raw bytes over an anonymous pipe, which is
///why a queue exposes its read descriptor and can be multiplexed with
///other queues via poll() (see waitForMsg()).
class MessageQueue
{
public:

    MessageQueue();
    ~MessageQueue();

    ///Get a message from the queue (blocks until a full Message is read)
    android::status_t get(Message*);

    ///Get the input file descriptor of the message queue
    int getInFd();

    ///Set the input file descriptor for the message queue
    void setInFd(int fd);

    ///Queue a message
    android::status_t put(Message*);

    ///Returns if the message queue is empty or not
    bool isEmpty();

    ///Discard all pending messages
    void clear();

    ///Force whether the message queue has message or not
    void setMsg(bool hasMsg=false);

    ///Wait for message in maximum three different queues with a timeout
    ///(timeout is handed to poll(); 0 returns immediately)
    static int waitForMsg(MessageQueue *queue1, MessageQueue *queue2=0, MessageQueue *queue3=0, int timeout = 0);

    ///Returns the cached has-message flag maintained by
    ///get()/isEmpty()/setMsg()
    bool hasMsg()
    {
        return mHasMsg;
    }

private:
    int fd_read;   // read end of the pipe (0 = not initialized)
    int fd_write;  // write end of the pipe (0 = not initialized)
    bool mHasMsg;  // cached "message pending" flag
};
+
+} // namespace Utils
+} // namespace Ti
+
+
+
+
+#endif
diff --git a/libtiutils/Semaphore.cpp b/libtiutils/Semaphore.cpp
new file mode 100644
index 0000000..512eee3
--- /dev/null
+++ b/libtiutils/Semaphore.cpp
@@ -0,0 +1,232 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+
+#include "Semaphore.h"
+#include "ErrorUtils.h"
+#include <utils/Log.h>
+#include <time.h>
+
+namespace Ti {
+namespace Utils {
+
+/**
+ @brief Constructor for the semaphore class
+
+ @param none
+ @return none
+ */
+Semaphore::Semaphore()
+{
+ ///Initialize the semaphore to NULL
+ mSemaphore = NULL;
+}
+
/**
   @brief Destructor of the semaphore class

   @param none
   @return none

 */
Semaphore::~Semaphore()
{
    // Release() destroys and frees the underlying sem_t if one exists.
    Release();
}
+
+/**
+ @brief: Releases semaphore
+
+ @param count >=0
+ @return NO_ERROR On Success
+ @return One of the android error codes based on semaphore de-initialization
+ */
+
+status_t Semaphore::Release()
+{
+ int status = 0;
+
+ ///Destroy only if the semaphore has been created
+ if(mSemaphore)
+ {
+ status = sem_destroy(mSemaphore);
+
+ free(mSemaphore);
+
+ mSemaphore = NULL;
+ }
+
+ ///Initialize the semaphore and return the status
+ return ErrorUtils::posixToAndroidError(status);
+
+}
+
+/**
+ @brief Create the semaphore with initial count value
+
+ @param count >=0
+ @return NO_ERROR On Success
+ @return NO_MEMORY If unable to allocate memory for the semaphore
+ @return BAD_VALUE If an invalid count value is passed (<0)
+ @return One of the android error codes based on semaphore initialization
+ */
+
+status_t Semaphore::Create(int count)
+{
+ status_t ret = NO_ERROR;
+
+ ///count cannot be less than zero
+ if(count<0)
+ {
+ return BAD_VALUE;
+ }
+
+ ret = Release();
+ if ( NO_ERROR != ret )
+ {
+ return ret;
+ }
+
+ ///allocate memory for the semaphore
+ mSemaphore = (sem_t*)malloc(sizeof(sem_t)) ;
+
+ ///if memory is unavailable, return error
+ if(!mSemaphore)
+ {
+ return NO_MEMORY;
+ }
+
+ ///Initialize the semaphore and return the status
+ return ErrorUtils::posixToAndroidError(sem_init(mSemaphore, 0x00, count));
+
+}
+
+/**
+ @brief Wait operation
+
+ @param none
+ @return BAD_VALUE if the semaphore is not initialized
+ @return NO_ERROR On success
+ @return One of the android error codes based on semaphore wait operation
+ */
+status_t Semaphore::Wait()
+{
+ ///semaphore should have been created first
+ if(!mSemaphore)
+ {
+ return BAD_VALUE;
+ }
+
+ ///Wait and return the status after signalling
+ return ErrorUtils::posixToAndroidError(sem_wait(mSemaphore));
+
+
+}
+
+
+/**
+ @brief Signal operation
+
+ @param none
+ @return BAD_VALUE if the semaphore is not initialized
+ @return NO_ERROR On success
+ @return One of the android error codes based on semaphore signal operation
+ */
+
+status_t Semaphore::Signal()
+{
+ ///semaphore should have been created first
+ if(!mSemaphore)
+ {
+ return BAD_VALUE;
+ }
+
+ ///Post to the semaphore
+ return ErrorUtils::posixToAndroidError(sem_post(mSemaphore));
+
+}
+
+/**
+ @brief Current semaphore count
+
+ @param none
+ @return Current count value of the semaphore
+ */
+int Semaphore::Count()
+{
+ int val;
+
+ ///semaphore should have been created first
+ if(!mSemaphore)
+ {
+ return BAD_VALUE;
+ }
+
+ ///get the value of the semaphore
+ sem_getvalue(mSemaphore, &val);
+
+ return val;
+}
+
+/**
+ @brief Wait operation with a timeout
+
+ @param timeoutMicroSecs The timeout period in micro seconds
+ @return BAD_VALUE if the semaphore is not initialized
+ @return NO_ERROR On success
+ @return One of the android error codes based on semaphore wait operation
+ */
+
+status_t Semaphore::WaitTimeout(int timeoutMicroSecs)
+{
+ status_t ret = NO_ERROR;
+
+ struct timespec timeSpec;
+ struct timeval currentTime;
+
+ ///semaphore should have been created first
+ if( NULL == mSemaphore)
+ {
+ ret = BAD_VALUE;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+
+ ///setup the timeout values - timeout is specified in seconds and nanoseconds
+ gettimeofday(&currentTime, NULL);
+ timeSpec.tv_sec = currentTime.tv_sec;
+ timeSpec.tv_nsec = currentTime.tv_usec * 1000;
+ timeSpec.tv_sec += ( timeoutMicroSecs / 1000000 );
+ timeSpec.tv_nsec += ( timeoutMicroSecs % 1000000) * 1000;
+
+ ///Wait for the timeout or signal and return the result based on whichever event occurred first
+ ret = sem_timedwait(mSemaphore, &timeSpec);
+ }
+
+ if ( NO_ERROR != ret )
+ {
+ Signal();
+ Create(0);
+ }
+
+ return ret;
+}
+
+
+} // namespace Utils
+} // namespace Ti
diff --git a/libtiutils/Semaphore.h b/libtiutils/Semaphore.h
new file mode 100644
index 0000000..8d64f3f
--- /dev/null
+++ b/libtiutils/Semaphore.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+
+#include <utils/Errors.h>
+#include <semaphore.h>
+#include <stdio.h>
+#include <stdarg.h>
+#include <stdlib.h>
+#include <string.h>
+#include <unistd.h>
+
+#include "Status.h"
+
+namespace Ti {
+namespace Utils {
+
+class Semaphore
+{
+public:
+
+ Semaphore();
+ ~Semaphore();
+
+ //Release semaphore
+ status_t Release();
+
+ ///Create the semaphore with initial count value
+ status_t Create(int count=0);
+
+ ///Wait operation
+ status_t Wait();
+
+ ///Signal operation
+ status_t Signal();
+
+ ///Current semaphore count
+ int Count();
+
+ ///Wait operation with a timeout
+ status_t WaitTimeout(int timeoutMicroSecs);
+
+private:
+ sem_t *mSemaphore;
+
+};
+
+} // namespace Utils
+} // namespace Ti
diff --git a/libtiutils/Status.h b/libtiutils/Status.h
new file mode 100644
index 0000000..ded2cec
--- /dev/null
+++ b/libtiutils/Status.h
@@ -0,0 +1,67 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef TI_UTILS_STATUS_H
+#define TI_UTILS_STATUS_H
+
+#include <utils/Errors.h>
+
+#include "UtilsCommon.h"
+
+
+
+
+namespace Ti {
+
+
+
+
// Ti::status_t is a plain int, layout-compatible with android::status_t.
typedef int status_t;

// Mirror the android error codes into the Ti namespace so code using
// Ti::status_t can spell the constants without the android:: qualifier
// while keeping exactly the same numeric values.
#define TI_CAMERA_DEFINE_STATUS_CODE(x) x = android::x,
enum {
    TI_CAMERA_DEFINE_STATUS_CODE(OK)
    TI_CAMERA_DEFINE_STATUS_CODE(NO_ERROR)
    TI_CAMERA_DEFINE_STATUS_CODE(UNKNOWN_ERROR)
    TI_CAMERA_DEFINE_STATUS_CODE(NO_MEMORY)
    TI_CAMERA_DEFINE_STATUS_CODE(INVALID_OPERATION)
    TI_CAMERA_DEFINE_STATUS_CODE(BAD_VALUE)
    TI_CAMERA_DEFINE_STATUS_CODE(BAD_TYPE)
    TI_CAMERA_DEFINE_STATUS_CODE(NAME_NOT_FOUND)
    TI_CAMERA_DEFINE_STATUS_CODE(PERMISSION_DENIED)
    TI_CAMERA_DEFINE_STATUS_CODE(NO_INIT)
    TI_CAMERA_DEFINE_STATUS_CODE(ALREADY_EXISTS)
    TI_CAMERA_DEFINE_STATUS_CODE(DEAD_OBJECT)
    TI_CAMERA_DEFINE_STATUS_CODE(FAILED_TRANSACTION)
    TI_CAMERA_DEFINE_STATUS_CODE(JPARKS_BROKE_IT)
    TI_CAMERA_DEFINE_STATUS_CODE(BAD_INDEX)
    TI_CAMERA_DEFINE_STATUS_CODE(NOT_ENOUGH_DATA)
    TI_CAMERA_DEFINE_STATUS_CODE(WOULD_BLOCK)
    TI_CAMERA_DEFINE_STATUS_CODE(TIMED_OUT)
    TI_CAMERA_DEFINE_STATUS_CODE(UNKNOWN_TRANSACTION)
    TI_CAMERA_DEFINE_STATUS_CODE(FDS_NOT_ALLOWED)
};
// Keep the helper macro from leaking into every including translation unit.
#undef TI_CAMERA_DEFINE_STATUS_CODE
+
+
+
+
+} // namespace Ti
+
+
+
+
+#endif // TI_UTILS_STATUS_H
diff --git a/libtiutils/UtilsCommon.h b/libtiutils/UtilsCommon.h
new file mode 100644
index 0000000..8aaeee7
--- /dev/null
+++ b/libtiutils/UtilsCommon.h
@@ -0,0 +1,99 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef TI_UTILS_COMMON_H
+#define TI_UTILS_COMMON_H
+
+#include <android/api-level.h>
+#include <android/log.h>
+
+
+
namespace Ti {


// Default floating point type used by the camera HAL utilities.
typedef float real;


// Convert x to int by truncating toward zero.
// NOTE(review): for negative non-integral inputs this is NOT a
// mathematical floor (e.g. floor(-2.5) yields -2, not -3), because
// static_cast truncates; round() below relies on exactly this behavior.
template <typename T>
inline int floor(const T x) {
    return static_cast<int>(x);
}

// Round x to the nearest integer.  Non-negative halves round up; the
// negative branch reuses the truncating floor() above, preserving the
// historical results of this header.
template <typename T>
inline int round(const T x) {
    if ( x >= 0 ) {
        return floor(x + T(0.5));
    }
    return floor(x - floor(x - T(1)) + T(0.5)) + floor(x - T(1));
}

// Smaller of the two arguments, returned by const reference.
template <typename T>
inline const T & min(const T & a, const T & b) {
    return a < b ? a : b;
}

// Larger of the two arguments, returned by const reference.
template <typename T>
inline const T & max(const T & a, const T & b) {
    return a < b ? b : a;
}

// Clamp x into the inclusive range [min, max].
template <typename T>
inline const T & bound(const T & min, const T & x, const T & max) {
    return x < min ? min : x > max ? max : x;
}

// Absolute value of x.
template <typename T>
inline T abs(const T & x) {
    return x >= 0 ? x : -x;
}


} // namespace Ti
+
+
+
+
+#endif // TI_UTILS_COMMON_H