栏目分类:
子分类:
返回
名师互学网用户登录
快速导航关闭
当前搜索
当前分类
子分类
实用工具
热门搜索
名师互学网 > IT > 软件开发 > 后端开发 > Java

android8,搜狐新闻客户端android面试题

Java 更新时间: 发布时间: IT归档 最新发布 模块sitemap 名妆网 法律咨询 聚返吧 英语巴士网 伯小乐 网商动力

android8,搜狐新闻客户端android面试题

// Log tag for this translation unit; LOG_NDEBUG 0 enables verbose ALOGV output.
#define LOG_TAG "usb-Camera"

#define LOG_NDEBUG 0

#include

#include

#include

#include

#include

#include

#include

#include

#include

#include “DbgUtils.h”

#include “Camera.h”

#include “ImageConverter.h”

#include “libyuv.h”

//#include

#include

#include

#include

namespace android {

// Constructor: fills in the camera3_device header handed to the camera
// service, allocates the RGBA-sized scratch buffers used during frame
// conversion, and creates the V4L2 device wrapper. mValid is cleared if the
// device object could not be created.
Camera::Camera()
    : mStaticCharacteristics(NULL)
    , mCallbackOps(NULL)
    , mJpegBufferSize(0) {
    ALOGI("Camera() start");
    DBGUTILS_AUTOLOGCALL(__func__);

    // Per-template default request settings are built lazily in
    // constructDefaultRequestSettings().
    for(size_t i = 0; i < NELEM(mDefaultRequestSettings); i++) {
        mDefaultRequestSettings[i] = NULL;
    }

    // hw_device_t / camera3_device header expected by the framework.
    common.tag      = HARDWARE_DEVICE_TAG;
    common.version  = CAMERA_DEVICE_API_VERSION_3_2;
    common.module   = &HAL_MODULE_INFO_SYM.common;
    common.close    = Camera::sClose;
    ops             = &sOps;
    priv            = NULL;

    mValid = true;

    // Scratch buffers sized for a 640x480 RGBA frame (4 bytes/pixel).
    mframeBuffer = new uint8_t[640*480*4];
    rszbuffer = new uint8_t[640*480*4];

    mDev = new V4l2Device();
    if(!mDev) {
        mValid = false;
    }
}

// Destructor: tear down in dependency order — stop the worker threads first
// (they may still be consuming frames), then disconnect the V4L2 device,
// then free the conversion scratch buffers and the device object.
Camera::~Camera() {

DBGUTILS_AUTOLOGCALL(__func__);

// Workers must stop before the device is disconnected.
gWorkers.stop();

mDev->disconnect();

delete[] mframeBuffer;

delete[] rszbuffer;

delete mDev;

}

// Fills |info| with the static description of this camera for the camera
// service: facing, orientation, HAL device version, and the (lazily built,
// cached) static characteristics metadata. Ownership of the metadata stays
// with this Camera; the framework only reads it.
status_t Camera::cameraInfo(struct camera_info *info) {
    DBGUTILS_AUTOLOGCALL(__func__);
    ALOGI("Camera::cameraInfo entry");

    Mutex::Autolock lock(mMutex);

    info->facing = CAMERA_FACING_FRONT;
    info->orientation = 0;
    info->device_version = CAMERA_DEVICE_API_VERSION_3_2;
    info->static_camera_characteristics = staticCharacteristics();

    return NO_ERROR;
}

// Opens the camera: connects the V4L2 device, returns the embedded
// camera3_device header to the framework via |device|, and starts the
// worker thread pool.
int Camera::openDevice(hw_device_t **device) {
    ALOGI("%s", __FUNCTION__);
    DBGUTILS_AUTOLOGCALL(__func__);
    Mutex::Autolock lock(mMutex);

    mDev->connect();
    *device = &common;

    gWorkers.start();

    return NO_ERROR;
}

// Closes the camera: stops the worker threads before disconnecting the
// V4L2 device (mirrors the destructor's teardown order). Always succeeds.
int Camera::closeDevice() {

ALOGI("%s",__FUNCTION__);

DBGUTILS_AUTOLOGCALL(__func__);

Mutex::Autolock lock(mMutex);

// Workers must stop before the device goes away.
gWorkers.stop();

mDev->disconnect();

return NO_ERROR;

}

// Builds (once) and caches the static characteristics metadata describing
// this USB camera: sensor geometry, available stream formats/resolutions,
// frame durations, JPEG limits and the fixed/disabled 3A capabilities.
// Also computes mJpegBufferSize as a side effect (used by BLOB captures).
// Subsequent calls return the cached metadata.
camera_metadata_t *Camera::staticCharacteristics() {
    if(mStaticCharacteristics)
        return mStaticCharacteristics;

    Camerametadata cm;

    auto &resolutions = mDev->availableResolutions();
    auto &previewResolutions = resolutions;
    auto sensorRes = mDev->sensorResolution();

    // Nominal physical sensor size: 5.0 mm wide, height scaled by aspect.
    const float sensorInfoPhysicalSize[] = {
        5.0f,
        5.0f * (float)sensorRes.height / (float)sensorRes.width
    };
    cm.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE, sensorInfoPhysicalSize, NELEM(sensorInfoPhysicalSize));

    static const float lensInfoAvailableFocalLengths[] = {3.30f};
    cm.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS, lensInfoAvailableFocalLengths, NELEM(lensInfoAvailableFocalLengths));

    static const uint8_t lensFacing = ANDROID_LENS_FACING_FRONT;
    cm.update(ANDROID_LENS_FACING, &lensFacing, 1);

    const int32_t sensorInfoPixelArraySize[] = {
        (int32_t)sensorRes.width,
        (int32_t)sensorRes.height
    };
    cm.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, sensorInfoPixelArraySize, NELEM(sensorInfoPixelArraySize));

    const int32_t sensorInfoActiveArraySize[] = {
        0,                          0,
        (int32_t)sensorRes.width,   (int32_t)sensorRes.height
    };
    cm.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, sensorInfoActiveArraySize, NELEM(sensorInfoActiveArraySize));

    static const int32_t scalerAvailableFormats[] = {
        HAL_PIXEL_FORMAT_RGBA_8888,              // preview stream
        HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, // preview stream
        HAL_PIXEL_FORMAT_BLOB                    // still-capture (JPEG) stream
    };
    cm.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalerAvailableFormats, NELEM(scalerAvailableFormats));

    // One BLOB config per resolution plus one config per non-BLOB format per
    // preview resolution. Entries are (format, width, height, extra) tuples.
    const size_t mainStreamConfigsCount = resolutions.size();
    const size_t previewStreamConfigsCount = previewResolutions.size() * (NELEM(scalerAvailableFormats) - 1);
    const size_t streamConfigsCount = mainStreamConfigsCount + previewStreamConfigsCount;

    int32_t scalerAvailableStreamConfigurations[streamConfigsCount * 4];
    int64_t scalerAvailableMinFrameDurations[streamConfigsCount * 4];
    int32_t scalerAvailableProcessedSizes[previewResolutions.size() * 2];
    int64_t scalerAvailableProcessedMinDurations[previewResolutions.size()];
    int32_t scalerAvailableJpegSizes[resolutions.size() * 2];
    int64_t scalerAvailableJpegMinDurations[resolutions.size()];

    size_t i4 = 0;
    size_t i2 = 0;
    size_t i1 = 0;

    // BLOB (JPEG) configurations: 30 fps minimum frame duration.
    for(size_t resId = 0; resId < resolutions.size(); ++resId) {
        scalerAvailableStreamConfigurations[i4 + 0] = HAL_PIXEL_FORMAT_BLOB;
        scalerAvailableStreamConfigurations[i4 + 1] = (int32_t)resolutions[resId].width;
        scalerAvailableStreamConfigurations[i4 + 2] = (int32_t)resolutions[resId].height;
        scalerAvailableStreamConfigurations[i4 + 3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

        scalerAvailableMinFrameDurations[i4 + 0] = HAL_PIXEL_FORMAT_BLOB;
        scalerAvailableMinFrameDurations[i4 + 1] = (int32_t)resolutions[resId].width;
        scalerAvailableMinFrameDurations[i4 + 2] = (int32_t)resolutions[resId].height;
        scalerAvailableMinFrameDurations[i4 + 3] = 1000000000 / 30;

        scalerAvailableJpegSizes[i2 + 0] = (int32_t)resolutions[resId].width;
        scalerAvailableJpegSizes[i2 + 1] = (int32_t)resolutions[resId].height;

        scalerAvailableJpegMinDurations[i1] = 1000000000 / 30;

        i4 += 4;
        i2 += 2;
        i1 += 1;
    }

    i2 = 0;
    i1 = 0;

    // Preview configurations (all formats except BLOB): 10 fps minimum.
    for(size_t resId = 0; resId < previewResolutions.size(); ++resId) {
        for(size_t fmtId = 0; fmtId < NELEM(scalerAvailableFormats) - 1; ++fmtId) {
            scalerAvailableStreamConfigurations[i4 + 0] = scalerAvailableFormats[fmtId];
            scalerAvailableStreamConfigurations[i4 + 1] = (int32_t)previewResolutions[resId].width;
            scalerAvailableStreamConfigurations[i4 + 2] = (int32_t)previewResolutions[resId].height;
            scalerAvailableStreamConfigurations[i4 + 3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

            scalerAvailableMinFrameDurations[i4 + 0] = scalerAvailableFormats[fmtId];
            scalerAvailableMinFrameDurations[i4 + 1] = (int32_t)previewResolutions[resId].width;
            scalerAvailableMinFrameDurations[i4 + 2] = (int32_t)previewResolutions[resId].height;
            scalerAvailableMinFrameDurations[i4 + 3] = 1000000000 / 10;

            i4 += 4;
        }

        scalerAvailableProcessedSizes[i2 + 0] = (int32_t)previewResolutions[resId].width;
        scalerAvailableProcessedSizes[i2 + 1] = (int32_t)previewResolutions[resId].height;

        scalerAvailableProcessedMinDurations[i1] = 1000000000 / 10;

        i2 += 2;
        i1 += 1;
    }

    cm.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, scalerAvailableStreamConfigurations, (size_t)NELEM(scalerAvailableStreamConfigurations));
    cm.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, scalerAvailableMinFrameDurations, (size_t)NELEM(scalerAvailableMinFrameDurations));
    cm.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS, scalerAvailableMinFrameDurations, (size_t)NELEM(scalerAvailableMinFrameDurations));
    cm.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES, scalerAvailableJpegSizes, (size_t)NELEM(scalerAvailableJpegSizes));
    cm.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS, scalerAvailableJpegMinDurations, (size_t)NELEM(scalerAvailableJpegMinDurations));
    cm.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES, scalerAvailableProcessedSizes, (size_t)NELEM(scalerAvailableProcessedSizes));
    cm.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS, scalerAvailableProcessedMinDurations, (size_t)NELEM(scalerAvailableProcessedMinDurations));

    // Advertise capabilities: without REQUEST_AVAILABLE_CAPABILITIES the API2
    // getStreamConfigurationMap() lookup fails.
    Vector<uint8_t> available_capabilities;
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
    cm.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
              available_capabilities.array(),
              available_capabilities.size());

    // Worst-case JPEG buffer: 9 bytes/pixel plus the trailing blob header,
    // rounded up to a whole page.
    mJpegBufferSize = sensorRes.width * sensorRes.height * 9 + sizeof(camera3_jpeg_blob);
    mJpegBufferSize = (mJpegBufferSize + PAGE_SIZE - 1u) & ~(PAGE_SIZE - 1u);

    const int32_t jpegMaxSize = (int32_t)mJpegBufferSize;
    cm.update(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1);

    static const int32_t jpegAvailableThumbnailSizes[] = {
        0, 0,
        320, 240
    };
    cm.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, jpegAvailableThumbnailSizes, NELEM(jpegAvailableThumbnailSizes));

    static const int32_t sensorOrientation = 90;
    cm.update(ANDROID_SENSOR_ORIENTATION, &sensorOrientation, 1);

    static const uint8_t flashInfoAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
    cm.update(ANDROID_FLASH_INFO_AVAILABLE, &flashInfoAvailable, 1);

    static const float scalerAvailableMaxDigitalZoom = 1;
    cm.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, &scalerAvailableMaxDigitalZoom, 1);

    // Face detection and scene/effect processing are not supported.
    static const uint8_t statisticsFaceDetectModes[] = {
        ANDROID_STATISTICS_FACE_DETECT_MODE_OFF
    };
    cm.update(ANDROID_STATISTICS_FACE_DETECT_MODE, statisticsFaceDetectModes, NELEM(statisticsFaceDetectModes));

    static const int32_t statisticsInfoMaxFaceCount = 0;
    cm.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, &statisticsInfoMaxFaceCount, 1);

    static const uint8_t controlAvailableSceneModes[] = {
        ANDROID_CONTROL_SCENE_MODE_DISABLED
    };
    cm.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES, controlAvailableSceneModes, NELEM(controlAvailableSceneModes));

    static const uint8_t controlAvailableEffects[] = {
        ANDROID_CONTROL_EFFECT_MODE_OFF
    };
    cm.update(ANDROID_CONTROL_AVAILABLE_EFFECTS, controlAvailableEffects, NELEM(controlAvailableEffects));

    // No AE/AWB/AF metering regions.
    static const int32_t controlMaxRegions[] = {
        0,
        0,
        0
    };
    cm.update(ANDROID_CONTROL_MAX_REGIONS, controlMaxRegions, NELEM(controlMaxRegions));

    static const uint8_t controlAeAvailableModes[] = {
        ANDROID_CONTROL_AE_MODE_OFF
    };
    cm.update(ANDROID_CONTROL_AE_AVAILABLE_MODES, controlAeAvailableModes, NELEM(controlAeAvailableModes));

    static const camera_metadata_rational controlAeCompensationStep = {1, 3};
    cm.update(ANDROID_CONTROL_AE_COMPENSATION_STEP, &controlAeCompensationStep, 1);

    int32_t controlAeCompensationRange[] = {-9, 9};
    cm.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE, controlAeCompensationRange, NELEM(controlAeCompensationRange));

    static const int32_t controlAeAvailableTargetFpsRanges[] = {
        10, 20
    };
    cm.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, controlAeAvailableTargetFpsRanges, NELEM(controlAeAvailableTargetFpsRanges));

    static const uint8_t controlAeAvailableAntibandingModes[] = {
        ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF
    };
    cm.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, controlAeAvailableAntibandingModes, NELEM(controlAeAvailableAntibandingModes));

    static const uint8_t controlAwbAvailableModes[] = {
        ANDROID_CONTROL_AWB_MODE_AUTO,
        ANDROID_CONTROL_AWB_MODE_OFF
    };
    cm.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES, controlAwbAvailableModes, NELEM(controlAwbAvailableModes));

    static const uint8_t controlAfAvailableModes[] = {
        ANDROID_CONTROL_AF_MODE_OFF
    };
    cm.update(ANDROID_CONTROL_AF_AVAILABLE_MODES, controlAfAvailableModes, NELEM(controlAfAvailableModes));

    static const uint8_t controlAvailableVideoStabilizationModes[] = {
        ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF
    };
    cm.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, controlAvailableVideoStabilizationModes, NELEM(controlAvailableVideoStabilizationModes));

    const uint8_t infoSupportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
    cm.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL, &infoSupportedHardwareLevel, 1);

    mStaticCharacteristics = cm.release();
    return mStaticCharacteristics;
}

// Stores the framework callback table used later by notifyShutter() and
// processCaptureResult(). Called once by the framework after open.
int Camera::initialize(const camera3_callback_ops_t *callbackOps) {

DBGUTILS_AUTOLOGCALL(__func__);

Mutex::Autolock lock(mMutex);

mCallbackOps = callbackOps;

return NO_ERROR;

}

const camera_metadata_t * Camera::constructDefaultRequestSettings(int type) {

DBGUTILS_AUTOLOGCALL(__func__);

Mutex::Autolock lock(mMutex);

if(mDefaultRequestSettings[type]) {

return mDefaultRequestSettings[type];

}

Camerametadata cm;

static const int32_t requestId = 0;

cm.update(ANDROID_REQUEST_ID, &requestId, 1);

static const float lensFocusDistance = 0.0f;

cm.update(ANDROID_LENS_FOCUS_DISTANCE, &lensFocusDistance, 1);

auto sensorSize = mDev->sensorResolution();

const int32_t scalerCropRegion[] = {

0,                          0,

(int32_t)sensorSize.width,  (int32_t)sensorSize.height

};

cm.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, NELEM(scalerCropRegion));

static const int32_t jpegThumbnailSize[] = {

0, 0

};

cm.update(ANDROID_JPEG_THUMBNAIL_SIZE, jpegThumbnailSize, NELEM(jpegThumbnailSize));

static const uint8_t jpegThumbnailQuality = 50;

cm.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &jpegThumbnailQuality, 1);

static const double jpegGpsCoordinates[] = {

0, 0

};

cm.update(ANDROID_JPEG_GPS_COORDINATES, jpegGpsCoordinates, NELEM(jpegGpsCoordinates));

static const uint8_t jpegGpsProcessingMethod[32] = “None”;

cm.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, jpegGpsProcessingMethod, NELEM(jpegGpsProcessingMethod));

static const int64_t jpegGpsTimestamp = 0;

cm.update(ANDROID_JPEG_GPS_TIMESTAMP, &jpegGpsTimestamp, 1);

static const int32_t jpegOrientation = 0;

cm.update(ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1);

static const uint8_t statisticsFaceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;

cm.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &statisticsFaceDetectMode, 1);

static const uint8_t statisticsHistogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;

cm.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &statisticsHistogramMode, 1);

static const uint8_t statisticsSharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;

cm.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &statisticsSharpnessMapMode, 1);

uint8_t controlCaptureIntent = 0;

switch (type) {

case CAMERA3_TEMPLATE_PREVIEW:          controlCaptureIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;             break;

case CAMERA3_TEMPLATE_STILL_CAPTURE:    controlCaptureIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;       break;

case CAMERA3_TEMPLATE_VIDEO_RECORD:     controlCaptureIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;        break;

case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:   controlCaptureIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;      break;

case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG: controlCaptureIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;    break;

default:                                controlCaptureIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;              break;

}

cm.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlCaptureIntent, 1);

static const uint8_t controlMode = ANDROID_CONTROL_MODE_OFF;

cm.update(ANDROID_CONTROL_MODE, &controlMode, 1);

static const uint8_t controlEffectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;

cm.update(ANDROID_CONTROL_EFFECT_MODE, &controlEffectMode, 1);

static const uint8_t controlSceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;

cm.update(ANDROID_CONTROL_SCENE_MODE, &controlSceneMode, 1);

static const uint8_t controlAeMode = ANDROID_CONTROL_AE_MODE_OFF;

cm.update(ANDROID_CONTROL_AE_MODE, &controlAeMode, 1);

static const uint8_t controlAeLock = ANDROID_CONTROL_AE_LOCK_OFF;

cm.update(ANDROID_CONTROL_AE_LOCK, &controlAeLock, 1);

static const int32_t controlAeRegions[] = {

0,                          0,

(int32_t)sensorSize.width,  (int32_t)sensorSize.height,

1000

};

cm.update(ANDROID_CONTROL_AE_REGIONS, controlAeRegions, NELEM(controlAeRegions));

cm.update(ANDROID_CONTROL_AWB_REGIONS, controlAeRegions, NELEM(controlAeRegions));

cm.update(ANDROID_CONTROL_AF_REGIONS, controlAeRegions, NELEM(controlAeRegions));

static const int32_t controlAeExposureCompensation = 0;

cm.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &controlAeExposureCompensation, 1);

static const int32_t controlAeTargetFpsRange[] = {

10, 20

};

cm.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, controlAeTargetFpsRange, NELEM(controlAeTargetFpsRange));

static const uint8_t controlAeAntibandingMode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF;

cm.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &controlAeAntibandingMode, 1);

static const uint8_t controlAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;

cm.update(ANDROID_CONTROL_AWB_MODE, &controlAwbMode, 1);

static const uint8_t controlAwbLock = ANDROID_CONTROL_AWB_LOCK_OFF;

cm.update(ANDROID_CONTROL_AWB_LOCK, &controlAwbLock, 1);

uint8_t controlAfMode = ANDROID_CONTROL_AF_MODE_OFF;

cm.update(ANDROID_CONTROL_AF_MODE, &controlAfMode, 1);

static const uint8_t controlAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;

cm.update(ANDROID_CONTROL_AE_STATE, &controlAeState, 1);

static const uint8_t controlAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;

cm.update(ANDROID_CONTROL_AF_STATE, &controlAfState, 1);

static const uint8_t controlAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;

cm.update(ANDROID_CONTROL_AWB_STATE, &controlAwbState, 1);

static const uint8_t controlVideoStabilizationMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;

cm.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &controlVideoStabilizationMode, 1);

static const int32_t controlAePrecaptureId = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;

cm.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, &controlAePrecaptureId, 1);

static const int32_t controlAfTriggerId = 0;

cm.update(ANDROID_CONTROL_AF_TRIGGER_ID, &controlAfTriggerId, 1);

mDefaultRequestSettings[type] = cm.release();

return mDefaultRequestSettings[type];

}

int Camera::configureStreams(camera3_stream_configuration_t *streamList) {

DBGUTILS_AUTOLOGCALL(__func__);

Mutex::Autolock lock(mMutex);

ALOGI(“configureStreams”);

ALOGI(“±------------------------------------------------------------------------------”);

ALOGI("| STREAMS FROM frameWORK");

ALOGI(“±------------------------------------------------------------------------------”);

for(size_t i = 0; i < streamList->num_streams; ++i) {

camera3_stream_t *newStream = streamList->streams[i];

ALOGI("| p=%p  fmt=0x%.2x  type=%u  usage=0x%.8x  size=%4ux%-4u  buf_no=%u",

newStream,

newStream->format,

newStream->stream_type,

newStream->usage,

newStream->width,

newStream->height,

newStream->max_buffers);

}

ALOGI(“±------------------------------------------------------------------------------”);

camera3_stream_t *inStream = NULL;

unsigned width = 0;

unsigned height = 0;

for(size_t i = 0; i < streamList->num_streams; ++i) {

camera3_stream_t *newStream = streamList->streams[i];

if(newStream->stream_type == CAMERA3_STREAM_INPUT || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {

if(inStream) {

ALOGI(“only one input/bidirectional stream allowed (previous is %p, this %p)”, inStream, newStream);

return BAD_VALUE;

}

inStream = newStream;

}

if(newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {

newStream->format = HAL_PIXEL_FORMAT_RGBA_8888;

}

if(newStream->usage & GRALLOC_USAGE_HW_CAMERA_ZSL) {

ALOGI(“ZSL STREAM FOUND! It is not supported for now.”);

ALOGI("    Disable it by placing following line in /system/build.prop:");

ALOGI("    camera.disable_zsl_mode=1");

return BAD_VALUE;

}

switch(newStream->stream_type) {

case CAMERA3_STREAM_OUTPUT:         newStream->usage = GRALLOC_USAGE_SW_WRITE_OFTEN;                                break;

case CAMERA3_STREAM_INPUT:          newStream->usage = GRALLOC_USAGE_SW_READ_OFTEN;                                 break;

case CAMERA3_STREAM_BIDIRECTIONAL:  newStream->usage = GRALLOC_USAGE_SW_WRITE_OFTEN | GRALLOC_USAGE_SW_READ_OFTEN;  break;

}

newStream->max_buffers = 1;

if(newStream->width * newStream->height > width * height) {

width = newStream->width;

height = newStream->height;

}

}

if(mDev->isNeedsetResolution(width, height))

{

if(mDev->isStreaming())

{

if (!mDev->setStreaming(false))

{

ALOGI(“Could not stop streaming”);

return NO_INIT;

}

}

if (!mDev->setResolution(width, height))

{

ALOGI(“Could not set resolution”);

return NO_INIT;

}

}

ALOGI(“±------------------------------------------------------------------------------”);

ALOGI("| STREAMS AFTER CHANGES");

ALOGI(“±------------------------------------------------------------------------------”);

for(size_t i = 0; i < streamList->num_streams; ++i) {

const camera3_stream_t *newStream = streamList->streams[i];

ALOGI("| p=%p  fmt=0x%.2x  type=%u  usage=0x%.8x  size=%4ux%-4u  buf_no=%u",

newStream,

newStream->format,

newStream->stream_type,

newStream->usage,

newStream->width,

newStream->height,

newStream->max_buffers);

}

ALOGI(“±------------------------------------------------------------------------------”);

if(!mDev->setStreaming(true)) {

ALOGI(“Could not start streaming”);

return NO_INIT;

}

return NO_ERROR;

}

// Logs the buffer set the framework pre-registers for a stream. This HAL
// maps buffers on demand in processCaptureRequest(), so this callback is
// informational only and always succeeds.
int Camera::registerStreamBuffers(const camera3_stream_buffer_set_t *bufferSet) {
    DBGUTILS_AUTOLOGCALL(__func__);
    Mutex::Autolock lock(mMutex);

    ALOGI("+-------------------------------------------------------------------------------");
    ALOGI("| BUFFERS FOR STREAM %p", bufferSet->stream);
    ALOGI("+-------------------------------------------------------------------------------");
    for (size_t i = 0; i < bufferSet->num_buffers; ++i) {
        ALOGI("| p=%p", bufferSet->buffers[i]);
    }
    ALOGI("+-------------------------------------------------------------------------------");

    return OK;
}

int Camera::processCaptureRequest(camera3_capture_request_t *request) {

assert(request != NULL);

Mutex::Autolock lock(mMutex);

BENCHMARK_HERE(120);

FPSCOUNTER_HERE(120);

Camerametadata cm;

const V4l2Device::VBuffer *frame = NULL;

auto res = mDev->resolution();

status_t e;

Vector buffers;

auto timestamp = systemTime();

if(request->settings == NULL && mLastRequestSettings.isEmpty()) {

ALOGI(“processCaptureRequest error 1, First request does not have metadata, BAD_VALUE is %d”, BAD_VALUE);

return BAD_VALUE;

}

if(request->input_buffer) {

request->input_buffer->release_fence = -1;

}

if(!request->settings) {

cm.acquire(mLastRequestSettings);

} else {

cm = request->settings;

}

notifyShutter(request->frame_number, (uint64_t)timestamp);

BENCHMARK_SECTION(“Lock/Read”) {

frame = mDev->readLock();

}

if(!frame) {

ALOGI(“processCaptureRequest error 2, NOT_ENOUGH_DATA is %d”, NOT_ENOUGH_DATA);

return NOT_ENOUGH_DATA;

}

buffers.setCapacity(request->num_output_buffers);

uint8_t *rgbaBuffer = NULL;

char aviRecordering[PROPERTY_VALUE_MAX];

for(size_t i = 0; i < request->num_output_buffers; ++i) {

const camera3_stream_buffer &srcBuf = request->output_buffers[i];

uint8_t *buf = NULL;

sp acquireFence = new Fence(srcBuf.acquire_fence);

e = acquireFence->wait(1500);

if(e == TIMED_OUT) {

ALOGI(“processCaptureRequest buffer %p  frame %-4u  Wait on acquire fence timed out”, srcBuf.buffer, request->frame_number);

}

if(e == NO_ERROR) {

const Rect rect((int)srcBuf.stream->width, (int)srcBuf.stream->height);

e = GraphicBufferMapper::get().lock(*srcBuf.buffer, GRALLOC_USAGE_SW_WRITE_OFTEN, rect, (void **)&buf);

if(e != NO_ERROR) {

ALOGI(“processCaptureRequest buffer %p  frame %-4u  lock failed”, srcBuf.buffer, request->frame_number);

}

}

if(e != NO_ERROR) {

ALOGI(“processCaptureRequest error 3, e is %d, errno is %d, acquire_fence is %d”, e, errno, srcBuf.acquire_fence);

do GraphicBufferMapper::get().unlock(*request->output_buffers[i].buffer); while(i–);

return NO_INIT;

}

switch(srcBuf.stream->format) {

case HAL_PIXEL_FORMAT_RGBA_8888: {

if(!rgbaBuffer) {

BENCHMARK_SECTION(“YUV->RGBA”) {

if(frame->pixFmt == V4L2_PIX_FMT_YUYV) {

//mConverter.UYVYToRGBA(frame->buf, buf, res.width, res.height);

libyuv::YUY2ToI422(frame->buf, res.width*2,

mframeBuffer, res.width,

&mframeBuffer[res.width*res.height], res.width / 2,

&mframeBuffer[res.width*res.height + res.width*res.height / 2], res.width / 2,

res.width, res.height);

_AddTimesTamp(mframeBuffer, res.width, res.height);

libyuv::I422ToABGR(mframeBuffer, res.width,

&mframeBuffer[res.width*res.height], res.width / 2,

&mframeBuffer[res.width*res.height + res.width*res.height / 2], res.width / 2,

buf,res.width*4,

res.width, res.height);

} else if (frame->pixFmt == V4L2_PIX_FMT_MJPEG) {

libyuv::MJPGToI420(frame->buf, frame->len,

rszbuffer, res.width,

&rszbuffer[res.width * res.height], res.width / 2 ,

&rszbuffer[res.width * res.height * 5 / 4 ], res.width / 2 ,

res.width, res.height,

res.width, res.height);

_AddTimesTamp(rszbuffer, res.width, res.height);

libyuv::I420ToABGR(rszbuffer, res.width,

&rszbuffer[res.width * res.height], res.width / 2 ,

&rszbuffer[res.width * res.height * 5 / 4 ], res.width / 2 ,

buf, res.width*4,

res.width, res.height);

//     ALOGI("%s, MJPG convert done!", __FUNCTION__);

} else {

;//mConverter.YUY2ToRGBA(frame->buf, buf, res.width, res.height);

}

rgbaBuffer = buf;

}

} else {

BENCHMARK_SECTION(“Buf Copy”) {

memcpy(buf, rgbaBuffer, srcBuf.stream->width * srcBuf.stream->height * 4);

}

}

break;

}

case HAL_PIXEL_FORMAT_BLOB: {

BENCHMARK_SECTION(“YUV->JPEG”) {

const size_t maxImageSize = mJpegBufferSize - sizeof(camera3_jpeg_blob);

uint8_t jpegQuality = 95;

if(cm.exists(ANDROID_JPEG_QUALITY)) {

jpegQuality = *cm.find(ANDROID_JPEG_QUALITY).data.u8;

}

ALOGI(“JPEG quality = %u”, jpegQuality);

uint8_t *bufEnd = NULL;

if(frame->pixFmt == V4L2_PIX_FMT_UYVY)

{

ALOGI(“processCaptureRequest HAL_PIXEL_FORMAT_BLOB frame->pixFmt == V4L2_PIX_FMT_UYVY”);

// bufEnd = mConverter.UYVYToJPEG(frame->buf, buf, res.width, res.height, maxImageSize, jpegQuality);

}

else if (frame->pixFmt == V4L2_PIX_FMT_MJPEG) {

ALOGI(“processCaptureRequest HAL_PIXEL_FORMAT_BLOB frame->pixFmt == V4L2_PIX_FMT_MJPEG”);

int count = frame->len / mPageSize;

int mod = frame->len % mPageSize;

uint8_t *destbuf = buf;

uint8_t * srcBuf = frame->buf;

for(int i=0; i

{

memcpy(destbuf, srcBuf, mPageSize);

destbuf +=  mPageSize;

srcBuf += mPageSize;

}

if(mod != 0)

{

memcpy(destbuf, srcBuf, mod);

}

//memcpy(buf, frame->buf, frame->len);

bufEnd = buf + frame->len;

} else

{

ALOGI(“processCaptureRequest HAL_PIXEL_FORMAT_BLOB YUY2ToJPEG”);

// bufEnd = mConverter.YUY2ToJPEG(frame->buf, buf, res.width, res.height, maxImageSize, jpegQuality);

}

if(bufEnd != buf) {

camera3_jpeg_blob *jpegBlob = reinterpret_cast(buf + maxImageSize);

jpegBlob->jpeg_blob_id  = CAMERA3_JPEG_BLOB_ID;

jpegBlob->jpeg_size     = (uint32_t)(bufEnd - buf);

} else {

ALOGI("%s: JPEG image too big!", __FUNCTION__);

}

}

break;

}

default:

ALOGI(“Unknown pixel format %d in buffer %p (stream %p), ignoring”, srcBuf.stream->format, srcBuf.buffer, srcBuf.stream);

}

}

for(size_t i = 0; i < request->num_output_buffers; ++i) {

const camera3_stream_buffer &srcBuf = request->output_buffers[i];

GraphicBufferMapper::get().unlock(*srcBuf.buffer);

buffers.push_back(srcBuf);

buffers.editTop().acquire_fence = -1;

buffers.editTop().release_fence = -1;

buffers.editTop().status = CAMERA3_BUFFER_STATUS_OK;

}

BENCHMARK_SECTION(“Unlock”) {

mDev->unlock(frame);

}

int64_t sensorTimestamp = timestamp;

int64_t syncframeNumber = request->frame_number;

cm.update(ANDROID_SENSOR_TIMESTAMP, &sensorTimestamp, 1);

cm.update(ANDROID_SYNC_frame_NUMBER, &syncframeNumber, 1);

auto result = cm.getAndLock();

processCaptureResult(request->frame_number, result, buffers);

cm.unlock(result);

// Cache the settings for next time

mLastRequestSettings.acquire(cm);

char bmOut[1024];

BENCHMARK_STRING(bmOut, sizeof(bmOut), 6);

//  ALOGI("    time (avg):  %s", bmOut);

// ALOGI(“processCaptureRequest no error”);

return NO_ERROR;

}

// Delivers a SHUTTER notification for |frameNumber| to the framework with
// the capture start |timestamp| (nanoseconds).
inline void Camera::notifyShutter(uint32_t frameNumber, uint64_t timestamp) {
    camera3_notify_msg_t shutterMsg;
    shutterMsg.type = CAMERA3_MSG_SHUTTER;
    shutterMsg.message.shutter.timestamp = timestamp;
    shutterMsg.message.shutter.frame_number = frameNumber;
    mCallbackOps->notify(mCallbackOps, &shutterMsg);
}

// Packages the |result| metadata and the filled |buffers| for |frameNumber|
// into a camera3_capture_result and delivers it to the framework callback.
// A single complete result is sent (partial_result = 0, no input buffer).
void Camera::processCaptureResult(uint32_t frameNumber, const camera_metadata_t *result, const Vector<camera3_stream_buffer> &buffers) {
    camera3_capture_result captureResult;
    captureResult.frame_number = frameNumber;
    captureResult.result = result;
    captureResult.num_output_buffers = buffers.size();
    captureResult.output_buffers = buffers.array();
    captureResult.input_buffer = NULL;
    captureResult.partial_result = 0;
    mCallbackOps->process_capture_result(mCallbackOps, &captureResult);
}

int Camera::sClose(hw_device_t *device) {

Camera *thiz = static_cast(reinterpret_cast(device));

return thiz->closeDevice();

}

int Camera::sInitialize(const camera3_device *device, const camera3_callback_ops_t *callback_ops) {

Camera *thiz = static_cast(const_cast(device));

return thiz->initialize(callback_ops);

}

int Camera::sConfigureStreams(const camera3_device *device, camera3_stream_configuration_t *stream_list) {

Camera *thiz = static_cast(const_cast(device));

return thiz->configureStreams(stream_list);

}

int Camera::sRegisterStreamBuffers(const camera3_device *device, const camera3_stream_buffer_set_t *buffer_set) {

Camera *thiz = static_cast(const_cast(device));

return thiz->registerStreamBuffers(buffer_set);

}

const camera_metadata_t * Camera::sConstructDefaultRequestSettings(const camera3_device *device, int type) {

Camera *thiz = static_cast(const_cast(device));

return thiz->constructDefaultRequestSettings(type);

}

int Camera::sProcessCaptureRequest(const camera3_device *device, camera3_capture_request_t *request) {

Camera *thiz = static_cast(const_cast(device));

return thiz->processCaptureRequest(request);

}

void Camera::sGetmetadataVendorTagOps(const camera3_device *device, vendor_tag_query_ops_t *ops) {

}

void Camera::sDump(const camera3_device *device, int fd) {

}

int Camera::sFlush(const camera3_device *device) {

return NO_ERROR;//-ENODEV;

}

// Static camera3 ops table handed to the framework via the device header in
// the constructor; every entry forwards through the sXxx trampolines above.
camera3_device_ops_t Camera::sOps = {

.initialize                         = Camera::sInitialize,

.configure_streams                  = Camera::sConfigureStreams,

.register_stream_buffers            = Camera::sRegisterStreamBuffers,

.construct_default_request_settings = Camera::sConstructDefaultRequestSettings,

.process_capture_request            = Camera::sProcessCaptureRequest,

.get_metadata_vendor_tag_ops        = Camera::sGetmetadataVendorTagOps,

.dump                               = Camera::sDump,

.flush                              = Camera::sFlush,

.reserved = {0}

};

};

大家请注意,在Camera.cpp的Camera()里设置了common.version = CAMERA_DEVICE_API_VERSION_3_2;在cameraInfo里设置了info->device_version = CAMERA_DEVICE_API_VERSION_3_2;这表示我们的HAL版本定义为HAL 3.2。

我们的HalModule.cpp的get_camera_info指向了HalModule::getCameraInfo,  getCameraInfo里又调用到了Camera::cameraInfo,然后Camera::cameraInfo通过staticCharacteristics来,获取我们usbcamera的属性。这样在app上就可以通过getCameraInfo来获取我们usbcamera的属性了。也可以使用下面的方法来获取我们预览或者拍尺寸等:

**public List getSupportedPreviewSizes() {

StreamConfigurationMap configMap;

CameraCharacteristics mCameraCharacteristics;

try {

configMap = mCameraCharacteristics.get(

CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

} catch (Exception ex) {

return new ArrayList<>(0);

}

ArrayList supportedPictureSizes = new ArrayList<>();

for (android.util.Size androidSize : configMap.getOutputSizes(SurfaceTexture.class)) {

supportedPictureSizes.add(new Size(androidSize));

}

int Camera::sFlush(const camera3_device *device) {

return NO_ERROR;//-ENODEV;

}

camera3_device_ops_t Camera::sOps = {

.initialize                         = Camera::sInitialize,

.configure_streams                  = Camera::sConfigureStreams,

.register_stream_buffers            = Camera::sRegisterStreamBuffers,

.construct_default_request_settings = Camera::sConstructDefaultRequestSettings,

.process_capture_request            = Camera::sProcessCaptureRequest,

.get_metadata_vendor_tag_ops        = Camera::sGetmetadataVendorTagOps,

.dump                               = Camera::sDump,

.flush                              = Camera::sFlush,

.reserved = {0}

};

};

大家请注意,在Camera.cpp的Camera()里调用了common.version  = CAMERA_DEVICE_API_VERSION_3_2;  在cameraInfo里调用了info->device_version = CAMERA_DEVICE_API_VERSION_3_2; 这里表示,我们的hal的版本定义为了hal3.2。**

我们的HalModule.cpp的get_camera_info指向了HalModule::getCameraInfo,  getCameraInfo里又调用到了Camera::cameraInfo,然后Camera::cameraInfo通过staticCharacteristics来,获取我们usbcamera的属性。这样在app上就可以通过getCameraInfo来获取我们usbcamera的属性了。也可以使用下面的方法来获取我们预览或者拍尺寸等:

**public List getSupportedPreviewSizes() {

StreamConfigurationMap configMap;

CameraCharacteristics mCameraCharacteristics;

try {

configMap = mCameraCharacteristics.get(

CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

} catch (Exception ex) {

return new ArrayList<>(0);

}

ArrayList supportedPictureSizes = new ArrayList<>();

for (android.util.Size androidSize : configMap.getOutputSizes(SurfaceTexture.class)) {

supportedPictureSizes.add(new Size(androidSize));

}

转载请注明:文章转载自 www.mshxw.com
本文地址:https://www.mshxw.com/it/720407.html
我们一直用心在做
关于我们 文章归档 网站地图 联系我们

版权所有 (c)2021-2022 MSHXW.COM

ICP备案号:晋ICP备2021003244-6号