안녕하세요~ ndk로 영상처리관련 개발을 하고있습니다.
지금 개발 환경을 구축 하고 있는데요...
JNI 단에서 카메라신호(YUV)를 받아 각각 RGB를 표현하는건 했는데...
정작 Y신호를 못뿌려 주고있네요.. 데이터 처리를 잘못 해서 그런지...
소스 올립니다...
(flag 가 4일때 Y신호를 나타내 줍니다.)
mCamera.setPreviewCallback(new Camera.PreviewCallback() {
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
// TODO Auto-generated method stub
Camera.Parameters params = camera.getParameters();
width = params.getPreviewSize().width;
height = params.getPreviewSize().height;
prBitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
//prBitmap = Bitmap.createBitmap(width, height, Bitmap.Config.RGB_565);
HISTOGRAMCOMPRESS(prBitmap, data, flag);
//onDraw(canvas);
/*canvas = new Canvas(prBitmap);
if(a == 0)
drawable.setBounds(100, 100, width, height);
drawable.draw(canvas);*/
/*canvas.setBitmap(prBitmap);
onDraw(canvas);
if(canvas != null)
holder.unlockCanvasAndPost(canvas);*/
mImageview.setImageBitmap(prBitmap);
//a = 1;
}
});
JNIEXPORT void JNICALL Java_com_joy_testnativecamera_MainActivity_HISTOGRAMCOMPRESS(JNIEnv * pEnv, jobject pObj, jobject pBitmap, jbyteArray pinArray, jint flag) {
AndroidBitmapInfo lBitmapInfo;
uint32_t* lBitmapContent;
int lRet;
// LOGE(1, "**IN JNI bitmap converter IN!");
//1. retrieve information about the bitmap
if ((lRet = AndroidBitmap_getInfo(pEnv, pBitmap, &lBitmapInfo)) < 0) {
LOGE(1, "AndroidBitmap_getInfo failed! error = %d", lRet);
return;
}
if (lBitmapInfo.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
LOGE(1, "Bitmap format is not RGBA_8888!");
return;
}
//2. lock the pixel buffer and retrieve a pointer to it
if ((lRet = AndroidBitmap_lockPixels(pEnv, pBitmap, (void**)&lBitmapContent)) < 0) {
LOGE(1, "AndroidBitmap_lockPixels() failed! error = %d", lRet);
return;
}
jbyte* lSource = (*pEnv)->GetPrimitiveArrayCritical(pEnv, pinArray, 0);
if (lSource == NULL) {
LOGE(1, "Source is null");
return;
}
//LOGE(1, "**Start JNI bitmap converter ");
int32_t lFrameSize = lBitmapInfo.width * lBitmapInfo.height;
int32_t lYIndex, lUVIndex;
int32_t lX, lY;
int32_t lColorY, lColorU, lColorV;
int32_t lColorR, lColorG, lColorB;
int32_t y1192;
// Processes each pixel and converts YUV to RGB color.
for (lY = 0, lYIndex = 0; lY < lBitmapInfo.height; ++lY) {
lColorU = 0; lColorV = 0;
// Y is divided by 2 because UVs are subsampled vertically.
// This means that two consecutives iterations refer to the
// same UV line (e.g when Y=0 and Y=1).
lUVIndex = lFrameSize + (lY >> 1) * lBitmapInfo.width;
for (lX = 0; lX < lBitmapInfo.width; ++lX, ++lYIndex) {
// Retrieves YUV components. UVs are subsampled
// horizontally too, hence %2 (1 UV for 2 Y).
lColorY = max(toInt(lSource[lYIndex]) - 16, 0);
if (!(lX % 2)) {
lColorV = toInt(lSource[lUVIndex++]) - 128;
lColorU = toInt(lSource[lUVIndex++]) - 128;
}
// Computes R, G and B from Y, U and V.
y1192 = 1192 * lColorY;
lColorR = (y1192 + 1634 * lColorV);
lColorG = (y1192 - 833 * lColorV - 400 * lColorU);
lColorB = (y1192 + 2066 * lColorU);
lColorR = clamp(lColorR, 0, 262143);
lColorG = clamp(lColorG, 0, 262143);
lColorB = clamp(lColorB, 0, 262143);
// Combines R, G, B and A into the final pixel color.
//lBitmapContent[lYIndex] = color(lColorR,lColorG,lColorB);
if(flag == 1)
lBitmapContent[lYIndex] = 0xFF000000 |((lColorR >> 10) & 0x000000FF);
else if(flag == 2)
lBitmapContent[lYIndex] = 0xFF000000 |((lColorG >> 2) & 0x0000FF00);
else if(flag == 3)
lBitmapContent[lYIndex] = 0xFF000000 |((lColorB << 6) & 0x00FF0000);
else if(flag == 4)
lBitmapContent[lYIndex] = lColorY ;
}
}
LOGE(1, "**Start JNI bitmap converter %d",lColorR);
(*pEnv)-> ReleasePrimitiveArrayCritical(pEnv,pinArray,lSource,0);
AndroidBitmap_unlockPixels(pEnv, pBitmap);
LOGI(1, "end color conversion2");
}