From 5c92d603d297e2bf0afd4c90bea8d418a6f47289 Mon Sep 17 00:00:00 2001
From: Miguel Angel Astor Romero
Date: Sat, 5 Apr 2014 15:03:53 -0430
Subject: [PATCH 01/18] Changed OpenCV to load statically.
---
jni/Android.mk | 2 +-
jni/cv_proc.cpp | 5 ----
project.properties | 2 +-
src/ve/ucv/ciens/ccg/nxtar/MainActivity.java | 24 ++++++++++++--------
4 files changed, 17 insertions(+), 16 deletions(-)
diff --git a/jni/Android.mk b/jni/Android.mk
index 2e2fa75..390adbc 100644
--- a/jni/Android.mk
+++ b/jni/Android.mk
@@ -4,7 +4,7 @@ include $(CLEAR_VARS)
OPENCV_CAMERA_MODULES:=off
OPENCV_LIB_TYPE:=STATIC
-include C:\Users\miguel.astor\Documents\OpenCV-2.4.8-android-sdk\sdk\native\jni\OpenCV.mk
+include /home/miky/Escritorio/OpenCV-2.4.7-android-sdk/sdk/native/jni/OpenCV.mk
LOCAL_MODULE := cvproc
LOCAL_SRC_FILES := cv_proc.cpp marker.cpp
diff --git a/jni/cv_proc.cpp b/jni/cv_proc.cpp
index 8afebd8..207c1fe 100644
--- a/jni/cv_proc.cpp
+++ b/jni/cv_proc.cpp
@@ -23,15 +23,10 @@
extern "C"{
#ifdef CAN_LOG
-
#define log(TAG, MSG) (__android_log_write(ANDROID_LOG_DEBUG, TAG, MSG))
-
const char * TAG = "CVPROC_NATIVE";
-
#else
-
#define log(TAG, MSG) (1 + 1)
-
#endif
JNIEXPORT void JNICALL Java_ve_ucv_ciens_ccg_nxtar_MainActivity_getMarkerCodesAndLocations(
diff --git a/project.properties b/project.properties
index 57ada93..13bb423 100644
--- a/project.properties
+++ b/project.properties
@@ -9,4 +9,4 @@
# Project target.
target=android-19
-android.library.reference.1=../../../../../Documents/OpenCV-2.4.8-android-sdk/sdk/java
+android.library.reference.1=../../../../../Escritorio/OpenCV-2.4.7-android-sdk/sdk/java
diff --git a/src/ve/ucv/ciens/ccg/nxtar/MainActivity.java b/src/ve/ucv/ciens/ccg/nxtar/MainActivity.java
index dfccb39..b75a6f5 100644
--- a/src/ve/ucv/ciens/ccg/nxtar/MainActivity.java
+++ b/src/ve/ucv/ciens/ccg/nxtar/MainActivity.java
@@ -51,16 +51,21 @@ public class MainActivity extends AndroidApplication implements Toaster, Multica
private MulticastLock multicastLock;
private Handler uiHandler;
private Context uiContext;
- private boolean ocvOn;
+ private static boolean ocvOn = false;
private BaseLoaderCallback loaderCallback;
private final ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
- /*static{
- if (!OpenCVLoader.initDebug()){
- Gdx.app.exit();
- }
- System.loadLibrary("cvproc");
- }*/
+ static{
+ if(!OpenCVLoader.initDebug()){
+ Gdx.app.exit();
+ }
+ try{
+ System.loadLibrary("cvproc");
+ ocvOn = true;
+ }catch(UnsatisfiedLinkError u){
+ Gdx.app.exit();
+ }
+ }
public native void getMarkerCodesAndLocations(long inMat, long outMat, int[] codes);
@@ -68,7 +73,7 @@ public class MainActivity extends AndroidApplication implements Toaster, Multica
public void onCreate(Bundle savedInstanceState){
super.onCreate(savedInstanceState);
- ocvOn = false;
+ //ocvOn = false;
if(!Ouya.runningOnOuya){
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
@@ -93,6 +98,7 @@ public class MainActivity extends AndroidApplication implements Toaster, Multica
case LoaderCallbackInterface.SUCCESS:
System.loadLibrary("cvproc");
ocvOn = true;
+ Toast.makeText(uiContext, R.string.ocv_success, Toast.LENGTH_LONG).show();
break;
default:
Toast.makeText(uiContext, R.string.ocv_failed, Toast.LENGTH_LONG).show();
@@ -102,7 +108,7 @@ public class MainActivity extends AndroidApplication implements Toaster, Multica
}
};
- OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_7, this, loaderCallback);
+ //OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_7, this, loaderCallback);
initialize(new NxtARCore(this), cfg);
}
From 453c3a36e593b1eb1c9fdfdbbfe3c55464d15770 Mon Sep 17 00:00:00 2001
From: Miguel Angel Astor Romero
Date: Wed, 9 Apr 2014 14:56:00 -0430
Subject: [PATCH 02/18] Added the camera calibration pattern detection.
---
jni/cv_proc.cpp | 66 ++-
jni/marker.cpp | 530 +++++++++----------
jni/marker.hpp | 20 +-
project.properties | 2 +-
src/ve/ucv/ciens/ccg/nxtar/MainActivity.java | 8 +-
5 files changed, 304 insertions(+), 322 deletions(-)
diff --git a/jni/cv_proc.cpp b/jni/cv_proc.cpp
index 207c1fe..cf71345 100644
--- a/jni/cv_proc.cpp
+++ b/jni/cv_proc.cpp
@@ -22,6 +22,7 @@
//#define CAN_LOG
extern "C"{
+
#ifdef CAN_LOG
#define log(TAG, MSG) (__android_log_write(ANDROID_LOG_DEBUG, TAG, MSG))
const char * TAG = "CVPROC_NATIVE";
@@ -29,38 +30,57 @@ const char * TAG = "CVPROC_NATIVE";
#define log(TAG, MSG) (1 + 1)
#endif
- JNIEXPORT void JNICALL Java_ve_ucv_ciens_ccg_nxtar_MainActivity_getMarkerCodesAndLocations(
+JNIEXPORT void JNICALL Java_ve_ucv_ciens_ccg_nxtar_MainActivity_getMarkerCodesAndLocations(
JNIEnv* env,
jobject jobj,
jlong addrMatIn,
jlong addrMatOut,
jintArray codes
- ){
- char codeMsg[128];
- std::vector vCodes;
+){
+ char codeMsg[128];
+ std::vector vCodes;
- log(TAG, "Requesting native data.");
+ log(TAG, "Requesting native data.");
- cv::Mat& myuv = *(cv::Mat*)addrMatIn;
- cv::Mat& mbgr = *(cv::Mat*)addrMatOut;
- jint * _codes = env->GetIntArrayElements(codes, 0);
+ cv::Mat& myuv = *(cv::Mat*)addrMatIn;
+ cv::Mat& mbgr = *(cv::Mat*)addrMatOut;
+ jint * _codes = env->GetIntArrayElements(codes, 0);
- log(TAG, "Converting color space before processing.");
- cv::cvtColor(myuv, mbgr, CV_RGB2BGR);
+ log(TAG, "Converting color space before processing.");
+ cv::cvtColor(myuv, mbgr, CV_RGB2BGR);
- log(TAG, "Finding markers.");
- nxtar::getAllMarkers(vCodes, mbgr);
+ log(TAG, "Finding markers.");
+ nxtar::getAllMarkers(vCodes, mbgr);
- log(TAG, "Copying marker codes.");
- for(int i = 0; i < vCodes.size() && i < 15; i++){
- _codes[i] = vCodes[i];
- sprintf(codeMsg, "Code [%d] = %d", i, vCodes[i]);
- log(TAG, codeMsg);
- }
- vCodes.clear();
-
- log(TAG, "Releasing native data.");
-
- env->ReleaseIntArrayElements(codes, _codes, 0);
+ log(TAG, "Copying marker codes.");
+ for(int i = 0; i < vCodes.size() && i < 15; i++){
+ _codes[i] = vCodes[i];
+ sprintf(codeMsg, "Code [%d] = %d", i, vCodes[i]);
+ log(TAG, codeMsg);
}
+ vCodes.clear();
+
+ log(TAG, "Releasing native data.");
+
+ env->ReleaseIntArrayElements(codes, _codes, 0);
+}
+
+JNIEXPORT void JNICALL Java_ve_ucv_ciens_ccg_nxtar_MainActivity_findCalibrationPattern(
+ JNIEnv* env,
+ jobject jobj,
+ jlong addrMatIn,
+ jlong addrMatOut
+){
+ log(TAG, "Requesting native data.");
+
+ cv::Mat& myuv = *(cv::Mat*)addrMatIn;
+ cv::Mat& mbgr = *(cv::Mat*)addrMatOut;
+
+ log(TAG, "Converting color space before processing.");
+ cv::cvtColor(myuv, mbgr, CV_RGB2BGR);
+
+ log(TAG, "Finding markers.");
+ nxtar::calibrateCamera(mbgr);
+}
+
}
diff --git a/jni/marker.cpp b/jni/marker.cpp
index c4eee4f..e4249e1 100644
--- a/jni/marker.cpp
+++ b/jni/marker.cpp
@@ -24,375 +24,323 @@
namespace nxtar{
- static const cv::Scalar COLOR = cv::Scalar(255, 255, 255);
+static int PATTERN_DETECTION_PARAMS = cv::CALIB_CB_ADAPTIVE_THRESH + cv::CALIB_CB_NORMALIZE_IMAGE + cv::CALIB_CB_FAST_CHECK;
+static const cv::TermCriteria termCriteria = cv::TermCriteria(CV_TERMCRIT_EPS + CV_TERMCRIT_ITER, 30, 0.1);
+static const cv::Scalar COLOR = cv::Scalar(255, 255, 255);
+static const cv::Size checkersPatternSize(6, 9);
+
+float perimeter (points_vector &);
+int hammDistMarker (cv::Mat);
+cv::Mat rotate (cv::Mat);
+int decodeMarker (cv::Mat &);
+void renderMarkers (markers_vector &, cv::Mat &);
+void isolateMarkers (const contours_vector &, markers_vector &);
+void findContours (cv::Mat &, contours_vector &, int);
+void warpMarker (Marker &, cv::Mat &, cv::Mat &);
+
+void getAllMarkers(std::vector & codes, cv::Mat & img){
+ cv::Mat gray, thresh, cont, mark;
+ contours_vector contours;
+ markers_vector markers;
+ markers_vector valid_markers;
+#ifdef DESKTOP
+ std::ostringstream oss;
+#endif
- class Marker;
-
- typedef std::vector > contours_vector;
- typedef std::vector points_vector;
- typedef std::vector markers_vector;
+ codes.clear();
+
+ cv::cvtColor(img, gray, CV_BGR2GRAY);
+ cv::adaptiveThreshold(gray, thresh, 255, cv::ADAPTIVE_THRESH_MEAN_C, cv::THRESH_BINARY_INV, 7, 7);
+ findContours(thresh, contours, 40);
+ isolateMarkers(contours, markers);
- class Marker{
- public:
- ~Marker();
- points_vector points;
- int code;
- };
+ for(int i = 0; i < markers.size(); i++){
+ warpMarker(markers[i], gray, mark);
- float perimeter(points_vector &);
- int hammDistMarker(cv::Mat);
- cv::Mat rotate(cv::Mat);
- void binarize(cv::Mat &, cv::Mat &);
- void findContours(cv::Mat &, contours_vector &, int);
- void renderContours(contours_vector &, cv::Mat &);
- void renderMarkers(markers_vector &, cv::Mat &);
- void warpMarker(Marker &, cv::Mat &, cv::Mat &);
- int decodeMarker(cv::Mat &);
- void fixCorners(cv::Mat &, Marker &);
- void isolateMarkers(const contours_vector &, markers_vector &);
+ int code = decodeMarker(mark);
- void getAllMarkers(std::vector & codes, cv::Mat & img){
- cv::Mat gray, thresh, cont, mark;
- contours_vector contours;
- markers_vector markers;
- markers_vector valid_markers;
+ if(code != -1){
+ markers[i].code = code;
+ valid_markers.push_back(markers[i]);
+ }
+ }
- codes.clear();
+ for(int i = 0; i < valid_markers.size(); i++){
+#ifdef DESKTOP
+ oss << valid_markers[i].code;
- cv::cvtColor(img, gray, CV_BGR2GRAY);
- binarize(gray, thresh);
- findContours(thresh, contours, 40);
- isolateMarkers(contours, markers);
+ cv::putText(mark, oss.str(), cv::Point(5, 250), cv::FONT_HERSHEY_PLAIN, 2, cv::Scalar::all(128), 3, 8);
- for(int i = 0; i < markers.size(); i++){
- warpMarker(markers[i], gray, mark);
+ oss.str("");
+ oss.clear();
- int code = decodeMarker(mark);
+ oss << "Marker[" << i << "]";
- if(code != -1){
- markers[i].code = code;
- valid_markers.push_back(markers[i]);
- }
- }
+ cv::imshow(oss.str(), mark);
- for(int i = 0; i < valid_markers.size(); i++)
- fixCorners(gray, valid_markers[i]);
+ oss.str("");
+ oss.clear();
+#endif
+ cv::cornerSubPix(gray, valid_markers[i].points, cvSize(10, 10), cvSize(-1, -1), termCriteria);
+ codes.push_back(valid_markers[i].code);
+ }
- cont = cv::Mat::zeros(img.size(), CV_8UC3);
- renderMarkers(valid_markers, cont);
+ cont = cv::Mat::zeros(img.size(), CV_8UC3);
+ renderMarkers(valid_markers, cont);
- img = img + cont;
+ img = img + cont;
- for(int i = 0; i < valid_markers.size(); i++){
- codes.push_back(valid_markers[i].code);
- }
+ markers.clear();
+ contours.clear();
+ valid_markers.clear();
+}
- markers.clear();
- contours.clear();
- valid_markers.clear();
- }
+void calibrateCamera(cv::Mat & img){
+ bool patternfound;
+ points_vector corners;
+ cv::Mat gray;
- #ifdef DESKTOP
- void getAllMarkers_d(std::vector & codes, cv::Mat & img){
- cv::Mat gray, thresh, cont, mark;
- contours_vector contours;
- markers_vector markers;
- markers_vector valid_markers;
- std::ostringstream oss;
+ cv::cvtColor(img, gray, CV_BGR2GRAY);
+ patternfound = cv::findChessboardCorners(gray, checkersPatternSize, corners, PATTERN_DETECTION_PARAMS);
- codes.clear();
+ if(patternfound)
+ cv::cornerSubPix(gray, corners, cv::Size(11, 11), cv::Size(-1, -1), termCriteria);
- cv::cvtColor(img, gray, CV_BGR2GRAY);
- binarize(gray, thresh);
- findContours(thresh, contours, 40);
- isolateMarkers(contours, markers);
+ cv::drawChessboardCorners(img, checkersPatternSize, cv::Mat(corners), patternfound);
+}
- for(int i = 0; i < markers.size(); i++){
- warpMarker(markers[i], gray, mark);
+void findContours(cv::Mat & img, contours_vector & v, int minP){
+ std::vector > c;
+ cv::findContours(img, c, CV_RETR_LIST, CV_CHAIN_APPROX_NONE);
- int code = decodeMarker(mark);
+ v.clear();
+ for(size_t i = 0; i < c.size(); i++){
+ if(c[i].size() > minP){
+ v.push_back(c[i]);
+ }
+ }
+}
- if(code != -1){
- markers[i].code = code;
- valid_markers.push_back(markers[i]);
- }
- }
+void renderMarkers(markers_vector & v, cv::Mat & dst){
+ contours_vector cv;
- for(int i = 0; i < valid_markers.size(); i++)
- fixCorners(gray, valid_markers[i]);
+ for(size_t i = 0; i < v.size(); i++){
+ std::vector pv;
+ for(size_t j = 0; j < v[i].points.size(); ++j)
+ pv.push_back(cv::Point2f(v[i].points[j].x, v[i].points[j].y));
+ cv.push_back(pv);
+ }
- cont = cv::Mat::zeros(img.size(), CV_8UC3);
- renderMarkers(valid_markers, cont);
+ cv::drawContours(dst, cv, -1, COLOR, 1, CV_AA);
+}
- img = img + cont;
+void isolateMarkers(const contours_vector & vc, markers_vector & vm){
+ std::vector appCurve;
+ markers_vector posMarkers;
- for(int i = 0; i < valid_markers.size(); i++){
- oss << valid_markers[i].code;
+ for(size_t i = 0; i < vc.size(); ++i){
+ double eps = vc[i].size() * 0.05;
+ cv::approxPolyDP(vc[i], appCurve, eps, true);
- cv::putText(mark, oss.str(), cv::Point(5, 250), cv::FONT_HERSHEY_PLAIN, 2, cv::Scalar::all(128), 3, 8);
+ if(appCurve.size() != 4 || !cv::isContourConvex(appCurve)) continue;
- oss.str("");
- oss.clear();
+ float minD = std::numeric_limits::max();
- oss << "Marker[" << i << "]";
+ for(int i = 0; i < 4; i++){
+ cv::Point side = appCurve[i] - appCurve[(i + 1) % 4];
+ float sqSideLen = side.dot(side);
+ minD = std::min(minD, sqSideLen);
+ }
- cv::imshow(oss.str(), mark);
+ if(minD < MIN_CONTOUR_LENGTH) continue;
- oss.str("");
- oss.clear();
+ Marker m;
- codes.push_back(valid_markers[i].code);
- }
+ for(int i = 0; i < 4; i++)
+ m.points.push_back(cv::Point2f(appCurve[i].x, appCurve[i].y));
- markers.clear();
- contours.clear();
- valid_markers.clear();
- }
- #endif
+ cv::Point v1 = m.points[1] - m.points[0];
+ cv::Point v2 = m.points[2] - m.points[0];
- void binarize(cv::Mat & src, cv::Mat & dst){
- cv::adaptiveThreshold(src, dst, 255, cv::ADAPTIVE_THRESH_MEAN_C, cv::THRESH_BINARY_INV, 7, 7);
- }
+ double o = (v1.x * v2.y) - (v1.y * v2.x);
+ if(o < 0.0) std::swap(m.points[1], m.points[3]);
- void findContours(cv::Mat & img, contours_vector & v, int minP){
- std::vector > c;
- cv::findContours(img, c, CV_RETR_LIST, CV_CHAIN_APPROX_NONE);
+ posMarkers.push_back(m);
+ }
- v.clear();
- for(size_t i = 0; i < c.size(); i++){
- if(c[i].size() > minP){
- v.push_back(c[i]);
- }
- }
- }
+ std::vector > tooNear;
+ for(size_t i = 0; i < posMarkers.size(); ++i){
+ const Marker & m1 = posMarkers[i];
- void renderContours(contours_vector & v, cv::Mat & dst){
- cv::drawContours(dst, v, -1, COLOR, 1, CV_AA);
- }
+ for(size_t j = i + 1; j < posMarkers.size(); j++){
+ const Marker & m2 = posMarkers[j];
- void renderMarkers(markers_vector & v, cv::Mat & dst){
- contours_vector cv;
+ float dSq = 0.0f;
- for(size_t i = 0; i < v.size(); i++){
- std::vector pv;
- for(size_t j = 0; j < v[i].points.size(); ++j)
- pv.push_back(cv::Point2f(v[i].points[j].x, v[i].points[j].y));
- cv.push_back(pv);
- }
+ for(int c = 0; c < 4; c++){
+ cv::Point v = m1.points[c] - m2.points[c];
+ dSq += v.dot(v);
+ }
- cv::drawContours(dst, cv, -1, COLOR, 1, CV_AA);
- }
+ dSq /= 4.0f;
- void isolateMarkers(const contours_vector & vc, markers_vector & vm){
- std::vector appCurve;
- markers_vector posMarkers;
+ if(dSq < 100) tooNear.push_back(std::pair(i, j));
+ }
+ }
- for(size_t i = 0; i < vc.size(); ++i){
- double eps = vc[i].size() * 0.05;
- cv::approxPolyDP(vc[i], appCurve, eps, true);
+ std::vector remMask(posMarkers.size(), false);
- if(appCurve.size() != 4 || !cv::isContourConvex(appCurve)) continue;
+ for(size_t i = 0; i < tooNear.size(); ++i){
+ float p1 = perimeter(posMarkers[tooNear[i].first].points);
+ float p2 = perimeter(posMarkers[tooNear[i].second].points);
- float minD = std::numeric_limits::max();
+ size_t remInd;
+ if(p1 > p2) remInd = tooNear[i].second;
+ else remInd = tooNear[i].first;
- for(int i = 0; i < 4; i++){
- cv::Point side = appCurve[i] - appCurve[(i + 1) % 4];
- float sqSideLen = side.dot(side);
- minD = std::min(minD, sqSideLen);
- }
+ remMask[remInd] = true;
+ }
- if(minD < MIN_CONTOUR_LENGTH) continue;
+ vm.clear();
+ for(size_t i = 0; i < posMarkers.size(); ++i){
+ if(remMask[i]) vm.push_back(posMarkers[i]);
+ }
+}
- Marker m;
+void warpMarker(Marker & m, cv::Mat & in, cv::Mat & out){
+ cv::Mat bin;
+ cv::Size markerSize(350, 350);
+ points_vector v;
+ v.push_back(cv::Point2f(0,0));
+ v.push_back(cv::Point2f(markerSize.width-1,0));
+ v.push_back(cv::Point2f(markerSize.width-1,markerSize.height-1));
+ v.push_back(cv::Point2f(0,markerSize.height-1));
- for(int i = 0; i < 4; i++)
- m.points.push_back(cv::Point2f(appCurve[i].x, appCurve[i].y));
+ cv::Mat M = cv::getPerspectiveTransform(m.points, v);
+ cv::warpPerspective(in, bin, M, markerSize);
- cv::Point v1 = m.points[1] - m.points[0];
- cv::Point v2 = m.points[2] - m.points[0];
+ cv::threshold(bin, out, 128, 255, cv::THRESH_BINARY | cv::THRESH_OTSU);
+}
- double o = (v1.x * v2.y) - (v1.y * v2.x);
- if(o < 0.0) std::swap(m.points[1], m.points[3]);
+int hammDistMarker(cv::Mat bits){
+ int ids[4][5] = {
+ {1,0,0,0,0},
+ {1,0,1,1,1},
+ {0,1,0,0,1},
+ {0,1,1,1,0}
+ };
- posMarkers.push_back(m);
- }
+ int dist = 0;
- std::vector > tooNear;
- for(size_t i = 0; i < posMarkers.size(); ++i){
- const Marker & m1 = posMarkers[i];
+ for (int y = 0; y < 5; y++){
+ int minSum = 1e5;
- for(size_t j = i + 1; j < posMarkers.size(); j++){
- const Marker & m2 = posMarkers[j];
+ for (int p = 0; p < 4; p++){
+ int sum = 0;
- float dSq = 0.0f;
+ for (int x = 0; x < 5; x++){
+ sum += bits.at(y, x) == ids[p][x] ? 0 : 1;
+ }
- for(int c = 0; c < 4; c++){
- cv::Point v = m1.points[c] - m2.points[c];
- dSq += v.dot(v);
- }
+ if(minSum > sum)
+ minSum = sum;
+ }
- dSq /= 4.0f;
+ dist += minSum;
+ }
- if(dSq < 100) tooNear.push_back(std::pair(i, j));
- }
- }
+ return dist;
+}
- std::vector remMask(posMarkers.size(), false);
+cv::Mat rotate(cv::Mat in){
+ cv::Mat out;
+ in.copyTo(out);
+ for (int i=0;i(i,j)=in.at(in.cols-j-1,i);
+ }
+ }
- for(size_t i = 0; i < tooNear.size(); ++i){
- float p1 = perimeter(posMarkers[tooNear[i].first].points);
- float p2 = perimeter(posMarkers[tooNear[i].second].points);
+ return out;
+}
- size_t remInd;
- if(p1 > p2) remInd = tooNear[i].second;
- else remInd = tooNear[i].first;
+int decodeMarker(cv::Mat & marker){
+ bool found = false;
+ int code = 0;
+ cv::Mat bits;
- remMask[remInd] = true;
- }
+ for(int y = 0; y < 7; y++){
+ int inc = (y == 0 || y == 6) ? 1 : 6;
- vm.clear();
- for(size_t i = 0; i < posMarkers.size(); ++i){
- if(remMask[i]) vm.push_back(posMarkers[i]);
- }
- }
+ for(int x = 0; x < 7; x += inc){
+ int cX = x * 50;
+ int cY = y * 50;
- void warpMarker(Marker & m, cv::Mat & in, cv::Mat & out){
- cv::Mat bin;
- cv::Size markerSize(350, 350);
- points_vector v;
- v.push_back(cv::Point2f(0,0));
- v.push_back(cv::Point2f(markerSize.width-1,0));
- v.push_back(cv::Point2f(markerSize.width-1,markerSize.height-1));
- v.push_back(cv::Point2f(0,markerSize.height-1));
+ cv::Mat cell = marker(cv::Rect(cX, cY, 50, 50));
- cv::Mat M = cv::getPerspectiveTransform(m.points, v);
- cv::warpPerspective(in, bin, M, markerSize);
+ int nZ = cv::countNonZero(cell);
- cv::threshold(bin, out, 128, 255, cv::THRESH_BINARY | cv::THRESH_OTSU);
- }
+ // Not a valid marker.
+ if(nZ > (50 * 50) / 2) return -1;
+ }
+ }
- int hammDistMarker(cv::Mat bits){
- int ids[4][5] = {
- {1,0,0,0,0},
- {1,0,1,1,1},
- {0,1,0,0,1},
- {0,1,1,1,0}
- };
+ bits = cv::Mat::zeros(5, 5, CV_8UC1);
- int dist = 0;
- for (int y = 0; y < 5; y++){
- int minSum = 1e5;
+ for(int y = 0; y < 5; y++){
+ for(int x = 0; x < 5; x++){
+ int cX = (x + 1) * 50;
+ int cY = (y + 1) * 50;
- for (int p = 0; p < 4; p++){
- int sum = 0;
+ cv::Mat cell = marker(cv::Rect(cX, cY, 50, 50));
- for (int x = 0; x < 5; x++){
- sum += bits.at(y, x) == ids[p][x] ? 0 : 1;
- }
+ int nZ = cv::countNonZero(cell);
- if(minSum > sum)
- minSum = sum;
- }
+ if(nZ > (50 * 50) / 2) bits.at(y, x) = 1;
+ }
+ }
- dist += minSum;
- }
+ if(hammDistMarker(bits) != 0){
+ for(int r = 1; r < 4; r++){
+ bits = rotate(bits);
+ if(hammDistMarker(bits) != 0) continue;
+ else{ found = true; break;}
+ }
+ }else found = true;
- return dist;
- }
- cv::Mat rotate(cv::Mat in){
- cv::Mat out;
- in.copyTo(out);
- for (int i=0;i(i,j)=in.at(in.cols-j-1,i);
- }
- }
- return out;
- }
+ if(found){
+ for(int y = 0; y < 5; y++){
+ code <<= 1;
+ if(bits.at(y, 1))
+ code |= 1;
- int decodeMarker(cv::Mat & marker){
- bool found = false;
- int code = 0;
- cv::Mat bits;
+ code <<= 1;
+ if(bits.at(y, 3))
+ code |= 1;
+ }
- for(int y = 0; y < 7; y++){
- int inc = (y == 0 || y == 6) ? 1 : 6;
- for(int x = 0; x < 7; x += inc){
- int cX = x * 50;
- int cY = y * 50;
+ return code;
+ }else
+ return -1;
+}
- cv::Mat cell = marker(cv::Rect(cX, cY, 50, 50));
+float perimeter(points_vector & p){
+ float per = 0.0f, dx, dy;
- int nZ = cv::countNonZero(cell);
+ for(size_t i; i < p.size(); ++i){
+ dx = p[i].x - p[(i + 1) % p.size()].x;
+ dy = p[i].y - p[(i + 1) % p.size()].y;
+ per += sqrt((dx * dx) + (dy * dy));
+ }
- // Not a valid marker.
- if(nZ > (50 * 50) / 2) return -1;
- }
- }
+ return per;
+}
- bits = cv::Mat::zeros(5, 5, CV_8UC1);
-
-
- for(int y = 0; y < 5; y++){
- for(int x = 0; x < 5; x++){
- int cX = (x + 1) * 50;
- int cY = (y + 1) * 50;
-
- cv::Mat cell = marker(cv::Rect(cX, cY, 50, 50));
-
- int nZ = cv::countNonZero(cell);
-
- if(nZ > (50 * 50) / 2) bits.at(y, x) = 1;
- }
- }
-
- if(hammDistMarker(bits) != 0){
- for(int r = 1; r < 4; r++){
- bits = rotate(bits);
- if(hammDistMarker(bits) != 0) continue;
- else{ found = true; break;}
- }
- }else found = true;
-
-
- if(found){
- for(int y = 0; y < 5; y++){
- code <<= 1;
- if(bits.at(y, 1))
- code |= 1;
-
- code <<= 1;
- if(bits.at(y, 3))
- code |= 1;
- }
-
-
- return code;
- }else
- return -1;
- }
-
- void fixCorners(cv::Mat & img, Marker & m){
- cv::cornerSubPix(img, m.points, cvSize(10, 10), cvSize(-1, -1), cvTermCriteria(CV_TERMCRIT_ITER, 30, 0.1));
- }
-
- float perimeter(points_vector & p){
- float per = 0.0f, dx, dy;
-
- for(size_t i; i < p.size(); ++i){
- dx = p[i].x - p[(i + 1) % p.size()].x;
- dy = p[i].y - p[(i + 1) % p.size()].y;
- per += sqrt((dx * dx) + (dy * dy));
- }
-
- return per;
- }
-
- Marker::~Marker(){
- points.clear();
- }
+Marker::~Marker(){
+ points.clear();
+}
}
diff --git a/jni/marker.hpp b/jni/marker.hpp
index 1164729..41c9e24 100644
--- a/jni/marker.hpp
+++ b/jni/marker.hpp
@@ -20,10 +20,22 @@
#include
namespace nxtar{
- void getAllMarkers(std::vector &, cv::Mat &);
-#ifdef DESKTOP
- void getAllMarkers_d(std::vector &, cv::Mat &);
-#endif
+
+class Marker;
+
+typedef std::vector > contours_vector;
+typedef std::vector points_vector;
+typedef std::vector markers_vector;
+
+class Marker{
+public:
+ ~Marker();
+ points_vector points;
+ int code;
+};
+
+void getAllMarkers(std::vector &, cv::Mat &);
+void calibrateCamera(cv::Mat &);
}
#endif
diff --git a/project.properties b/project.properties
index 13bb423..34ab397 100644
--- a/project.properties
+++ b/project.properties
@@ -9,4 +9,4 @@
# Project target.
target=android-19
-android.library.reference.1=../../../../../Escritorio/OpenCV-2.4.7-android-sdk/sdk/java
+android.library.reference.1=../../../../../NVPACK/OpenCV-2.4.5-Tegra-sdk-r2/sdk/java
diff --git a/src/ve/ucv/ciens/ccg/nxtar/MainActivity.java b/src/ve/ucv/ciens/ccg/nxtar/MainActivity.java
index b75a6f5..da2085e 100644
--- a/src/ve/ucv/ciens/ccg/nxtar/MainActivity.java
+++ b/src/ve/ucv/ciens/ccg/nxtar/MainActivity.java
@@ -57,17 +57,18 @@ public class MainActivity extends AndroidApplication implements Toaster, Multica
static{
if(!OpenCVLoader.initDebug()){
- Gdx.app.exit();
+ System.exit(1);
}
try{
System.loadLibrary("cvproc");
ocvOn = true;
}catch(UnsatisfiedLinkError u){
- Gdx.app.exit();
+ System.exit(1);
}
}
public native void getMarkerCodesAndLocations(long inMat, long outMat, int[] codes);
+ public native void findCalibrationPattern(long inMat, long outMat);
@Override
public void onCreate(Bundle savedInstanceState){
@@ -167,7 +168,8 @@ public class MainActivity extends AndroidApplication implements Toaster, Multica
Mat outImg = new Mat();
Utils.bitmapToMat(tFrame, inImg);
- getMarkerCodesAndLocations(inImg.getNativeObjAddr(), outImg.getNativeObjAddr(), codes);
+ //getMarkerCodesAndLocations(inImg.getNativeObjAddr(), outImg.getNativeObjAddr(), codes);
+ findCalibrationPattern(inImg.getNativeObjAddr(), outImg.getNativeObjAddr());
Mat temp = new Mat();
Imgproc.cvtColor(outImg, temp, Imgproc.COLOR_BGR2RGB);
From 249e6e30a48831c45ac20686a296190fe8556bcd Mon Sep 17 00:00:00 2001
From: unknown
Date: Thu, 10 Apr 2014 17:55:31 -0430
Subject: [PATCH 03/18] Started porting the camera calibration functions.
---
jni/Android.mk | 2 +-
jni/cv_proc.cpp | 58 +-
jni/marker.cpp | 722 +++++++++++--------
jni/marker.hpp | 27 +-
project.properties | 2 +-
src/ve/ucv/ciens/ccg/nxtar/MainActivity.java | 62 +-
6 files changed, 493 insertions(+), 380 deletions(-)
diff --git a/jni/Android.mk b/jni/Android.mk
index 390adbc..2e2fa75 100644
--- a/jni/Android.mk
+++ b/jni/Android.mk
@@ -4,7 +4,7 @@ include $(CLEAR_VARS)
OPENCV_CAMERA_MODULES:=off
OPENCV_LIB_TYPE:=STATIC
-include /home/miky/Escritorio/OpenCV-2.4.7-android-sdk/sdk/native/jni/OpenCV.mk
+include C:\Users\miguel.astor\Documents\OpenCV-2.4.8-android-sdk\sdk\native\jni\OpenCV.mk
LOCAL_MODULE := cvproc
LOCAL_SRC_FILES := cv_proc.cpp marker.cpp
diff --git a/jni/cv_proc.cpp b/jni/cv_proc.cpp
index cf71345..26f1bb9 100644
--- a/jni/cv_proc.cpp
+++ b/jni/cv_proc.cpp
@@ -21,8 +21,6 @@
//#define CAN_LOG
-extern "C"{
-
#ifdef CAN_LOG
#define log(TAG, MSG) (__android_log_write(ANDROID_LOG_DEBUG, TAG, MSG))
const char * TAG = "CVPROC_NATIVE";
@@ -30,57 +28,61 @@ const char * TAG = "CVPROC_NATIVE";
#define log(TAG, MSG) (1 + 1)
#endif
-JNIEXPORT void JNICALL Java_ve_ucv_ciens_ccg_nxtar_MainActivity_getMarkerCodesAndLocations(
- JNIEnv* env,
- jobject jobj,
- jlong addrMatIn,
- jlong addrMatOut,
- jintArray codes
-){
+extern "C"{
+
+JNIEXPORT void JNICALL Java_ve_ucv_ciens_ccg_nxtar_MainActivity_getMarkerCodesAndLocations(JNIEnv* env, jobject jobj, jlong addrMatIn, jlong addrMatOut, jintArray codes){
char codeMsg[128];
std::vector vCodes;
- log(TAG, "Requesting native data.");
+ log(TAG, "getMarkerCodesAndLocations(): Requesting native data.");
cv::Mat& myuv = *(cv::Mat*)addrMatIn;
cv::Mat& mbgr = *(cv::Mat*)addrMatOut;
- jint * _codes = env->GetIntArrayElements(codes, 0);
+ jint * _codes = env->GetIntArrayElements(codes, 0);
- log(TAG, "Converting color space before processing.");
+ log(TAG, "getMarkerCodesAndLocations(): Converting color space before processing.");
cv::cvtColor(myuv, mbgr, CV_RGB2BGR);
- log(TAG, "Finding markers.");
+ log(TAG, "getMarkerCodesAndLocations(): Finding markers.");
nxtar::getAllMarkers(vCodes, mbgr);
- log(TAG, "Copying marker codes.");
+ log(TAG, "getMarkerCodesAndLocations(): Copying marker codes.");
for(int i = 0; i < vCodes.size() && i < 15; i++){
- _codes[i] = vCodes[i];
- sprintf(codeMsg, "Code [%d] = %d", i, vCodes[i]);
- log(TAG, codeMsg);
+ _codes[i] = (jint)vCodes[i];
}
vCodes.clear();
- log(TAG, "Releasing native data.");
+ log(TAG, "getMarkerCodesAndLocations(): Releasing native data.");
env->ReleaseIntArrayElements(codes, _codes, 0);
}
-JNIEXPORT void JNICALL Java_ve_ucv_ciens_ccg_nxtar_MainActivity_findCalibrationPattern(
- JNIEnv* env,
- jobject jobj,
- jlong addrMatIn,
- jlong addrMatOut
-){
- log(TAG, "Requesting native data.");
+JNIEXPORT jboolean JNICALL Java_ve_ucv_ciens_ccg_nxtar_MainActivity_findCalibrationPattern(JNIEnv* env, jobject jobj, jlong addrMatIn, jlong addrMatOut, jfloatArray points){
+ nxtar::points_vector v_points;
+ bool found;
+
+ log(TAG, "findCalibrationPattern(): Requesting native data.");
cv::Mat& myuv = *(cv::Mat*)addrMatIn;
cv::Mat& mbgr = *(cv::Mat*)addrMatOut;
+ jfloat * _points = env->GetFloatArrayElements(points, 0);
- log(TAG, "Converting color space before processing.");
+ log(TAG, "findCalibrationPattern(): Converting color space before processing.");
cv::cvtColor(myuv, mbgr, CV_RGB2BGR);
- log(TAG, "Finding markers.");
- nxtar::calibrateCamera(mbgr);
+ log(TAG, "findCalibrationPattern(): Finding calibration pattern.");
+ found = nxtar::findCalibrationPattern(v_points, mbgr);
+
+ log(TAG, "findCalibrationPattern(): Copying calibration points.");
+ for(size_t i = 0, p = 0; i < v_points.size(); i++, p += 2){
+ _points[p] = (jfloat)v_points[i].x;
+ _points[p + 1] = (jfloat)v_points[i].y;
+ }
+
+ log(TAG, "findCalibrationPattern(): Releasing native data.");
+ env->ReleaseFloatArrayElements(points, _points, 0);
+
+ return (jboolean)found;
}
}
diff --git a/jni/marker.cpp b/jni/marker.cpp
index e4249e1..5a39ab0 100644
--- a/jni/marker.cpp
+++ b/jni/marker.cpp
@@ -16,331 +16,451 @@
#include
#include
#include
+#ifdef DESKTOP
#include
+#endif
#include "marker.hpp"
-#define MIN_CONTOUR_LENGTH 0.1
-
namespace nxtar{
-static int PATTERN_DETECTION_PARAMS = cv::CALIB_CB_ADAPTIVE_THRESH + cv::CALIB_CB_NORMALIZE_IMAGE + cv::CALIB_CB_FAST_CHECK;
-static const cv::TermCriteria termCriteria = cv::TermCriteria(CV_TERMCRIT_EPS + CV_TERMCRIT_ITER, 30, 0.1);
-static const cv::Scalar COLOR = cv::Scalar(255, 255, 255);
-static const cv::Size checkersPatternSize(6, 9);
+ typedef std::vector points_vector_3D;
+ typedef std::vector > contours_vector;
+ typedef std::vector markers_vector;
-float perimeter (points_vector &);
-int hammDistMarker (cv::Mat);
-cv::Mat rotate (cv::Mat);
-int decodeMarker (cv::Mat &);
-void renderMarkers (markers_vector &, cv::Mat &);
-void isolateMarkers (const contours_vector &, markers_vector &);
-void findContours (cv::Mat &, contours_vector &, int);
-void warpMarker (Marker &, cv::Mat &, cv::Mat &);
+/******************************************************************************
+ * PRIVATE CONSTANTS *
+ ******************************************************************************/
-void getAllMarkers(std::vector & codes, cv::Mat & img){
- cv::Mat gray, thresh, cont, mark;
- contours_vector contours;
- markers_vector markers;
- markers_vector valid_markers;
+ /**
+ * Size of a square cell in the calibration pattern.
+ */
+ static const float SQUARE_SIZE = 1.0f;
+
+ /**
+ * Minimal length of a contour to be considered as a marker candidate.
+ */
+ static const float MIN_CONTOUR_LENGTH = 0.1;
+
+ /**
+ * Flags for the calibration pattern detection function.
+ */
+ static const int PATTERN_DETECTION_FLAGS = cv::CALIB_CB_ADAPTIVE_THRESH + cv::CALIB_CB_NORMALIZE_IMAGE + cv::CALIB_CB_FAST_CHECK;
+
+ /**
+ * Color for rendering the marker outlines.
+ */
+ static const cv::Scalar COLOR = cv::Scalar(255, 255, 255);
+
+ /**
+ * Size of the chessboard pattern image (columns, rows).
+ */
+ static const cv::Size CHESSBOARD_PATTERN_SIZE(6, 9);
+
+ /**
+ * Termination criteria for OpenCV's iterative algorithms.
+ */
+ static const cv::TermCriteria TERM_CRITERIA = cv::TermCriteria(CV_TERMCRIT_EPS + CV_TERMCRIT_ITER, 30, 0.1);
+
+/******************************************************************************
+ * PRIVATE FUNCTION PROTOTYPES *
+ ******************************************************************************/
+
+ /**
+ * Calculates the perimeter of a points vector defining a polygon.
+ */
+ float perimeter(points_vector &);
+
+ /**
+ * Calculates the Hamming distance of a 5x5 marker.
+ */
+ int hammDistMarker(cv::Mat);
+
+ /**
+ * Rotates an OpenCV matrix in place by 90 degrees clockwise.
+ */
+ cv::Mat rotate(cv::Mat);
+
+ /**
+ * Returns the code of a 5x5 marker or -1 if the marker is not valid.
+ */
+ int decodeMarker(cv::Mat &);
+
+ /**
+ * Renders the polygon defined in the input vector on the specified image.
+ */
+ void renderMarkers(markers_vector &, cv::Mat &);
+
+ /**
+ * Identifies all possible marker candidates in a polygon vector.
+ */
+ void isolateMarkers(const contours_vector &, markers_vector &);
+
+ /**
+ * Identifies all roughly 4 side figures in the input image.
+ */
+ void findContours(cv::Mat &, contours_vector &, int);
+
+ /**
+ * Removes the perspective distortion from a marker candidate image.
+ */
+ void warpMarker(Marker &, cv::Mat &, cv::Mat &);
+
+/******************************************************************************
+ * PUBLIC API *
+ ******************************************************************************/
+
+ void getAllMarkers(std::vector & codes, cv::Mat & img){
+ cv::Mat gray, thresh, cont, mark;
+ contours_vector contours;
+ markers_vector markers;
+ markers_vector valid_markers;
#ifdef DESKTOP
- std::ostringstream oss;
+ std::ostringstream oss;
#endif
- codes.clear();
+ codes.clear();
- cv::cvtColor(img, gray, CV_BGR2GRAY);
- cv::adaptiveThreshold(gray, thresh, 255, cv::ADAPTIVE_THRESH_MEAN_C, cv::THRESH_BINARY_INV, 7, 7);
- findContours(thresh, contours, 40);
- isolateMarkers(contours, markers);
+ // Find all marker candidates in the input image.
+ // 1) First, convert the image to grayscale.
+ // 2) Then, binarize the grayscale image.
+ // 3) Finally indentify all 4 sided figures in the binarized image.
+ cv::cvtColor(img, gray, CV_BGR2GRAY);
+ cv::adaptiveThreshold(gray, thresh, 255, cv::ADAPTIVE_THRESH_MEAN_C, cv::THRESH_BINARY_INV, 7, 7);
+ findContours(thresh, contours, 40);
+ isolateMarkers(contours, markers);
- for(int i = 0; i < markers.size(); i++){
- warpMarker(markers[i], gray, mark);
+ // Remove the perspective distortion from the detected marker candidates.
+ // Then attempt to decode them and push the valid ones into the valid
+ // markes vector.
+ for(int i = 0; i < markers.size(); i++){
+ warpMarker(markers[i], gray, mark);
- int code = decodeMarker(mark);
+ int code = decodeMarker(mark);
- if(code != -1){
- markers[i].code = code;
- valid_markers.push_back(markers[i]);
- }
- }
+ if(code != -1){
+ markers[i].code = code;
+ valid_markers.push_back(markers[i]);
+ }
+ }
- for(int i = 0; i < valid_markers.size(); i++){
+ for(int i = 0; i < valid_markers.size(); i++){
#ifdef DESKTOP
- oss << valid_markers[i].code;
+ // Render the detected valid markers with their codes for debbuging
+ // purposes.
+ oss << valid_markers[i].code;
- cv::putText(mark, oss.str(), cv::Point(5, 250), cv::FONT_HERSHEY_PLAIN, 2, cv::Scalar::all(128), 3, 8);
+ cv::putText(mark, oss.str(), cv::Point(5, 250), cv::FONT_HERSHEY_PLAIN, 2, cv::Scalar::all(128), 3, 8);
- oss.str("");
- oss.clear();
+ oss.str("");
+ oss.clear();
- oss << "Marker[" << i << "]";
+ oss << "Marker[" << i << "]";
- cv::imshow(oss.str(), mark);
+ cv::imshow(oss.str(), mark);
- oss.str("");
- oss.clear();
+ oss.str("");
+ oss.clear();
#endif
- cv::cornerSubPix(gray, valid_markers[i].points, cvSize(10, 10), cvSize(-1, -1), termCriteria);
- codes.push_back(valid_markers[i].code);
- }
+ // Fix the detected corners to better approximate the markers. And
+ // push their codes to the output vector.
+ cv::cornerSubPix(gray, valid_markers[i].points, cvSize(10, 10), cvSize(-1, -1), TERM_CRITERIA);
+ codes.push_back(valid_markers[i].code);
+ }
- cont = cv::Mat::zeros(img.size(), CV_8UC3);
- renderMarkers(valid_markers, cont);
+ // Render the detected markers on top of the input image.
+ cont = cv::Mat::zeros(img.size(), CV_8UC3);
+ renderMarkers(valid_markers, cont);
+ img = img + cont;
- img = img + cont;
-
- markers.clear();
- contours.clear();
- valid_markers.clear();
-}
-
-void calibrateCamera(cv::Mat & img){
- bool patternfound;
- points_vector corners;
- cv::Mat gray;
-
- cv::cvtColor(img, gray, CV_BGR2GRAY);
- patternfound = cv::findChessboardCorners(gray, checkersPatternSize, corners, PATTERN_DETECTION_PARAMS);
-
- if(patternfound)
- cv::cornerSubPix(gray, corners, cv::Size(11, 11), cv::Size(-1, -1), termCriteria);
-
- cv::drawChessboardCorners(img, checkersPatternSize, cv::Mat(corners), patternfound);
-}
-
-void findContours(cv::Mat & img, contours_vector & v, int minP){
- std::vector > c;
- cv::findContours(img, c, CV_RETR_LIST, CV_CHAIN_APPROX_NONE);
-
- v.clear();
- for(size_t i = 0; i < c.size(); i++){
- if(c[i].size() > minP){
- v.push_back(c[i]);
- }
- }
-}
-
-void renderMarkers(markers_vector & v, cv::Mat & dst){
- contours_vector cv;
-
- for(size_t i = 0; i < v.size(); i++){
- std::vector pv;
- for(size_t j = 0; j < v[i].points.size(); ++j)
- pv.push_back(cv::Point2f(v[i].points[j].x, v[i].points[j].y));
- cv.push_back(pv);
- }
-
- cv::drawContours(dst, cv, -1, COLOR, 1, CV_AA);
-}
-
-void isolateMarkers(const contours_vector & vc, markers_vector & vm){
- std::vector appCurve;
- markers_vector posMarkers;
-
- for(size_t i = 0; i < vc.size(); ++i){
- double eps = vc[i].size() * 0.05;
- cv::approxPolyDP(vc[i], appCurve, eps, true);
-
- if(appCurve.size() != 4 || !cv::isContourConvex(appCurve)) continue;
-
- float minD = std::numeric_limits::max();
-
- for(int i = 0; i < 4; i++){
- cv::Point side = appCurve[i] - appCurve[(i + 1) % 4];
- float sqSideLen = side.dot(side);
- minD = std::min(minD, sqSideLen);
- }
-
- if(minD < MIN_CONTOUR_LENGTH) continue;
-
- Marker m;
-
- for(int i = 0; i < 4; i++)
- m.points.push_back(cv::Point2f(appCurve[i].x, appCurve[i].y));
-
- cv::Point v1 = m.points[1] - m.points[0];
- cv::Point v2 = m.points[2] - m.points[0];
-
- double o = (v1.x * v2.y) - (v1.y * v2.x);
- if(o < 0.0) std::swap(m.points[1], m.points[3]);
-
- posMarkers.push_back(m);
- }
-
- std::vector > tooNear;
- for(size_t i = 0; i < posMarkers.size(); ++i){
- const Marker & m1 = posMarkers[i];
-
- for(size_t j = i + 1; j < posMarkers.size(); j++){
- const Marker & m2 = posMarkers[j];
-
- float dSq = 0.0f;
-
- for(int c = 0; c < 4; c++){
- cv::Point v = m1.points[c] - m2.points[c];
- dSq += v.dot(v);
- }
-
- dSq /= 4.0f;
-
- if(dSq < 100) tooNear.push_back(std::pair(i, j));
- }
- }
-
- std::vector remMask(posMarkers.size(), false);
-
- for(size_t i = 0; i < tooNear.size(); ++i){
- float p1 = perimeter(posMarkers[tooNear[i].first].points);
- float p2 = perimeter(posMarkers[tooNear[i].second].points);
-
- size_t remInd;
- if(p1 > p2) remInd = tooNear[i].second;
- else remInd = tooNear[i].first;
-
- remMask[remInd] = true;
- }
-
- vm.clear();
- for(size_t i = 0; i < posMarkers.size(); ++i){
- if(remMask[i]) vm.push_back(posMarkers[i]);
- }
-}
-
-void warpMarker(Marker & m, cv::Mat & in, cv::Mat & out){
- cv::Mat bin;
- cv::Size markerSize(350, 350);
- points_vector v;
- v.push_back(cv::Point2f(0,0));
- v.push_back(cv::Point2f(markerSize.width-1,0));
- v.push_back(cv::Point2f(markerSize.width-1,markerSize.height-1));
- v.push_back(cv::Point2f(0,markerSize.height-1));
-
- cv::Mat M = cv::getPerspectiveTransform(m.points, v);
- cv::warpPerspective(in, bin, M, markerSize);
-
- cv::threshold(bin, out, 128, 255, cv::THRESH_BINARY | cv::THRESH_OTSU);
-}
-
-int hammDistMarker(cv::Mat bits){
- int ids[4][5] = {
- {1,0,0,0,0},
- {1,0,1,1,1},
- {0,1,0,0,1},
- {0,1,1,1,0}
- };
-
- int dist = 0;
-
- for (int y = 0; y < 5; y++){
- int minSum = 1e5;
-
- for (int p = 0; p < 4; p++){
- int sum = 0;
-
- for (int x = 0; x < 5; x++){
- sum += bits.at(y, x) == ids[p][x] ? 0 : 1;
- }
-
- if(minSum > sum)
- minSum = sum;
- }
-
- dist += minSum;
- }
-
- return dist;
-}
-
-cv::Mat rotate(cv::Mat in){
- cv::Mat out;
- in.copyTo(out);
- for (int i=0;i(i,j)=in.at(in.cols-j-1,i);
- }
- }
-
- return out;
-}
-
-int decodeMarker(cv::Mat & marker){
- bool found = false;
- int code = 0;
- cv::Mat bits;
-
- for(int y = 0; y < 7; y++){
- int inc = (y == 0 || y == 6) ? 1 : 6;
-
- for(int x = 0; x < 7; x += inc){
- int cX = x * 50;
- int cY = y * 50;
-
- cv::Mat cell = marker(cv::Rect(cX, cY, 50, 50));
-
- int nZ = cv::countNonZero(cell);
-
- // Not a valid marker.
- if(nZ > (50 * 50) / 2) return -1;
- }
- }
-
- bits = cv::Mat::zeros(5, 5, CV_8UC1);
-
-
- for(int y = 0; y < 5; y++){
- for(int x = 0; x < 5; x++){
- int cX = (x + 1) * 50;
- int cY = (y + 1) * 50;
-
- cv::Mat cell = marker(cv::Rect(cX, cY, 50, 50));
-
- int nZ = cv::countNonZero(cell);
-
- if(nZ > (50 * 50) / 2) bits.at(y, x) = 1;
- }
- }
-
- if(hammDistMarker(bits) != 0){
- for(int r = 1; r < 4; r++){
- bits = rotate(bits);
- if(hammDistMarker(bits) != 0) continue;
- else{ found = true; break;}
- }
- }else found = true;
-
-
- if(found){
- for(int y = 0; y < 5; y++){
- code <<= 1;
- if(bits.at(y, 1))
- code |= 1;
-
- code <<= 1;
- if(bits.at(y, 3))
- code |= 1;
- }
-
-
- return code;
- }else
- return -1;
-}
-
-float perimeter(points_vector & p){
- float per = 0.0f, dx, dy;
-
- for(size_t i; i < p.size(); ++i){
- dx = p[i].x - p[(i + 1) % p.size()].x;
- dy = p[i].y - p[(i + 1) % p.size()].y;
- per += sqrt((dx * dx) + (dy * dy));
- }
-
- return per;
-}
-
-Marker::~Marker(){
- points.clear();
-}
+ // Clear the local vectors.
+ markers.clear();
+ contours.clear();
+ valid_markers.clear();
+ }
+ bool findCalibrationPattern(points_vector & corners, cv::Mat & img){
+ bool patternfound;
+ cv::Mat gray;
+
+ // Convert the input image to grayscale and attempt to find the
+ // calibration pattern.
+ cv::cvtColor(img, gray, CV_BGR2GRAY);
+ patternfound = cv::findChessboardCorners(gray, CHESSBOARD_PATTERN_SIZE, corners, PATTERN_DETECTION_FLAGS);
+
+ // If the pattern was found then fix the detected points a bit.
+ if(patternfound)
+ cv::cornerSubPix(gray, corners, cv::Size(11, 11), cv::Size(-1, -1), TERM_CRITERIA);
+
+ // Render the detected pattern.
+ cv::drawChessboardCorners(img, CHESSBOARD_PATTERN_SIZE, cv::Mat(corners), patternfound);
+
+ return patternfound;
+ }
+
+ double getCameraParameters(cv::Mat & camera_matrix, cv::Mat & dist_coeffs, std::vector & image_points, cv::Size image_size){
+ std::vector rvecs, tvecs;
+ std::vector object_points;
+ points_vector_3D corner_points;
+
+ // Build the reference object points vector;
+ for(int i = 0; i < CHESSBOARD_PATTERN_SIZE.height; i++){
+ for(int j = 0; j < CHESSBOARD_PATTERN_SIZE.width; j++){
+ corner_points.push_back(cv::Point3f(float( j * SQUARE_SIZE ), float( i * SQUARE_SIZE ), 0));
+ }
+ }
+ object_points.push_back(corner_points);
+ object_points.resize(image_points.size(), object_points[0]);
+
+ // Build a camera matrix.
+ camera_matrix = cv::Mat::eye(3, 3, CV_64F);
+
+ // Build the distortion coefficients matrix.
+ dist_coeffs = cv::Mat::zeros(8, 1, CV_64F);
+
+ // Calibrate and return the reprojection error.
+ return cv::calibrateCamera(object_points, image_points, image_size, camera_matrix, dist_coeffs, rvecs, tvecs, 0, TERM_CRITERIA);
+ }
+
+/******************************************************************************
+ * PRIVATE HELPER FUNCTIONS *
+ ******************************************************************************/
+
+ void findContours(cv::Mat & img, contours_vector & v, int minP){
+ contours_vector c;
+ cv::findContours(img, c, CV_RETR_LIST, CV_CHAIN_APPROX_NONE);
+
+ v.clear();
+ for(size_t i = 0; i < c.size(); i++){
+ if(c[i].size() > minP){
+ v.push_back(c[i]);
+ }
+ }
+ }
+
+ void renderMarkers(markers_vector & v, cv::Mat & dst){
+ contours_vector cv;
+
+ for(size_t i = 0; i < v.size(); i++){
+ std::vector pv;
+ for(size_t j = 0; j < v[i].points.size(); ++j)
+ pv.push_back(cv::Point2f(v[i].points[j].x, v[i].points[j].y));
+ cv.push_back(pv);
+ }
+
+ cv::drawContours(dst, cv, -1, COLOR, 1, CV_AA);
+ }
+
+ void isolateMarkers(const contours_vector & vc, markers_vector & vm){
+ std::vector appCurve;
+ markers_vector posMarkers;
+
+ for(size_t i = 0; i < vc.size(); ++i){
+ double eps = vc[i].size() * 0.05;
+ cv::approxPolyDP(vc[i], appCurve, eps, true);
+
+ if(appCurve.size() != 4 || !cv::isContourConvex(appCurve)) continue;
+
+ float minD = std::numeric_limits::max();
+
+ for(int i = 0; i < 4; i++){
+ cv::Point side = appCurve[i] - appCurve[(i + 1) % 4];
+ float sqSideLen = side.dot(side);
+ minD = std::min(minD, sqSideLen);
+ }
+
+ if(minD < MIN_CONTOUR_LENGTH) continue;
+
+ Marker m;
+
+ for(int i = 0; i < 4; i++)
+ m.points.push_back(cv::Point2f(appCurve[i].x, appCurve[i].y));
+
+ cv::Point v1 = m.points[1] - m.points[0];
+ cv::Point v2 = m.points[2] - m.points[0];
+
+ double o = (v1.x * v2.y) - (v1.y * v2.x);
+ if(o < 0.0) std::swap(m.points[1], m.points[3]);
+
+ posMarkers.push_back(m);
+ }
+
+ std::vector > tooNear;
+ for(size_t i = 0; i < posMarkers.size(); ++i){
+ const Marker & m1 = posMarkers[i];
+
+ for(size_t j = i + 1; j < posMarkers.size(); j++){
+ const Marker & m2 = posMarkers[j];
+
+ float dSq = 0.0f;
+
+ for(int c = 0; c < 4; c++){
+ cv::Point v = m1.points[c] - m2.points[c];
+ dSq += v.dot(v);
+ }
+
+ dSq /= 4.0f;
+
+ if(dSq < 100) tooNear.push_back(std::pair(i, j));
+ }
+ }
+
+ std::vector remMask(posMarkers.size(), false);
+
+ for(size_t i = 0; i < tooNear.size(); ++i){
+ float p1 = perimeter(posMarkers[tooNear[i].first].points);
+ float p2 = perimeter(posMarkers[tooNear[i].second].points);
+
+ size_t remInd;
+ if(p1 > p2) remInd = tooNear[i].second;
+ else remInd = tooNear[i].first;
+
+ remMask[remInd] = true;
+ }
+
+ vm.clear();
+ for(size_t i = 0; i < posMarkers.size(); ++i){
+ if(remMask[i]) vm.push_back(posMarkers[i]);
+ }
+ }
+
+ void warpMarker(Marker & m, cv::Mat & in, cv::Mat & out){
+ cv::Mat bin;
+ cv::Size markerSize(350, 350);
+ points_vector v;
+ v.push_back(cv::Point2f(0,0));
+ v.push_back(cv::Point2f(markerSize.width-1,0));
+ v.push_back(cv::Point2f(markerSize.width-1,markerSize.height-1));
+ v.push_back(cv::Point2f(0,markerSize.height-1));
+
+ cv::Mat M = cv::getPerspectiveTransform(m.points, v);
+ cv::warpPerspective(in, bin, M, markerSize);
+
+ cv::threshold(bin, out, 128, 255, cv::THRESH_BINARY | cv::THRESH_OTSU);
+ }
+
+ int hammDistMarker(cv::Mat bits){
+ int ids[4][5] = {
+ {1,0,0,0,0},
+ {1,0,1,1,1},
+ {0,1,0,0,1},
+ {0,1,1,1,0}
+ };
+
+ int dist = 0;
+
+ for (int y = 0; y < 5; y++){
+ int minSum = 1e5;
+
+ for (int p = 0; p < 4; p++){
+ int sum = 0;
+
+ for (int x = 0; x < 5; x++){
+ sum += bits.at(y, x) == ids[p][x] ? 0 : 1;
+ }
+
+ if(minSum > sum)
+ minSum = sum;
+ }
+
+ dist += minSum;
+ }
+
+ return dist;
+ }
+
+ cv::Mat rotate(cv::Mat in){
+ cv::Mat out;
+ in.copyTo(out);
+ for (int i=0;i(i,j)=in.at(in.cols-j-1,i);
+ }
+ }
+
+ return out;
+ }
+
+ int decodeMarker(cv::Mat & marker){
+ bool found = false;
+ int code = 0;
+ cv::Mat bits;
+
+ for(int y = 0; y < 7; y++){
+ int inc = (y == 0 || y == 6) ? 1 : 6;
+
+ for(int x = 0; x < 7; x += inc){
+ int cX = x * 50;
+ int cY = y * 50;
+
+ cv::Mat cell = marker(cv::Rect(cX, cY, 50, 50));
+
+ int nZ = cv::countNonZero(cell);
+
+ // Not a valid marker.
+ if(nZ > (50 * 50) / 2) return -1;
+ }
+ }
+
+ bits = cv::Mat::zeros(5, 5, CV_8UC1);
+
+
+ for(int y = 0; y < 5; y++){
+ for(int x = 0; x < 5; x++){
+ int cX = (x + 1) * 50;
+ int cY = (y + 1) * 50;
+
+ cv::Mat cell = marker(cv::Rect(cX, cY, 50, 50));
+
+ int nZ = cv::countNonZero(cell);
+
+ if(nZ > (50 * 50) / 2) bits.at(y, x) = 1;
+ }
+ }
+
+ if(hammDistMarker(bits) != 0){
+ for(int r = 1; r < 4; r++){
+ bits = rotate(bits);
+ if(hammDistMarker(bits) != 0) continue;
+ else{ found = true; break;}
+ }
+ }else found = true;
+
+
+ if(found){
+ for(int y = 0; y < 5; y++){
+ code <<= 1;
+ if(bits.at(y, 1))
+ code |= 1;
+
+ code <<= 1;
+ if(bits.at(y, 3))
+ code |= 1;
+ }
+
+
+ return code;
+ }else
+ return -1;
+ }
+
+ float perimeter(points_vector & p){
+ float per = 0.0f, dx, dy;
+
+ for(size_t i; i < p.size(); ++i){
+ dx = p[i].x - p[(i + 1) % p.size()].x;
+ dy = p[i].y - p[(i + 1) % p.size()].y;
+ per += sqrt((dx * dx) + (dy * dy));
+ }
+
+ return per;
+ }
+
+/******************************************************************************
+ * CLASS METHODS *
+ ******************************************************************************/
+
+ Marker::~Marker(){
+ points.clear();
+ }
}
diff --git a/jni/marker.hpp b/jni/marker.hpp
index 41c9e24..001e825 100644
--- a/jni/marker.hpp
+++ b/jni/marker.hpp
@@ -17,15 +17,12 @@
#define MARKER_HPP
#include
+
#include
namespace nxtar{
-class Marker;
-
-typedef std::vector > contours_vector;
-typedef std::vector points_vector;
-typedef std::vector markers_vector;
+typedef std::vector points_vector;
class Marker{
public:
@@ -34,8 +31,26 @@ public:
int code;
};
+/**
+ * Detect all 5x5 markers in the input image and return their codes in the
+ * output vector.
+ */
void getAllMarkers(std::vector &, cv::Mat &);
-void calibrateCamera(cv::Mat &);
+
+/**
+ * Find a chessboard calibration pattern in the input image. Returns true
+ * if the pattern was found, false otherwise. The calibration points
+ * detected on the image are saved in the output vector.
+ */
+bool findCalibrationPattern(points_vector &, cv::Mat &);
+
+/**
+ * Sets the camera matrix and the distortion coefficients for the camera
+ * that captured the input image points into the output matrices. Returns
+ * the reprojection error as returned by cv::calibrateCamera.
+ */
+double getCameraParameters(cv::Mat &, cv::Mat &, std::vector &, cv::Size);
+
}
#endif
diff --git a/project.properties b/project.properties
index 34ab397..57ada93 100644
--- a/project.properties
+++ b/project.properties
@@ -9,4 +9,4 @@
# Project target.
target=android-19
-android.library.reference.1=../../../../../NVPACK/OpenCV-2.4.5-Tegra-sdk-r2/sdk/java
+android.library.reference.1=../../../../../Documents/OpenCV-2.4.8-android-sdk/sdk/java
diff --git a/src/ve/ucv/ciens/ccg/nxtar/MainActivity.java b/src/ve/ucv/ciens/ccg/nxtar/MainActivity.java
index da2085e..0644af4 100644
--- a/src/ve/ucv/ciens/ccg/nxtar/MainActivity.java
+++ b/src/ve/ucv/ciens/ccg/nxtar/MainActivity.java
@@ -17,16 +17,13 @@ package ve.ucv.ciens.ccg.nxtar;
import java.io.ByteArrayOutputStream;
-import org.opencv.android.BaseLoaderCallback;
-import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.android.Utils;
import org.opencv.core.Mat;
import org.opencv.imgproc.Imgproc;
import ve.ucv.ciens.ccg.nxtar.interfaces.CVProcessor;
-import ve.ucv.ciens.ccg.nxtar.interfaces.MulticastEnabler;
-import ve.ucv.ciens.ccg.nxtar.interfaces.Toaster;
+import ve.ucv.ciens.ccg.nxtar.interfaces.OSFunctionalityProvider;
import android.content.Context;
import android.content.pm.ActivityInfo;
import android.graphics.Bitmap;
@@ -43,39 +40,37 @@ import com.badlogic.gdx.backends.android.AndroidApplication;
import com.badlogic.gdx.backends.android.AndroidApplicationConfiguration;
import com.badlogic.gdx.controllers.mappings.Ouya;
-public class MainActivity extends AndroidApplication implements Toaster, MulticastEnabler, CVProcessor{
+public class MainActivity extends AndroidApplication implements OSFunctionalityProvider, CVProcessor{
private static final String TAG = "NXTAR_ANDROID_MAIN";
private static final String CLASS_NAME = MainActivity.class.getSimpleName();
+ private static boolean ocvOn = false;
+
private WifiManager wifiManager;
private MulticastLock multicastLock;
private Handler uiHandler;
private Context uiContext;
- private static boolean ocvOn = false;
- private BaseLoaderCallback loaderCallback;
private final ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
+ public native void getMarkerCodesAndLocations(long inMat, long outMat, int[] codes);
+ public native boolean findCalibrationPattern(long inMat, long outMat, float[] points);
+
static{
- if(!OpenCVLoader.initDebug()){
- System.exit(1);
- }
+ if(!OpenCVLoader.initDebug())
+ ocvOn = false;
+
try{
System.loadLibrary("cvproc");
ocvOn = true;
}catch(UnsatisfiedLinkError u){
- System.exit(1);
+ ocvOn = false;
}
}
- public native void getMarkerCodesAndLocations(long inMat, long outMat, int[] codes);
- public native void findCalibrationPattern(long inMat, long outMat);
-
@Override
public void onCreate(Bundle savedInstanceState){
super.onCreate(savedInstanceState);
- //ocvOn = false;
-
if(!Ouya.runningOnOuya){
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
}else{
@@ -92,30 +87,12 @@ public class MainActivity extends AndroidApplication implements Toaster, Multica
cfg.useCompass = false;
cfg.useWakelock = true;
- loaderCallback = new BaseLoaderCallback(this){
- @Override
- public void onManagerConnected(int status){
- switch(status){
- case LoaderCallbackInterface.SUCCESS:
- System.loadLibrary("cvproc");
- ocvOn = true;
- Toast.makeText(uiContext, R.string.ocv_success, Toast.LENGTH_LONG).show();
- break;
- default:
- Toast.makeText(uiContext, R.string.ocv_failed, Toast.LENGTH_LONG).show();
- Gdx.app.exit();
- break;
- }
- }
- };
-
- //OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_7, this, loaderCallback);
initialize(new NxtARCore(this), cfg);
}
- ////////////////////////////////
- // Toaster interface methods. //
- ////////////////////////////////
+ ////////////////////////////////////////////////
+ // OSFunctionalityProvider interface methods. //
+ ////////////////////////////////////////////////
@Override
public void showShortToast(final String msg){
uiHandler.post(new Runnable(){
@@ -136,9 +113,6 @@ public class MainActivity extends AndroidApplication implements Toaster, Multica
});
}
- /////////////////////////////////////////
- // MulticastEnabler interface methods. //
- /////////////////////////////////////////
@Override
public void enableMulticast(){
Gdx.app.log(TAG, CLASS_NAME + ".enableMulticast() :: Requesting multicast lock.");
@@ -156,8 +130,11 @@ public class MainActivity extends AndroidApplication implements Toaster, Multica
}
}
+ ////////////////////////////////////
+ // CVProcessor interface methods. //
+ ////////////////////////////////////
@Override
- public CVData processFrame(byte[] frame, int w, int h) {
+ public CVData findMarkersInFrame(byte[] frame, int w, int h) {
if(ocvOn){
int codes[] = new int[15];
Bitmap tFrame, mFrame;
@@ -168,8 +145,7 @@ public class MainActivity extends AndroidApplication implements Toaster, Multica
Mat outImg = new Mat();
Utils.bitmapToMat(tFrame, inImg);
- //getMarkerCodesAndLocations(inImg.getNativeObjAddr(), outImg.getNativeObjAddr(), codes);
- findCalibrationPattern(inImg.getNativeObjAddr(), outImg.getNativeObjAddr());
+ getMarkerCodesAndLocations(inImg.getNativeObjAddr(), outImg.getNativeObjAddr(), codes);
Mat temp = new Mat();
Imgproc.cvtColor(outImg, temp, Imgproc.COLOR_BGR2RGB);
From b3d678f0783345be42e3cd71c1c91f1244011fea Mon Sep 17 00:00:00 2001
From: unknown
Date: Mon, 28 Apr 2014 10:42:38 -0430
Subject: [PATCH 04/18] Ported camera calibration code.
---
jni/Android.mk | 3 +-
jni/marker.cpp | 684 ++++++++++---------
src/ve/ucv/ciens/ccg/nxtar/MainActivity.java | 6 +
3 files changed, 368 insertions(+), 325 deletions(-)
diff --git a/jni/Android.mk b/jni/Android.mk
index 2e2fa75..6cad0e2 100644
--- a/jni/Android.mk
+++ b/jni/Android.mk
@@ -3,8 +3,9 @@ LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
OPENCV_CAMERA_MODULES:=off
-OPENCV_LIB_TYPE:=STATIC
+OPENCV_LIB_TYPE:=STATIC #SHARED
include C:\Users\miguel.astor\Documents\OpenCV-2.4.8-android-sdk\sdk\native\jni\OpenCV.mk
+#include C:\NVPACK\OpenCV-2.4.5-Tegra-sdk-r2\sdk\native\jni\OpenCV-tegra3.mk
LOCAL_MODULE := cvproc
LOCAL_SRC_FILES := cv_proc.cpp marker.cpp
diff --git a/jni/marker.cpp b/jni/marker.cpp
index 5a39ab0..664669f 100644
--- a/jni/marker.cpp
+++ b/jni/marker.cpp
@@ -24,443 +24,479 @@
namespace nxtar{
- typedef std::vector points_vector_3D;
- typedef std::vector > contours_vector;
- typedef std::vector markers_vector;
+typedef std::vector points_vector_3D;
+typedef std::vector > contours_vector;
+typedef std::vector markers_vector;
/******************************************************************************
* PRIVATE CONSTANTS *
******************************************************************************/
- /**
- * Size of a square cell in the calibration pattern.
- */
- static const float SQUARE_SIZE = 1.0f;
+/**
+ * Minimal number of points in a contour.
+ */
+static const int MIN_POINTS = 40;
- /**
- * Minimal lenght of a contour to be considered as a marker candidate.
- */
- static const float MIN_CONTOUR_LENGTH = 0.1;
+/**
+ * Size of a square cell in the calibration pattern.
+ */
+static const float SQUARE_SIZE = 1.0f;
- /**
- * Flags for the calibration pattern detecion function.
- */
- static const int PATTERN_DETECTION_FLAGS = cv::CALIB_CB_ADAPTIVE_THRESH + cv::CALIB_CB_NORMALIZE_IMAGE + cv::CALIB_CB_FAST_CHECK;
+/**
+ * Minimal lenght of a contour to be considered as a marker candidate.
+ */
+static const float MIN_CONTOUR_LENGTH = 0.1;
- /**
- * Color for rendering the marker outlines.
- */
- static const cv::Scalar COLOR = cv::Scalar(255, 255, 255);
+/**
+ * Flags for the calibration pattern detecion function.
+ */
+static const int PATTERN_DETECTION_FLAGS = cv::CALIB_CB_ADAPTIVE_THRESH + cv::CALIB_CB_NORMALIZE_IMAGE + cv::CALIB_CB_FAST_CHECK;
- /**
- * Size of the chessboard pattern image (columns, rows).
- */
- static const cv::Size CHESSBOARD_PATTERN_SIZE(6, 9);
+/**
+ * Color for rendering the marker outlines.
+ */
+static const cv::Scalar COLOR = cv::Scalar(255, 255, 255);
- /**
- * Termination criteria for OpenCV's iterative algorithms.
- */
- static const cv::TermCriteria TERM_CRITERIA = cv::TermCriteria(CV_TERMCRIT_EPS + CV_TERMCRIT_ITER, 30, 0.1);
+/**
+ * Size of the chessboard pattern image (columns, rows).
+ */
+static const cv::Size CHESSBOARD_PATTERN_SIZE(6, 9);
+
+/**
+ * Termination criteria for OpenCV's iterative algorithms.
+ */
+static const cv::TermCriteria TERM_CRITERIA = cv::TermCriteria(CV_TERMCRIT_EPS + CV_TERMCRIT_ITER, 30, 0.1);
/******************************************************************************
* PRIVATE FUNCTION PROTOTYPES *
******************************************************************************/
- /**
- * Calculates the perimeter of a points vector defining a polygon.
- */
- float perimeter(points_vector &);
+float perimeter(points_vector &);
- /**
- * Calculates the Hamming distance of a 5x5 marker.
- */
- int hammDistMarker(cv::Mat);
+int hammDistMarker(cv::Mat);
- /**
- * Rotates an OpenCV matrix in place by 90 degrees clockwise.
- */
- cv::Mat rotate(cv::Mat);
+cv::Mat rotate(cv::Mat);
- /**
- * Returns the code of a 5x5 marker or -1 if the marker is not valid.
- */
- int decodeMarker(cv::Mat &);
+int decodeMarker(cv::Mat &);
- /**
- * Renders the polygon defined in the input vector on the specified image.
- */
- void renderMarkers(markers_vector &, cv::Mat &);
+void renderMarkers(markers_vector &, cv::Mat &);
- /**
- * Identifies all possible marker candidates in a polygon vector.
- */
- void isolateMarkers(const contours_vector &, markers_vector &);
+void isolateMarkers(const contours_vector &, markers_vector &);
- /**
- * Identifies all roughly 4 side figures in the input image.
- */
- void findContours(cv::Mat &, contours_vector &, int);
+void findContours(cv::Mat &, contours_vector &, int);
- /**
- * Removes the prerspective distortion from a marker candidate image.
- */
- void warpMarker(Marker &, cv::Mat &, cv::Mat &);
+void warpMarker(Marker &, cv::Mat &, cv::Mat &);
/******************************************************************************
* PUBLIC API *
******************************************************************************/
- void getAllMarkers(std::vector & codes, cv::Mat & img){
- cv::Mat gray, thresh, cont, mark;
- contours_vector contours;
- markers_vector markers;
- markers_vector valid_markers;
+void getAllMarkers(std::vector & codes, cv::Mat & img){
+ cv::Mat gray, thresh, cont, mark;
+ contours_vector contours;
+ markers_vector markers;
+ markers_vector valid_markers;
#ifdef DESKTOP
- std::ostringstream oss;
+ std::ostringstream oss;
#endif
- codes.clear();
+ codes.clear();
- // Find all marker candidates in the input image.
- // 1) First, convert the image to grayscale.
- // 2) Then, binarize the grayscale image.
- // 3) Finally indentify all 4 sided figures in the binarized image.
- cv::cvtColor(img, gray, CV_BGR2GRAY);
- cv::adaptiveThreshold(gray, thresh, 255, cv::ADAPTIVE_THRESH_MEAN_C, cv::THRESH_BINARY_INV, 7, 7);
- findContours(thresh, contours, 40);
- isolateMarkers(contours, markers);
+ // Find all marker candidates in the input image.
+ // 1) First, convert the image to grayscale.
+ // 2) Then, binarize the grayscale image.
+ // 3) Finally indentify all 4 sided figures in the binarized image.
+ cv::cvtColor(img, gray, CV_BGR2GRAY);
+ cv::adaptiveThreshold(gray, thresh, 255, cv::ADAPTIVE_THRESH_MEAN_C, cv::THRESH_BINARY_INV, 7, 7);
+ findContours(thresh, contours, MIN_POINTS);
+ isolateMarkers(contours, markers);
- // Remove the perspective distortion from the detected marker candidates.
- // Then attempt to decode them and push the valid ones into the valid
- // markes vector.
- for(int i = 0; i < markers.size(); i++){
- warpMarker(markers[i], gray, mark);
+ // Remove the perspective distortion from the detected marker candidates.
+ // Then attempt to decode them and push the valid ones into the valid
+ // markers vector.
+ for(int i = 0; i < markers.size(); i++){
+ warpMarker(markers[i], gray, mark);
- int code = decodeMarker(mark);
+ int code = decodeMarker(mark);
- if(code != -1){
- markers[i].code = code;
- valid_markers.push_back(markers[i]);
- }
- }
+ if(code != -1){
+ markers[i].code = code;
+ valid_markers.push_back(markers[i]);
+ }
+ }
- for(int i = 0; i < valid_markers.size(); i++){
+ for(int i = 0; i < valid_markers.size(); i++){
#ifdef DESKTOP
- // Render the detected valid markers with their codes for debbuging
- // purposes.
- oss << valid_markers[i].code;
+ // Render the detected valid markers with their codes for debbuging
+ // purposes.
+ oss << valid_markers[i].code;
- cv::putText(mark, oss.str(), cv::Point(5, 250), cv::FONT_HERSHEY_PLAIN, 2, cv::Scalar::all(128), 3, 8);
+ cv::putText(mark, oss.str(), cv::Point(5, 250), cv::FONT_HERSHEY_PLAIN, 2, cv::Scalar::all(128), 3, 8);
- oss.str("");
- oss.clear();
+ oss.str("");
+ oss.clear();
- oss << "Marker[" << i << "]";
+ oss << "Marker[" << i << "]";
- cv::imshow(oss.str(), mark);
+ cv::imshow(oss.str(), mark);
- oss.str("");
- oss.clear();
+ oss.str("");
+ oss.clear();
#endif
- // Fix the detected corners to better approximate the markers. And
- // push their codes to the output vector.
- cv::cornerSubPix(gray, valid_markers[i].points, cvSize(10, 10), cvSize(-1, -1), TERM_CRITERIA);
- codes.push_back(valid_markers[i].code);
- }
+ // Fix the detected corners to better approximate the markers. And
+ // push their codes to the output vector.
+ cv::cornerSubPix(gray, valid_markers[i].points, cvSize(10, 10), cvSize(-1, -1), TERM_CRITERIA);
+ codes.push_back(valid_markers[i].code);
+ }
- // Render the detected markers on top of the input image.
- cont = cv::Mat::zeros(img.size(), CV_8UC3);
- renderMarkers(valid_markers, cont);
- img = img + cont;
+ // Render the detected markers on top of the input image.
+ cont = cv::Mat::zeros(img.size(), CV_8UC3);
+ renderMarkers(valid_markers, cont);
+ img = img + cont;
- // Clear the local vectors.
- markers.clear();
- contours.clear();
- valid_markers.clear();
- }
+ // Clear the local vectors.
+ markers.clear();
+ contours.clear();
+ valid_markers.clear();
+}
- bool findCalibrationPattern(points_vector & corners, cv::Mat & img){
- bool patternfound;
- cv::Mat gray;
+bool findCalibrationPattern(points_vector & corners, cv::Mat & img){
+ bool patternfound;
+ cv::Mat gray;
- // Convert the input image to grayscale and attempt to find the
- // calibration pattern.
- cv::cvtColor(img, gray, CV_BGR2GRAY);
- patternfound = cv::findChessboardCorners(gray, CHESSBOARD_PATTERN_SIZE, corners, PATTERN_DETECTION_FLAGS);
+ // Convert the input image to grayscale and attempt to find the
+ // calibration pattern.
+ cv::cvtColor(img, gray, CV_BGR2GRAY);
+ patternfound = cv::findChessboardCorners(gray, CHESSBOARD_PATTERN_SIZE, corners, PATTERN_DETECTION_FLAGS);
- // If the pattern was found then fix the detected points a bit.
- if(patternfound)
- cv::cornerSubPix(gray, corners, cv::Size(11, 11), cv::Size(-1, -1), TERM_CRITERIA);
+ // If the pattern was found then fix the detected points a bit.
+ if(patternfound)
+ cv::cornerSubPix(gray, corners, cv::Size(11, 11), cv::Size(-1, -1), TERM_CRITERIA);
- // Render the detected pattern.
- cv::drawChessboardCorners(img, CHESSBOARD_PATTERN_SIZE, cv::Mat(corners), patternfound);
+ // Render the detected pattern.
+ cv::drawChessboardCorners(img, CHESSBOARD_PATTERN_SIZE, cv::Mat(corners), patternfound);
- return patternfound;
- }
+ return patternfound;
+}
- double getCameraParameters(cv::Mat & camera_matrix, cv::Mat & dist_coeffs, std::vector & image_points, cv::Size image_size){
- std::vector rvecs, tvecs;
- std::vector object_points;
- points_vector_3D corner_points;
+double getCameraParameters(cv::Mat & camera_matrix, cv::Mat & dist_coeffs, std::vector<points_vector> & image_points, cv::Size image_size){
+ std::vector<cv::Mat> rvecs, tvecs;
+ std::vector<points_vector_3D> object_points;
+ points_vector_3D corner_points;
- // Build the reference object points vector;
- for(int i = 0; i < CHESSBOARD_PATTERN_SIZE.height; i++){
- for(int j = 0; j < CHESSBOARD_PATTERN_SIZE.width; j++){
- corner_points.push_back(cv::Point3f(float( j * SQUARE_SIZE ), float( i * SQUARE_SIZE ), 0));
- }
- }
- object_points.push_back(corner_points);
- object_points.resize(image_points.size(), object_points[0]);
+ // Build the reference object points vector.
+ for(int i = 0; i < CHESSBOARD_PATTERN_SIZE.height; i++){
+ for(int j = 0; j < CHESSBOARD_PATTERN_SIZE.width; j++){
+ corner_points.push_back(cv::Point3f(float( j * SQUARE_SIZE ), float( i * SQUARE_SIZE ), 0));
+ }
+ }
+ object_points.push_back(corner_points);
+ object_points.resize(image_points.size(), object_points[0]);
- // Build a camera matrix.
- camera_matrix = cv::Mat::eye(3, 3, CV_64F);
+ // Build a camera matrix.
+ camera_matrix = cv::Mat::eye(3, 3, CV_64F);
- // Build the distortion coefficients matrix.
- dist_coeffs = cv::Mat::zeros(8, 1, CV_64F);
+ // Build the distortion coefficients matrix.
+ dist_coeffs = cv::Mat::zeros(8, 1, CV_64F);
- // Calibrate and return the reprojection error.
- return cv::calibrateCamera(object_points, image_points, image_size, camera_matrix, dist_coeffs, rvecs, tvecs, 0, TERM_CRITERIA);
- }
+ // Calibrate and return the reprojection error.
+ return cv::calibrateCamera(object_points, image_points, image_size, camera_matrix, dist_coeffs, rvecs, tvecs, 0, TERM_CRITERIA);
+}
/******************************************************************************
* PRIVATE HELPER FUNCTIONS *
******************************************************************************/
- void findContours(cv::Mat & img, contours_vector & v, int minP){
- contours_vector c;
- cv::findContours(img, c, CV_RETR_LIST, CV_CHAIN_APPROX_NONE);
+/**
+ * Find all contours in the input image and save them to the output
+ * vector.
+ */
+void findContours(cv::Mat & img, contours_vector & v, int minP){
+ contours_vector c;
- v.clear();
- for(size_t i = 0; i < c.size(); i++){
- if(c[i].size() > minP){
- v.push_back(c[i]);
- }
- }
- }
+ // A contour is discarded if it possesses fewer than the specified
+ // minimum number of points.
+ cv::findContours(img, c, CV_RETR_LIST, CV_CHAIN_APPROX_NONE);
- void renderMarkers(markers_vector & v, cv::Mat & dst){
- contours_vector cv;
+ v.clear();
+ for(size_t i = 0; i < c.size(); i++){
+ if(c[i].size() > minP){
+ v.push_back(c[i]);
+ }
+ }
+}
- for(size_t i = 0; i < v.size(); i++){
- std::vector pv;
- for(size_t j = 0; j < v[i].points.size(); ++j)
- pv.push_back(cv::Point2f(v[i].points[j].x, v[i].points[j].y));
- cv.push_back(pv);
- }
+/**
+ * Render the input marker vector onto the output image.
+ */
+void renderMarkers(markers_vector & v, cv::Mat & dst){
+ contours_vector cv;
- cv::drawContours(dst, cv, -1, COLOR, 1, CV_AA);
- }
+ // Extract the points that form every marker into a contours vector.
+ for(size_t i = 0; i < v.size(); i++){
+ std::vector<cv::Point2f> pv;
+ for(size_t j = 0; j < v[i].points.size(); ++j)
+ pv.push_back(cv::Point2f(v[i].points[j].x, v[i].points[j].y));
+ cv.push_back(pv);
+ }
- void isolateMarkers(const contours_vector & vc, markers_vector & vm){
- std::vector appCurve;
- markers_vector posMarkers;
+ // Render.
+ cv::drawContours(dst, cv, -1, COLOR, 1, CV_AA);
+}
- for(size_t i = 0; i < vc.size(); ++i){
- double eps = vc[i].size() * 0.05;
- cv::approxPolyDP(vc[i], appCurve, eps, true);
+/**
+ * Identify and return all marker candidates.
+ */
+void isolateMarkers(const contours_vector & vc, markers_vector & vm){
+ std::vector<cv::Point> appCurve;
+ markers_vector posMarkers;
- if(appCurve.size() != 4 || !cv::isContourConvex(appCurve)) continue;
+ // For every detected contour.
+ for(size_t i = 0; i < vc.size(); ++i){
+ double eps = vc[i].size() * 0.05;
- float minD = std::numeric_limits::max();
+ // Approximate the contour with a polygon.
+ cv::approxPolyDP(vc[i], appCurve, eps, true);
- for(int i = 0; i < 4; i++){
- cv::Point side = appCurve[i] - appCurve[(i + 1) % 4];
- float sqSideLen = side.dot(side);
- minD = std::min(minD, sqSideLen);
- }
+ // If the polygon is not a quadrilateral then this is not a marker
+ // candidate.
+ if(appCurve.size() != 4 || !cv::isContourConvex(appCurve)) continue;
- if(minD < MIN_CONTOUR_LENGTH) continue;
+ // Calculate the squared length of the shortest side of this
+ // candidate. If it is too short then discard the candidate.
+ float minD = std::numeric_limits<float>::max();
- Marker m;
+ for(int i = 0; i < 4; i++){
+ cv::Point side = appCurve[i] - appCurve[(i + 1) % 4];
+ float sqSideLen = side.dot(side);
+ minD = std::min(minD, sqSideLen);
+ }
- for(int i = 0; i < 4; i++)
- m.points.push_back(cv::Point2f(appCurve[i].x, appCurve[i].y));
+ if(minD < MIN_CONTOUR_LENGTH) continue;
- cv::Point v1 = m.points[1] - m.points[0];
- cv::Point v2 = m.points[2] - m.points[0];
+ // Save the marker and order its points counter-clockwise.
+ Marker m;
- double o = (v1.x * v2.y) - (v1.y * v2.x);
- if(o < 0.0) std::swap(m.points[1], m.points[3]);
+ for(int i = 0; i < 4; i++)
+ m.points.push_back(cv::Point2f(appCurve[i].x, appCurve[i].y));
- posMarkers.push_back(m);
- }
+ cv::Point v1 = m.points[1] - m.points[0];
+ cv::Point v2 = m.points[2] - m.points[0];
- std::vector > tooNear;
- for(size_t i = 0; i < posMarkers.size(); ++i){
- const Marker & m1 = posMarkers[i];
+ double o = (v1.x * v2.y) - (v1.y * v2.x);
+ if(o < 0.0) std::swap(m.points[1], m.points[3]);
- for(size_t j = i + 1; j < posMarkers.size(); j++){
- const Marker & m2 = posMarkers[j];
+ posMarkers.push_back(m);
+ }
- float dSq = 0.0f;
+ // Identify contours that are too close to each other to eliminate
+ // possible duplicates.
+ std::vector<std::pair<size_t, size_t> > tooNear;
+ for(size_t i = 0; i < posMarkers.size(); ++i){
+ const Marker & m1 = posMarkers[i];
- for(int c = 0; c < 4; c++){
- cv::Point v = m1.points[c] - m2.points[c];
- dSq += v.dot(v);
- }
+ for(size_t j = i + 1; j < posMarkers.size(); j++){
+ const Marker & m2 = posMarkers[j];
- dSq /= 4.0f;
+ float dSq = 0.0f;
- if(dSq < 100) tooNear.push_back(std::pair(i, j));
- }
- }
+ for(int c = 0; c < 4; c++){
+ cv::Point v = m1.points[c] - m2.points[c];
+ dSq += v.dot(v);
+ }
- std::vector remMask(posMarkers.size(), false);
+ dSq /= 4.0f;
- for(size_t i = 0; i < tooNear.size(); ++i){
- float p1 = perimeter(posMarkers[tooNear[i].first].points);
- float p2 = perimeter(posMarkers[tooNear[i].second].points);
+ if(dSq < 100) tooNear.push_back(std::pair<size_t, size_t>(i, j));
+ }
+ }
- size_t remInd;
- if(p1 > p2) remInd = tooNear[i].second;
- else remInd = tooNear[i].first;
+ // Mark one of every pair of duplicates to be discarded.
+ std::vector<bool> remMask(posMarkers.size(), false);
+ for(size_t i = 0; i < tooNear.size(); ++i){
+ float p1 = perimeter(posMarkers[tooNear[i].first].points);
+ float p2 = perimeter(posMarkers[tooNear[i].second].points);
- remMask[remInd] = true;
- }
+ size_t remInd;
+ if(p1 > p2) remInd = tooNear[i].second;
+ else remInd = tooNear[i].first;
- vm.clear();
- for(size_t i = 0; i < posMarkers.size(); ++i){
- if(remMask[i]) vm.push_back(posMarkers[i]);
- }
- }
+ remMask[remInd] = true;
+ }
- void warpMarker(Marker & m, cv::Mat & in, cv::Mat & out){
- cv::Mat bin;
- cv::Size markerSize(350, 350);
- points_vector v;
- v.push_back(cv::Point2f(0,0));
- v.push_back(cv::Point2f(markerSize.width-1,0));
- v.push_back(cv::Point2f(markerSize.width-1,markerSize.height-1));
- v.push_back(cv::Point2f(0,markerSize.height-1));
+ // Save the candidates that survived the duplicates test.
+ vm.clear();
+ for(size_t i = 0; i < posMarkers.size(); ++i){
+ if(!remMask[i]) vm.push_back(posMarkers[i]);
+ }
+}
- cv::Mat M = cv::getPerspectiveTransform(m.points, v);
- cv::warpPerspective(in, bin, M, markerSize);
+/**
+ * Warp a marker image to remove its perspective distortion.
+ */
+void warpMarker(Marker & m, cv::Mat & in, cv::Mat & out){
+ cv::Mat bin;
+ cv::Size markerSize(350, 350);
+ points_vector v;
- cv::threshold(bin, out, 128, 255, cv::THRESH_BINARY | cv::THRESH_OTSU);
- }
+ // Assemble a unitary reference polygon.
+ v.push_back(cv::Point2f(0,0));
+ v.push_back(cv::Point2f(markerSize.width-1,0));
+ v.push_back(cv::Point2f(markerSize.width-1,markerSize.height-1));
+ v.push_back(cv::Point2f(0,markerSize.height-1));
- int hammDistMarker(cv::Mat bits){
- int ids[4][5] = {
- {1,0,0,0,0},
- {1,0,1,1,1},
- {0,1,0,0,1},
- {0,1,1,1,0}
- };
+ // Warp the input image's perspective to transform it into the reference
+ // polygon's perspective.
+ cv::Mat M = cv::getPerspectiveTransform(m.points, v);
+ cv::warpPerspective(in, bin, M, markerSize);
- int dist = 0;
+ // Binarize the warped image into the output image.
+ cv::threshold(bin, out, 128, 255, cv::THRESH_BINARY | cv::THRESH_OTSU);
+}
- for (int y = 0; y < 5; y++){
- int minSum = 1e5;
+/**
+ * Calculate the Hamming distance of a 5x5 bit matrix.
+ */
+int hammDistMarker(cv::Mat bits){
+ int ids[4][5] = {
+ {1,0,0,0,0},
+ {1,0,1,1,1},
+ {0,1,0,0,1},
+ {0,1,1,1,0}
+ };
- for (int p = 0; p < 4; p++){
- int sum = 0;
+ int dist = 0;
- for (int x = 0; x < 5; x++){
- sum += bits.at(y, x) == ids[p][x] ? 0 : 1;
- }
+ for (int y = 0; y < 5; y++){
+ int minSum = 1e5;
- if(minSum > sum)
- minSum = sum;
- }
+ for (int p = 0; p < 4; p++){
+ int sum = 0;
- dist += minSum;
- }
+ for (int x = 0; x < 5; x++){
+ sum += bits.at<uchar>(y, x) == ids[p][x] ? 0 : 1;
+ }
- return dist;
- }
+ if(minSum > sum)
+ minSum = sum;
+ }
- cv::Mat rotate(cv::Mat in){
- cv::Mat out;
- in.copyTo(out);
- for (int i=0;i(i,j)=in.at(in.cols-j-1,i);
- }
- }
+ dist += minSum;
+ }
- return out;
- }
+ return dist;
+}
- int decodeMarker(cv::Mat & marker){
- bool found = false;
- int code = 0;
- cv::Mat bits;
+/**
+ * Rotate a matrix by 90 degrees clockwise.
+ */
+cv::Mat rotate(cv::Mat in){
+ cv::Mat out;
- for(int y = 0; y < 7; y++){
- int inc = (y == 0 || y == 6) ? 1 : 6;
+ in.copyTo(out);
+ for (int i = 0; i < in.rows; i++){
+ for (int j = 0; j < in.cols; j++){
+ out.at<uchar>(i, j) = in.at<uchar>(in.cols - j - 1, i);
+ }
+ }
- for(int x = 0; x < 7; x += inc){
- int cX = x * 50;
- int cY = y * 50;
+ return out;
+}
- cv::Mat cell = marker(cv::Rect(cX, cY, 50, 50));
+/**
+ * Decode a marker image and return its code. Returns -1 if the image is
+ * not a valid marker.
+ */
+int decodeMarker(cv::Mat & marker){
+ bool found = false;
+ int code = 0;
+ cv::Mat bits;
- int nZ = cv::countNonZero(cell);
+ // Verify that the outer rim of marker cells are all black.
+ for(int y = 0; y < 7; y++){
+ int inc = (y == 0 || y == 6) ? 1 : 6;
- // Not a valid marker.
- if(nZ > (50 * 50) / 2) return -1;
- }
- }
+ for(int x = 0; x < 7; x += inc){
+ int cX = x * 50;
+ int cY = y * 50;
+ cv::Mat cell = marker(cv::Rect(cX, cY, 50, 50));
- bits = cv::Mat::zeros(5, 5, CV_8UC1);
+ int nZ = cv::countNonZero(cell);
+
+ // If one of the rim cells is 50% white or more then this
+ // is not a valid marker.
+ if(nZ > (50 * 50) / 2) return -1;
+ }
+ }
+
+ // Create a 5x5 matrix to hold a simplified representation of this
+ // marker.
+ bits = cv::Mat::zeros(5, 5, CV_8UC1);
+
+ // For every cell in the marker flip its corresponding 'bit' in the
+ // bit matrix if it is at least 50% white.
+ for(int y = 0; y < 5; y++){
+ for(int x = 0; x < 5; x++){
+ int cX = (x + 1) * 50;
+ int cY = (y + 1) * 50;
+ cv::Mat cell = marker(cv::Rect(cX, cY, 50, 50));
+ int nZ = cv::countNonZero(cell);
+
+ if(nZ > (50 * 50) / 2) bits.at<uchar>(y, x) = 1;
+ }
+ }
+
+ // Calculate the Hamming distance of the bit matrix and each of its
+ // 90 degree rotations to determine if this marker has a valid code.
+ if(hammDistMarker(bits) != 0){
+ for(int r = 1; r < 4; r++){
+ bits = rotate(bits);
+ if(hammDistMarker(bits) != 0) continue;
+ else{ found = true; break;}
+ }
+ }else found = true;
+
+ // If the code is valid then decode it to an int and return it.
+ if(found){
+ for(int y = 0; y < 5; y++){
+ code <<= 1;
+ if(bits.at<uchar>(y, 1))
+ code |= 1;
+
+ code <<= 1;
+ if(bits.at<uchar>(y, 3))
+ code |= 1;
+ }
- for(int y = 0; y < 5; y++){
- for(int x = 0; x < 5; x++){
- int cX = (x + 1) * 50;
- int cY = (y + 1) * 50;
+ return code;
+ }else
+ return -1;
+}
- cv::Mat cell = marker(cv::Rect(cX, cY, 50, 50));
+/**
+ * Calculate the perimeter of a polygon defined as a vector of points.
+ */
+float perimeter(points_vector & p){
+ float per = 0.0f, dx, dy;
- int nZ = cv::countNonZero(cell);
+ for(size_t i = 0; i < p.size(); ++i){
+ dx = p[i].x - p[(i + 1) % p.size()].x;
+ dy = p[i].y - p[(i + 1) % p.size()].y;
+ per += sqrt((dx * dx) + (dy * dy));
+ }
- if(nZ > (50 * 50) / 2) bits.at(y, x) = 1;
- }
- }
-
- if(hammDistMarker(bits) != 0){
- for(int r = 1; r < 4; r++){
- bits = rotate(bits);
- if(hammDistMarker(bits) != 0) continue;
- else{ found = true; break;}
- }
- }else found = true;
-
-
- if(found){
- for(int y = 0; y < 5; y++){
- code <<= 1;
- if(bits.at(y, 1))
- code |= 1;
-
- code <<= 1;
- if(bits.at(y, 3))
- code |= 1;
- }
-
-
- return code;
- }else
- return -1;
- }
-
- float perimeter(points_vector & p){
- float per = 0.0f, dx, dy;
-
- for(size_t i; i < p.size(); ++i){
- dx = p[i].x - p[(i + 1) % p.size()].x;
- dy = p[i].y - p[(i + 1) % p.size()].y;
- per += sqrt((dx * dx) + (dy * dy));
- }
-
- return per;
- }
+ return per;
+}
/******************************************************************************
* CLASS METHODS *
******************************************************************************/
- Marker::~Marker(){
- points.clear();
- }
+/**
+ * Clear the points vector associated with this marker.
+ */
+Marker::~Marker(){
+ points.clear();
+}
}
diff --git a/src/ve/ucv/ciens/ccg/nxtar/MainActivity.java b/src/ve/ucv/ciens/ccg/nxtar/MainActivity.java
index 0644af4..c328b33 100644
--- a/src/ve/ucv/ciens/ccg/nxtar/MainActivity.java
+++ b/src/ve/ucv/ciens/ccg/nxtar/MainActivity.java
@@ -169,4 +169,10 @@ public class MainActivity extends AndroidApplication implements OSFunctionalityP
return null;
}
}
+
+ @Override
+ public void calibrateCamera() {
+ // TODO Auto-generated method stub
+
+ }
}
\ No newline at end of file
From 784b14c9e9775fdf5cc826d275e039ec194ebd44 Mon Sep 17 00:00:00 2001
From: unknown
Date: Wed, 30 Apr 2014 13:05:36 -0430
Subject: [PATCH 05/18] Implemented the calibration interface.
---
jni/cv_proc.cpp | 18 ++--
src/ve/ucv/ciens/ccg/nxtar/MainActivity.java | 92 +++++++++++++++++---
2 files changed, 92 insertions(+), 18 deletions(-)
diff --git a/jni/cv_proc.cpp b/jni/cv_proc.cpp
index 26f1bb9..26d90ce 100644
--- a/jni/cv_proc.cpp
+++ b/jni/cv_proc.cpp
@@ -23,13 +23,14 @@
#ifdef CAN_LOG
#define log(TAG, MSG) (__android_log_write(ANDROID_LOG_DEBUG, TAG, MSG))
-const char * TAG = "CVPROC_NATIVE";
#else
#define log(TAG, MSG) (1 + 1)
#endif
extern "C"{
+const char * TAG = "CVPROC_NATIVE";
+
JNIEXPORT void JNICALL Java_ve_ucv_ciens_ccg_nxtar_MainActivity_getMarkerCodesAndLocations(JNIEnv* env, jobject jobj, jlong addrMatIn, jlong addrMatOut, jintArray codes){
char codeMsg[128];
std::vector vCodes;
@@ -39,12 +40,13 @@ JNIEXPORT void JNICALL Java_ve_ucv_ciens_ccg_nxtar_MainActivity_getMarkerCodesAn
cv::Mat& myuv = *(cv::Mat*)addrMatIn;
cv::Mat& mbgr = *(cv::Mat*)addrMatOut;
jint * _codes = env->GetIntArrayElements(codes, 0);
+ cv::Mat temp;
log(TAG, "getMarkerCodesAndLocations(): Converting color space before processing.");
- cv::cvtColor(myuv, mbgr, CV_RGB2BGR);
+ cv::cvtColor(myuv, temp, CV_RGB2BGR);
log(TAG, "getMarkerCodesAndLocations(): Finding markers.");
- nxtar::getAllMarkers(vCodes, mbgr);
+ nxtar::getAllMarkers(vCodes, temp);
log(TAG, "getMarkerCodesAndLocations(): Copying marker codes.");
for(int i = 0; i < vCodes.size() && i < 15; i++){
@@ -52,8 +54,9 @@ JNIEXPORT void JNICALL Java_ve_ucv_ciens_ccg_nxtar_MainActivity_getMarkerCodesAn
}
vCodes.clear();
- log(TAG, "getMarkerCodesAndLocations(): Releasing native data.");
+ cv::cvtColor(temp, mbgr, CV_BGR2RGB);
+ log(TAG, "getMarkerCodesAndLocations(): Releasing native data.");
env->ReleaseIntArrayElements(codes, _codes, 0);
}
@@ -66,12 +69,13 @@ JNIEXPORT jboolean JNICALL Java_ve_ucv_ciens_ccg_nxtar_MainActivity_findCalibrat
cv::Mat& myuv = *(cv::Mat*)addrMatIn;
cv::Mat& mbgr = *(cv::Mat*)addrMatOut;
jfloat * _points = env->GetFloatArrayElements(points, 0);
+ cv::Mat temp;
log(TAG, "findCalibrationPattern(): Converting color space before processing.");
- cv::cvtColor(myuv, mbgr, CV_RGB2BGR);
+ cv::cvtColor(myuv, temp, CV_RGB2BGR);
log(TAG, "findCalibrationPattern(): Finding calibration pattern.");
- found = nxtar::findCalibrationPattern(v_points, mbgr);
+ found = nxtar::findCalibrationPattern(v_points, temp);
log(TAG, "findCalibrationPattern(): Copying calibration points.");
for(size_t i = 0, p = 0; i < v_points.size(); i++, p += 2){
@@ -79,6 +83,8 @@ JNIEXPORT jboolean JNICALL Java_ve_ucv_ciens_ccg_nxtar_MainActivity_findCalibrat
_points[p + 1] = (jfloat)v_points[i].y;
}
+ cv::cvtColor(temp, mbgr, CV_BGR2RGB);
+
log(TAG, "findCalibrationPattern(): Releasing native data.");
env->ReleaseFloatArrayElements(points, _points, 0);
diff --git a/src/ve/ucv/ciens/ccg/nxtar/MainActivity.java b/src/ve/ucv/ciens/ccg/nxtar/MainActivity.java
index c328b33..96c40ec 100644
--- a/src/ve/ucv/ciens/ccg/nxtar/MainActivity.java
+++ b/src/ve/ucv/ciens/ccg/nxtar/MainActivity.java
@@ -24,6 +24,7 @@ import org.opencv.imgproc.Imgproc;
import ve.ucv.ciens.ccg.nxtar.interfaces.CVProcessor;
import ve.ucv.ciens.ccg.nxtar.interfaces.OSFunctionalityProvider;
+import ve.ucv.ciens.ccg.nxtar.utils.ProjectConstants;
import android.content.Context;
import android.content.pm.ActivityInfo;
import android.graphics.Bitmap;
@@ -45,6 +46,7 @@ public class MainActivity extends AndroidApplication implements OSFunctionalityP
private static final String CLASS_NAME = MainActivity.class.getSimpleName();
private static boolean ocvOn = false;
+ private static Mat cameraMatrix, distortionCoeffs;
private WifiManager wifiManager;
private MulticastLock multicastLock;
@@ -71,6 +73,9 @@ public class MainActivity extends AndroidApplication implements OSFunctionalityP
public void onCreate(Bundle savedInstanceState){
super.onCreate(savedInstanceState);
+ cameraMatrix = new Mat();
+ distortionCoeffs = new Mat();
+
if(!Ouya.runningOnOuya){
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
}else{
@@ -134,7 +139,7 @@ public class MainActivity extends AndroidApplication implements OSFunctionalityP
// CVProcessor interface methods. //
////////////////////////////////////
@Override
- public CVData findMarkersInFrame(byte[] frame, int w, int h) {
+ public CVMarkerData findMarkersInFrame(byte[] frame){
if(ocvOn){
int codes[] = new int[15];
Bitmap tFrame, mFrame;
@@ -147,14 +152,14 @@ public class MainActivity extends AndroidApplication implements OSFunctionalityP
getMarkerCodesAndLocations(inImg.getNativeObjAddr(), outImg.getNativeObjAddr(), codes);
- Mat temp = new Mat();
- Imgproc.cvtColor(outImg, temp, Imgproc.COLOR_BGR2RGB);
+ //Mat temp = new Mat();
+ //Imgproc.cvtColor(outImg, temp, Imgproc.COLOR_BGR2RGB);
- mFrame = Bitmap.createBitmap(temp.cols(), temp.rows(), Bitmap.Config.RGB_565);
- Utils.matToBitmap(temp, mFrame);
+ mFrame = Bitmap.createBitmap(outImg.cols(), outImg.rows(), Bitmap.Config.RGB_565);
+ Utils.matToBitmap(outImg, mFrame);
mFrame.compress(CompressFormat.JPEG, 100, outputStream);
- CVData data = new CVData();
+ CVMarkerData data = new CVMarkerData();
data.outFrame = outputStream.toByteArray();
data.markerCodes = codes;
@@ -164,15 +169,78 @@ public class MainActivity extends AndroidApplication implements OSFunctionalityP
return data;
}else{
- Gdx.app.debug(TAG, CLASS_NAME + ".processFrame(): OpenCV is not ready or failed to load.");
-
+ Gdx.app.debug(TAG, CLASS_NAME + ".findMarkersInFrame(): OpenCV is not ready or failed to load.");
return null;
}
}
@Override
- public void calibrateCamera() {
- // TODO Auto-generated method stub
-
+ public CVCalibrationData findCalibrationPattern(byte[] frame){
+ if(ocvOn){
+ boolean found;
+ float points[] = new float[ProjectConstants.CALIBRATION_PATTERN_POINTS * 2];
+ Bitmap tFrame, mFrame;
+ Mat inImg = new Mat(), outImg = new Mat();
+ CVCalibrationData data = new CVCalibrationData();
+
+ // Decode the input frame and convert it to an OpenCV Matrix.
+ tFrame = BitmapFactory.decodeByteArray(frame, 0, frame.length);
+ Utils.bitmapToMat(tFrame, inImg);
+
+ // Attempt to find the calibration pattern in the input frame.
+ found = findCalibrationPattern(inImg.getNativeObjAddr(), outImg.getNativeObjAddr(), points);
+
+ // Encode the output image as a JPEG image.
+ mFrame = Bitmap.createBitmap(outImg.cols(), outImg.rows(), Bitmap.Config.RGB_565);
+ Utils.matToBitmap(outImg, mFrame);
+ mFrame.compress(CompressFormat.JPEG, 100, outputStream);
+
+ // Prepare the output data structure.
+ data.outFrame = outputStream.toByteArray();
+ data.calibrationPoints = found ? points : null;
+
+ // Clean up memory.
+ tFrame.recycle();
+ mFrame.recycle();
+ outputStream.reset();
+
+ return data;
+ }else{
+ Gdx.app.debug(TAG, CLASS_NAME + ".findCalibrationPattern(): OpenCV is not ready or failed to load.");
+ return null;
+ }
}
-}
\ No newline at end of file
+ @Override
+ public byte[] undistortFrame(byte[] frame){
+ if(ocvOn){
+ byte undistortedFrame[];
+ Bitmap tFrame, mFrame;
+ Mat inImg = new Mat(), outImg = new Mat();
+
+ // Decode the input frame and convert it to an OpenCV Matrix.
+ tFrame = BitmapFactory.decodeByteArray(frame, 0, frame.length);
+ Utils.bitmapToMat(tFrame, inImg);
+
+ // Apply the undistort correction to the input frame.
+ Imgproc.undistort(inImg, outImg, cameraMatrix, distortionCoeffs);
+
+ // Encode the output image as a JPEG image.
+ mFrame = Bitmap.createBitmap(outImg.cols(), outImg.rows(), Bitmap.Config.RGB_565);
+ Utils.matToBitmap(outImg, mFrame);
+ mFrame.compress(CompressFormat.JPEG, 100, outputStream);
+
+ // Prepare the return frame.
+ undistortedFrame = outputStream.toByteArray();
+
+ // Clean up memory.
+ tFrame.recycle();
+ mFrame.recycle();
+ outputStream.reset();
+
+ return undistortedFrame;
+ }else{
+ Gdx.app.debug(TAG, CLASS_NAME + ".undistortFrame(): OpenCV is not ready or failed to load.");
+ return null;
+ }
+ }
+}
From c29f36a997997d54b65d0820141089b9b2d619d8 Mon Sep 17 00:00:00 2001
From: unknown
Date: Wed, 30 Apr 2014 16:19:59 -0430
Subject: [PATCH 06/18] Added c/c++ builder and finished calibration jni calls.
---
.cproject | 80 +++++++++++++++++++
.settings/org.eclipse.cdt.codan.core.prefs | 67 ++++++++++++++++
jni/Android.mk | 65 +++++++++++++++-
jni/cv_proc.cpp | 58 +++++++++++++-
jni/marker.cpp | 7 +-
src/ve/ucv/ciens/ccg/nxtar/MainActivity.java | 81 ++++++++++++++++++--
6 files changed, 343 insertions(+), 15 deletions(-)
create mode 100644 .cproject
create mode 100644 .settings/org.eclipse.cdt.codan.core.prefs
diff --git a/.cproject b/.cproject
new file mode 100644
index 0000000..6d33835
--- /dev/null
+++ b/.cproject
@@ -0,0 +1,80 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/.settings/org.eclipse.cdt.codan.core.prefs b/.settings/org.eclipse.cdt.codan.core.prefs
new file mode 100644
index 0000000..77386c2
--- /dev/null
+++ b/.settings/org.eclipse.cdt.codan.core.prefs
@@ -0,0 +1,67 @@
+eclipse.preferences.version=1
+org.eclipse.cdt.codan.checkers.errnoreturn=Warning
+org.eclipse.cdt.codan.checkers.errnoreturn.params={launchModes\=>{RUN_ON_FULL_BUILD\=>true,RUN_ON_INC_BUILD\=>true,RUN_ON_FILE_OPEN\=>false,RUN_ON_FILE_SAVE\=>false,RUN_AS_YOU_TYPE\=>true,RUN_ON_DEMAND\=>true},implicit\=>false}
+org.eclipse.cdt.codan.checkers.errreturnvalue=Error
+org.eclipse.cdt.codan.checkers.errreturnvalue.params={launchModes\=>{RUN_ON_FULL_BUILD\=>true,RUN_ON_INC_BUILD\=>true,RUN_ON_FILE_OPEN\=>false,RUN_ON_FILE_SAVE\=>false,RUN_AS_YOU_TYPE\=>true,RUN_ON_DEMAND\=>true}}
+org.eclipse.cdt.codan.checkers.noreturn=Error
+org.eclipse.cdt.codan.checkers.noreturn.params={launchModes\=>{RUN_ON_FULL_BUILD\=>true,RUN_ON_INC_BUILD\=>true,RUN_ON_FILE_OPEN\=>false,RUN_ON_FILE_SAVE\=>false,RUN_AS_YOU_TYPE\=>true,RUN_ON_DEMAND\=>true},implicit\=>false}
+org.eclipse.cdt.codan.internal.checkers.AbstractClassCreation=Error
+org.eclipse.cdt.codan.internal.checkers.AbstractClassCreation.params={launchModes\=>{RUN_ON_FULL_BUILD\=>true,RUN_ON_INC_BUILD\=>true,RUN_ON_FILE_OPEN\=>false,RUN_ON_FILE_SAVE\=>false,RUN_AS_YOU_TYPE\=>true,RUN_ON_DEMAND\=>true}}
+org.eclipse.cdt.codan.internal.checkers.AmbiguousProblem=Error
+org.eclipse.cdt.codan.internal.checkers.AmbiguousProblem.params={launchModes\=>{RUN_ON_FULL_BUILD\=>true,RUN_ON_INC_BUILD\=>true,RUN_ON_FILE_OPEN\=>false,RUN_ON_FILE_SAVE\=>false,RUN_AS_YOU_TYPE\=>true,RUN_ON_DEMAND\=>true}}
+org.eclipse.cdt.codan.internal.checkers.AssignmentInConditionProblem=Warning
+org.eclipse.cdt.codan.internal.checkers.AssignmentInConditionProblem.params={launchModes\=>{RUN_ON_FULL_BUILD\=>true,RUN_ON_INC_BUILD\=>true,RUN_ON_FILE_OPEN\=>false,RUN_ON_FILE_SAVE\=>false,RUN_AS_YOU_TYPE\=>true,RUN_ON_DEMAND\=>true}}
+org.eclipse.cdt.codan.internal.checkers.AssignmentToItselfProblem=Error
+org.eclipse.cdt.codan.internal.checkers.AssignmentToItselfProblem.params={launchModes\=>{RUN_ON_FULL_BUILD\=>true,RUN_ON_INC_BUILD\=>true,RUN_ON_FILE_OPEN\=>false,RUN_ON_FILE_SAVE\=>false,RUN_AS_YOU_TYPE\=>true,RUN_ON_DEMAND\=>true}}
+org.eclipse.cdt.codan.internal.checkers.CaseBreakProblem=Warning
+org.eclipse.cdt.codan.internal.checkers.CaseBreakProblem.params={launchModes\=>{RUN_ON_FULL_BUILD\=>true,RUN_ON_INC_BUILD\=>true,RUN_ON_FILE_OPEN\=>false,RUN_ON_FILE_SAVE\=>false,RUN_AS_YOU_TYPE\=>true,RUN_ON_DEMAND\=>true},no_break_comment\=>"no break",last_case_param\=>false,empty_case_param\=>false}
+org.eclipse.cdt.codan.internal.checkers.CatchByReference=Warning
+org.eclipse.cdt.codan.internal.checkers.CatchByReference.params={launchModes\=>{RUN_ON_FULL_BUILD\=>true,RUN_ON_INC_BUILD\=>true,RUN_ON_FILE_OPEN\=>false,RUN_ON_FILE_SAVE\=>false,RUN_AS_YOU_TYPE\=>true,RUN_ON_DEMAND\=>true},unknown\=>false,exceptions\=>()}
+org.eclipse.cdt.codan.internal.checkers.CircularReferenceProblem=Error
+org.eclipse.cdt.codan.internal.checkers.CircularReferenceProblem.params={launchModes\=>{RUN_ON_FULL_BUILD\=>true,RUN_ON_INC_BUILD\=>true,RUN_ON_FILE_OPEN\=>false,RUN_ON_FILE_SAVE\=>false,RUN_AS_YOU_TYPE\=>true,RUN_ON_DEMAND\=>true}}
+org.eclipse.cdt.codan.internal.checkers.ClassMembersInitialization=Warning
+org.eclipse.cdt.codan.internal.checkers.ClassMembersInitialization.params={launchModes\=>{RUN_ON_FULL_BUILD\=>true,RUN_ON_INC_BUILD\=>true,RUN_ON_FILE_OPEN\=>false,RUN_ON_FILE_SAVE\=>false,RUN_AS_YOU_TYPE\=>true,RUN_ON_DEMAND\=>true},skip\=>true}
+org.eclipse.cdt.codan.internal.checkers.FieldResolutionProblem=Error
+org.eclipse.cdt.codan.internal.checkers.FieldResolutionProblem.params={launchModes\=>{RUN_ON_FULL_BUILD\=>true,RUN_ON_INC_BUILD\=>true,RUN_ON_FILE_OPEN\=>false,RUN_ON_FILE_SAVE\=>false,RUN_AS_YOU_TYPE\=>true,RUN_ON_DEMAND\=>true}}
+org.eclipse.cdt.codan.internal.checkers.FunctionResolutionProblem=Error
+org.eclipse.cdt.codan.internal.checkers.FunctionResolutionProblem.params={launchModes\=>{RUN_ON_FULL_BUILD\=>true,RUN_ON_INC_BUILD\=>true,RUN_ON_FILE_OPEN\=>false,RUN_ON_FILE_SAVE\=>false,RUN_AS_YOU_TYPE\=>true,RUN_ON_DEMAND\=>true}}
+org.eclipse.cdt.codan.internal.checkers.InvalidArguments=Error
+org.eclipse.cdt.codan.internal.checkers.InvalidArguments.params={launchModes\=>{RUN_ON_FULL_BUILD\=>true,RUN_ON_INC_BUILD\=>true,RUN_ON_FILE_OPEN\=>false,RUN_ON_FILE_SAVE\=>false,RUN_AS_YOU_TYPE\=>true,RUN_ON_DEMAND\=>true}}
+org.eclipse.cdt.codan.internal.checkers.InvalidTemplateArgumentsProblem=Error
+org.eclipse.cdt.codan.internal.checkers.InvalidTemplateArgumentsProblem.params={launchModes\=>{RUN_ON_FULL_BUILD\=>true,RUN_ON_INC_BUILD\=>true,RUN_ON_FILE_OPEN\=>false,RUN_ON_FILE_SAVE\=>false,RUN_AS_YOU_TYPE\=>true,RUN_ON_DEMAND\=>true}}
+org.eclipse.cdt.codan.internal.checkers.LabelStatementNotFoundProblem=Error
+org.eclipse.cdt.codan.internal.checkers.LabelStatementNotFoundProblem.params={launchModes\=>{RUN_ON_FULL_BUILD\=>true,RUN_ON_INC_BUILD\=>true,RUN_ON_FILE_OPEN\=>false,RUN_ON_FILE_SAVE\=>false,RUN_AS_YOU_TYPE\=>true,RUN_ON_DEMAND\=>true}}
+org.eclipse.cdt.codan.internal.checkers.MemberDeclarationNotFoundProblem=Error
+org.eclipse.cdt.codan.internal.checkers.MemberDeclarationNotFoundProblem.params={launchModes\=>{RUN_ON_FULL_BUILD\=>true,RUN_ON_INC_BUILD\=>true,RUN_ON_FILE_OPEN\=>false,RUN_ON_FILE_SAVE\=>false,RUN_AS_YOU_TYPE\=>true,RUN_ON_DEMAND\=>true}}
+org.eclipse.cdt.codan.internal.checkers.MethodResolutionProblem=Error
+org.eclipse.cdt.codan.internal.checkers.MethodResolutionProblem.params={launchModes\=>{RUN_ON_FULL_BUILD\=>true,RUN_ON_INC_BUILD\=>true,RUN_ON_FILE_OPEN\=>false,RUN_ON_FILE_SAVE\=>false,RUN_AS_YOU_TYPE\=>true,RUN_ON_DEMAND\=>true}}
+org.eclipse.cdt.codan.internal.checkers.NamingConventionFunctionChecker=-Info
+org.eclipse.cdt.codan.internal.checkers.NamingConventionFunctionChecker.params={launchModes\=>{RUN_ON_FULL_BUILD\=>true,RUN_ON_INC_BUILD\=>true,RUN_ON_FILE_OPEN\=>false,RUN_ON_FILE_SAVE\=>false,RUN_AS_YOU_TYPE\=>true,RUN_ON_DEMAND\=>true},pattern\=>"^[a-z]",macro\=>true,exceptions\=>()}
+org.eclipse.cdt.codan.internal.checkers.NonVirtualDestructorProblem=Warning
+org.eclipse.cdt.codan.internal.checkers.NonVirtualDestructorProblem.params={launchModes\=>{RUN_ON_FULL_BUILD\=>true,RUN_ON_INC_BUILD\=>true,RUN_ON_FILE_OPEN\=>false,RUN_ON_FILE_SAVE\=>false,RUN_AS_YOU_TYPE\=>true,RUN_ON_DEMAND\=>true}}
+org.eclipse.cdt.codan.internal.checkers.OverloadProblem=Error
+org.eclipse.cdt.codan.internal.checkers.OverloadProblem.params={launchModes\=>{RUN_ON_FULL_BUILD\=>true,RUN_ON_INC_BUILD\=>true,RUN_ON_FILE_OPEN\=>false,RUN_ON_FILE_SAVE\=>false,RUN_AS_YOU_TYPE\=>true,RUN_ON_DEMAND\=>true}}
+org.eclipse.cdt.codan.internal.checkers.RedeclarationProblem=Error
+org.eclipse.cdt.codan.internal.checkers.RedeclarationProblem.params={launchModes\=>{RUN_ON_FULL_BUILD\=>true,RUN_ON_INC_BUILD\=>true,RUN_ON_FILE_OPEN\=>false,RUN_ON_FILE_SAVE\=>false,RUN_AS_YOU_TYPE\=>true,RUN_ON_DEMAND\=>true}}
+org.eclipse.cdt.codan.internal.checkers.RedefinitionProblem=Error
+org.eclipse.cdt.codan.internal.checkers.RedefinitionProblem.params={launchModes\=>{RUN_ON_FULL_BUILD\=>true,RUN_ON_INC_BUILD\=>true,RUN_ON_FILE_OPEN\=>false,RUN_ON_FILE_SAVE\=>false,RUN_AS_YOU_TYPE\=>true,RUN_ON_DEMAND\=>true}}
+org.eclipse.cdt.codan.internal.checkers.ReturnStyleProblem=-Warning
+org.eclipse.cdt.codan.internal.checkers.ReturnStyleProblem.params={launchModes\=>{RUN_ON_FULL_BUILD\=>true,RUN_ON_INC_BUILD\=>true,RUN_ON_FILE_OPEN\=>false,RUN_ON_FILE_SAVE\=>false,RUN_AS_YOU_TYPE\=>true,RUN_ON_DEMAND\=>true}}
+org.eclipse.cdt.codan.internal.checkers.ScanfFormatStringSecurityProblem=-Warning
+org.eclipse.cdt.codan.internal.checkers.ScanfFormatStringSecurityProblem.params={launchModes\=>{RUN_ON_FULL_BUILD\=>true,RUN_ON_INC_BUILD\=>true,RUN_ON_FILE_OPEN\=>false,RUN_ON_FILE_SAVE\=>false,RUN_AS_YOU_TYPE\=>true,RUN_ON_DEMAND\=>true}}
+org.eclipse.cdt.codan.internal.checkers.StatementHasNoEffectProblem=Warning
+org.eclipse.cdt.codan.internal.checkers.StatementHasNoEffectProblem.params={launchModes\=>{RUN_ON_FULL_BUILD\=>true,RUN_ON_INC_BUILD\=>true,RUN_ON_FILE_OPEN\=>false,RUN_ON_FILE_SAVE\=>false,RUN_AS_YOU_TYPE\=>true,RUN_ON_DEMAND\=>true},macro\=>true,exceptions\=>()}
+org.eclipse.cdt.codan.internal.checkers.SuggestedParenthesisProblem=Warning
+org.eclipse.cdt.codan.internal.checkers.SuggestedParenthesisProblem.params={launchModes\=>{RUN_ON_FULL_BUILD\=>true,RUN_ON_INC_BUILD\=>true,RUN_ON_FILE_OPEN\=>false,RUN_ON_FILE_SAVE\=>false,RUN_AS_YOU_TYPE\=>true,RUN_ON_DEMAND\=>true},paramNot\=>false}
+org.eclipse.cdt.codan.internal.checkers.SuspiciousSemicolonProblem=Warning
+org.eclipse.cdt.codan.internal.checkers.SuspiciousSemicolonProblem.params={launchModes\=>{RUN_ON_FULL_BUILD\=>true,RUN_ON_INC_BUILD\=>true,RUN_ON_FILE_OPEN\=>false,RUN_ON_FILE_SAVE\=>false,RUN_AS_YOU_TYPE\=>true,RUN_ON_DEMAND\=>true},else\=>false,afterelse\=>false}
+org.eclipse.cdt.codan.internal.checkers.TypeResolutionProblem=Error
+org.eclipse.cdt.codan.internal.checkers.TypeResolutionProblem.params={launchModes\=>{RUN_ON_FULL_BUILD\=>true,RUN_ON_INC_BUILD\=>true,RUN_ON_FILE_OPEN\=>false,RUN_ON_FILE_SAVE\=>false,RUN_AS_YOU_TYPE\=>true,RUN_ON_DEMAND\=>true}}
+org.eclipse.cdt.codan.internal.checkers.UnusedFunctionDeclarationProblem=Warning
+org.eclipse.cdt.codan.internal.checkers.UnusedFunctionDeclarationProblem.params={launchModes\=>{RUN_ON_FULL_BUILD\=>true,RUN_ON_INC_BUILD\=>true,RUN_ON_FILE_OPEN\=>false,RUN_ON_FILE_SAVE\=>false,RUN_AS_YOU_TYPE\=>true,RUN_ON_DEMAND\=>true},macro\=>true}
+org.eclipse.cdt.codan.internal.checkers.UnusedStaticFunctionProblem=Warning
+org.eclipse.cdt.codan.internal.checkers.UnusedStaticFunctionProblem.params={launchModes\=>{RUN_ON_FULL_BUILD\=>true,RUN_ON_INC_BUILD\=>true,RUN_ON_FILE_OPEN\=>false,RUN_ON_FILE_SAVE\=>false,RUN_AS_YOU_TYPE\=>true,RUN_ON_DEMAND\=>true},macro\=>true}
+org.eclipse.cdt.codan.internal.checkers.UnusedVariableDeclarationProblem=Warning
+org.eclipse.cdt.codan.internal.checkers.UnusedVariableDeclarationProblem.params={launchModes\=>{RUN_ON_FULL_BUILD\=>true,RUN_ON_INC_BUILD\=>true,RUN_ON_FILE_OPEN\=>false,RUN_ON_FILE_SAVE\=>false,RUN_AS_YOU_TYPE\=>true,RUN_ON_DEMAND\=>true},macro\=>true,exceptions\=>("@(\#)","$Id")}
+org.eclipse.cdt.codan.internal.checkers.VariableResolutionProblem=Error
+org.eclipse.cdt.codan.internal.checkers.VariableResolutionProblem.params={launchModes\=>{RUN_ON_FULL_BUILD\=>true,RUN_ON_INC_BUILD\=>true,RUN_ON_FILE_OPEN\=>false,RUN_ON_FILE_SAVE\=>false,RUN_AS_YOU_TYPE\=>true,RUN_ON_DEMAND\=>true}}
diff --git a/jni/Android.mk b/jni/Android.mk
index 6cad0e2..8c132e2 100644
--- a/jni/Android.mk
+++ b/jni/Android.mk
@@ -26,4 +26,67 @@ include $(CLEAR_VARS)
LOCAL_MODULE := gdx-freetype
LOCAL_SRC_FILES := $(TARGET_ARCH_ABI)/libgdx-freetype.so
-include $(PREBUILT_SHARED_LIBRARY)
\ No newline at end of file
+include $(PREBUILT_SHARED_LIBRARY)
+
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := ocv_tegra_java
+LOCAL_SRC_FILES := C:\NVPACK\OpenCV-2.4.5-Tegra-sdk-r2\sdk\native\libs\tegra3\libopencv_java.so
+
+include $(PREBUILT_SHARED_LIBRARY)
+
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := ocv_tegra_info
+LOCAL_SRC_FILES := C:\NVPACK\OpenCV-2.4.5-Tegra-sdk-r2\sdk\native\libs\tegra3\libopencv_info.so
+
+include $(PREBUILT_SHARED_LIBRARY)
+
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := ocv_tegra_native_camera_220
+LOCAL_SRC_FILES := C:\NVPACK\OpenCV-2.4.5-Tegra-sdk-r2\sdk\native\libs\tegra3\libnative_camera_r2.2.0.so
+
+include $(PREBUILT_SHARED_LIBRARY)
+
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := ocv_tegra_native_camera_233
+LOCAL_SRC_FILES := C:\NVPACK\OpenCV-2.4.5-Tegra-sdk-r2\sdk\native\libs\tegra3\libnative_camera_r2.3.3.so
+
+include $(PREBUILT_SHARED_LIBRARY)
+
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := ocv_tegra_native_camera_301
+LOCAL_SRC_FILES := C:\NVPACK\OpenCV-2.4.5-Tegra-sdk-r2\sdk\native\libs\tegra3\libnative_camera_r3.0.1.so
+
+include $(PREBUILT_SHARED_LIBRARY)
+
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := ocv_tegra_native_camera_400
+LOCAL_SRC_FILES := C:\NVPACK\OpenCV-2.4.5-Tegra-sdk-r2\sdk\native\libs\tegra3\libnative_camera_r4.0.0.so
+
+include $(PREBUILT_SHARED_LIBRARY)
+
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := ocv_tegra_native_camera_403
+LOCAL_SRC_FILES := C:\NVPACK\OpenCV-2.4.5-Tegra-sdk-r2\sdk\native\libs\tegra3\libnative_camera_r4.0.3.so
+
+include $(PREBUILT_SHARED_LIBRARY)
+
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := ocv_tegra_native_camera_411
+LOCAL_SRC_FILES := C:\NVPACK\OpenCV-2.4.5-Tegra-sdk-r2\sdk\native\libs\tegra3\libnative_camera_r4.1.1.so
+
+include $(PREBUILT_SHARED_LIBRARY)
+
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := ocv_tegra_native_camera_420
+LOCAL_SRC_FILES := C:\NVPACK\OpenCV-2.4.5-Tegra-sdk-r2\sdk\native\libs\tegra3\libnative_camera_r4.2.0.so
+
+include $(PREBUILT_SHARED_LIBRARY)
diff --git a/jni/cv_proc.cpp b/jni/cv_proc.cpp
index 26d90ce..bb2b9a2 100644
--- a/jni/cv_proc.cpp
+++ b/jni/cv_proc.cpp
@@ -16,10 +16,13 @@
#include
#include
#include
+#include
#include "marker.hpp"
-//#define CAN_LOG
+#define CAN_LOG
+#define POINTS_PER_CALIBRATION_SAMPLE 54
+#define CALIBRATION_SAMPLES 10
#ifdef CAN_LOG
#define log(TAG, MSG) (__android_log_write(ANDROID_LOG_DEBUG, TAG, MSG))
@@ -27,10 +30,13 @@
#define log(TAG, MSG) (1 + 1)
#endif
-extern "C"{
-
const char * TAG = "CVPROC_NATIVE";
+extern "C"{
+
+/**
+ *
+ */
JNIEXPORT void JNICALL Java_ve_ucv_ciens_ccg_nxtar_MainActivity_getMarkerCodesAndLocations(JNIEnv* env, jobject jobj, jlong addrMatIn, jlong addrMatOut, jintArray codes){
char codeMsg[128];
std::vector vCodes;
@@ -60,6 +66,9 @@ JNIEXPORT void JNICALL Java_ve_ucv_ciens_ccg_nxtar_MainActivity_getMarkerCodesAn
env->ReleaseIntArrayElements(codes, _codes, 0);
}
+/**
+ *
+ */
JNIEXPORT jboolean JNICALL Java_ve_ucv_ciens_ccg_nxtar_MainActivity_findCalibrationPattern(JNIEnv* env, jobject jobj, jlong addrMatIn, jlong addrMatOut, jfloatArray points){
nxtar::points_vector v_points;
bool found;
@@ -91,4 +100,47 @@ JNIEXPORT jboolean JNICALL Java_ve_ucv_ciens_ccg_nxtar_MainActivity_findCalibrat
return (jboolean)found;
}
+/**
+ *
+ */
+JNIEXPORT jdouble JNICALL Java_ve_ucv_ciens_ccg_nxtar_MainActivity_calibrateCameraParameters(JNIEnv* env, jobject jobj, jlong addrMatIn, jlong addrMatOut, jlong addrMatFrame, jfloatArray points){
+ double error;
+ std::vector imagePoints;
+
+ // Get native object addresses.
+ log(TAG, "calibrateCameraParameters(): Requesting native data.");
+ cv::Mat& mIn = *(cv::Mat*)addrMatIn;
+ cv::Mat& mOut = *(cv::Mat*)addrMatOut;
+ cv::Mat& mFrame = *(cv::Mat*)addrMatFrame;
+ jfloat * _points = env->GetFloatArrayElements(points, 0);
+
+ // Prepare the image points data structure.
+ log(TAG, "calibrateCameraParameters(): Preparing image points.");
+ for(int i = 0; i < CALIBRATION_SAMPLES; i++){
+ nxtar::points_vector tempVector;
+ for(int j = 0, p = 0; j < POINTS_PER_CALIBRATION_SAMPLE; j++, p += 2){
+ tempVector.push_back(cv::Point2f(_points[p], _points[p + 1]));
+ }
+ imagePoints.push_back(tempVector);
+ }
+
+ // Get the camera parameters.
+ log(TAG, "calibrateCameraParameters(): Getting camera parameters.");
+ error = nxtar::getCameraParameters(mIn, mOut, imagePoints, mFrame.size());
+
+ // Clear the image points.
+ log(TAG, "calibrateCameraParameters(): Clearing memory.");
+ for(int i = 0; i < imagePoints.size(); i++){
+ imagePoints[i].clear();
+ }
+ imagePoints.clear();
+
+ // Release native memory.
+ log(TAG, "calibrateCameraParameters(): Releasing native data.");
+ env->ReleaseFloatArrayElements(points, _points, 0);
+
+ // Return the calibration error as calculated by OpenCV.
+ return error;
+}
+
}
diff --git a/jni/marker.cpp b/jni/marker.cpp
index 664669f..660b4dc 100644
--- a/jni/marker.cpp
+++ b/jni/marker.cpp
@@ -16,6 +16,7 @@
#include
#include
#include
+#include
#ifdef DESKTOP
#include
#endif
@@ -394,9 +395,9 @@ cv::Mat rotate(cv::Mat in){
cv::Mat out;
in.copyTo(out);
- for (int i=0;i(i,j)=in.at(in.cols-j-1,i);
+ for (int i = 0; i < in.rows; i++){
+ for (int j = 0; j < in.cols; j++){
+ out.at(i, j)=in.at(in.cols-j - 1, i);
}
}
diff --git a/src/ve/ucv/ciens/ccg/nxtar/MainActivity.java b/src/ve/ucv/ciens/ccg/nxtar/MainActivity.java
index 96c40ec..408926a 100644
--- a/src/ve/ucv/ciens/ccg/nxtar/MainActivity.java
+++ b/src/ve/ucv/ciens/ccg/nxtar/MainActivity.java
@@ -17,6 +17,8 @@ package ve.ucv.ciens.ccg.nxtar;
import java.io.ByteArrayOutputStream;
+import org.opencv.android.BaseLoaderCallback;
+import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.android.Utils;
import org.opencv.core.Mat;
@@ -45,6 +47,7 @@ public class MainActivity extends AndroidApplication implements OSFunctionalityP
private static final String TAG = "NXTAR_ANDROID_MAIN";
private static final String CLASS_NAME = MainActivity.class.getSimpleName();
+ private static final ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
private static boolean ocvOn = false;
private static Mat cameraMatrix, distortionCoeffs;
@@ -52,20 +55,23 @@ public class MainActivity extends AndroidApplication implements OSFunctionalityP
private MulticastLock multicastLock;
private Handler uiHandler;
private Context uiContext;
- private final ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
+ private BaseLoaderCallback loaderCallback;
public native void getMarkerCodesAndLocations(long inMat, long outMat, int[] codes);
public native boolean findCalibrationPattern(long inMat, long outMat, float[] points);
+ public native double calibrateCameraParameters(long camMat, long distMat, long frame, float[] calibrationPoints);
static{
- if(!OpenCVLoader.initDebug())
- ocvOn = false;
+ if(Ouya.runningOnOuya){
+ if(!OpenCVLoader.initDebug())
+ ocvOn = false;
- try{
- System.loadLibrary("cvproc");
- ocvOn = true;
- }catch(UnsatisfiedLinkError u){
- ocvOn = false;
+ try{
+ System.loadLibrary("cvproc");
+ ocvOn = true;
+ }catch(UnsatisfiedLinkError u){
+ ocvOn = false;
+ }
}
}
@@ -92,6 +98,26 @@ public class MainActivity extends AndroidApplication implements OSFunctionalityP
cfg.useCompass = false;
cfg.useWakelock = true;
+ if(!ocvOn && !Ouya.runningOnOuya){
+ loaderCallback = new BaseLoaderCallback(this){
+ @Override
+ public void onManagerConnected(int status){
+ switch(status){
+ case LoaderCallbackInterface.SUCCESS:
+ System.loadLibrary("cvproc");
+ ocvOn = true;
+ break;
+ default:
+ Toast.makeText(uiContext, R.string.ocv_failed, Toast.LENGTH_LONG).show();
+ Gdx.app.exit();
+ break;
+ }
+ }
+ };
+
+ OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_8, this, loaderCallback);
+ }
+
initialize(new NxtARCore(this), cfg);
}
@@ -138,6 +164,10 @@ public class MainActivity extends AndroidApplication implements OSFunctionalityP
////////////////////////////////////
// CVProcessor interface methods. //
////////////////////////////////////
+
+ /**
+ *
+ */
@Override
public CVMarkerData findMarkersInFrame(byte[] frame){
if(ocvOn){
@@ -174,6 +204,9 @@ public class MainActivity extends AndroidApplication implements OSFunctionalityP
}
}
+ /**
+ *
+ */
@Override
public CVCalibrationData findCalibrationPattern(byte[] frame){
if(ocvOn){
@@ -210,6 +243,10 @@ public class MainActivity extends AndroidApplication implements OSFunctionalityP
return null;
}
}
+
+ /**
+ *
+ */
@Override
public byte[] undistortFrame(byte[] frame){
if(ocvOn){
@@ -243,4 +280,32 @@ public class MainActivity extends AndroidApplication implements OSFunctionalityP
return null;
}
}
+
+ /**
+ *
+ */
+ @Override
+ public void calibrateCamera(float[][] calibrationSamples, byte[] frame) {
+ if(ocvOn){
+ float[] calibrationPoints = new float[ProjectConstants.CALIBRATION_PATTERN_POINTS * 2 * ProjectConstants.CALIBRATION_SAMPLES];
+ int w = ProjectConstants.CALIBRATION_PATTERN_POINTS * 2;
+ Bitmap tFrame;
+ Mat inImg = new Mat();
+
+ for(int i = 0; i < ProjectConstants.CALIBRATION_SAMPLES; i++){
+ for(int j = 0, p = 0; j < ProjectConstants.CALIBRATION_PATTERN_POINTS; j++, p += 2){
+ calibrationPoints[p + (w * i)] = calibrationSamples[i][p];
+ calibrationPoints[(p + 1) + (w * i)] = calibrationSamples[i][p + 1];
+ }
+ }
+
+ tFrame = BitmapFactory.decodeByteArray(frame, 0, frame.length);
+ Utils.bitmapToMat(tFrame, inImg);
+
+ calibrateCameraParameters(cameraMatrix.getNativeObjAddr(), distortionCoeffs.getNativeObjAddr(), inImg.getNativeObjAddr(), calibrationPoints);
+
+ }else{
+ Gdx.app.debug(TAG, CLASS_NAME + ".calibrateCamera(): OpenCV is not ready or failed to load.");
+ }
+ }
}
From 729b21400c87ca93b2c686a049d8e407c8569802 Mon Sep 17 00:00:00 2001
From: unknown
Date: Fri, 2 May 2014 11:15:51 -0430
Subject: [PATCH 07/18] Fixed some initialization bugs.
---
jni/cv_proc.cpp | 4 +-
src/ve/ucv/ciens/ccg/nxtar/MainActivity.java | 46 +++++++++++++++-----
2 files changed, 38 insertions(+), 12 deletions(-)
diff --git a/jni/cv_proc.cpp b/jni/cv_proc.cpp
index bb2b9a2..d42eaa9 100644
--- a/jni/cv_proc.cpp
+++ b/jni/cv_proc.cpp
@@ -20,14 +20,14 @@
#include "marker.hpp"
-#define CAN_LOG
+//#define CAN_LOG
#define POINTS_PER_CALIBRATION_SAMPLE 54
#define CALIBRATION_SAMPLES 10
#ifdef CAN_LOG
#define log(TAG, MSG) (__android_log_write(ANDROID_LOG_DEBUG, TAG, MSG))
#else
-#define log(TAG, MSG) (1 + 1)
+#define log(TAG, MSG) ;
#endif
const char * TAG = "CVPROC_NATIVE";
diff --git a/src/ve/ucv/ciens/ccg/nxtar/MainActivity.java b/src/ve/ucv/ciens/ccg/nxtar/MainActivity.java
index 408926a..25b4291 100644
--- a/src/ve/ucv/ciens/ccg/nxtar/MainActivity.java
+++ b/src/ve/ucv/ciens/ccg/nxtar/MainActivity.java
@@ -56,6 +56,7 @@ public class MainActivity extends AndroidApplication implements OSFunctionalityP
private Handler uiHandler;
private Context uiContext;
private BaseLoaderCallback loaderCallback;
+ private boolean cameraCalibrated;
public native void getMarkerCodesAndLocations(long inMat, long outMat, int[] codes);
public native boolean findCalibrationPattern(long inMat, long outMat, float[] points);
@@ -79,8 +80,7 @@ public class MainActivity extends AndroidApplication implements OSFunctionalityP
public void onCreate(Bundle savedInstanceState){
super.onCreate(savedInstanceState);
- cameraMatrix = new Mat();
- distortionCoeffs = new Mat();
+ cameraCalibrated = false;
if(!Ouya.runningOnOuya){
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
@@ -92,13 +92,7 @@ public class MainActivity extends AndroidApplication implements OSFunctionalityP
uiContext = this;
wifiManager = (WifiManager)getSystemService(Context.WIFI_SERVICE);
- AndroidApplicationConfiguration cfg = new AndroidApplicationConfiguration();
- cfg.useGL20 = true;
- cfg.useAccelerometer = false;
- cfg.useCompass = false;
- cfg.useWakelock = true;
-
- if(!ocvOn && !Ouya.runningOnOuya){
+ if(!Ouya.runningOnOuya){
loaderCallback = new BaseLoaderCallback(this){
@Override
public void onManagerConnected(int status){
@@ -106,6 +100,8 @@ public class MainActivity extends AndroidApplication implements OSFunctionalityP
case LoaderCallbackInterface.SUCCESS:
System.loadLibrary("cvproc");
ocvOn = true;
+ cameraMatrix = new Mat();
+ distortionCoeffs = new Mat();
break;
default:
Toast.makeText(uiContext, R.string.ocv_failed, Toast.LENGTH_LONG).show();
@@ -115,9 +111,22 @@ public class MainActivity extends AndroidApplication implements OSFunctionalityP
}
};
- OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_8, this, loaderCallback);
+ OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_7, this, loaderCallback);
+ }else{
+ if(!ocvOn){
+ Toast.makeText(uiContext, R.string.ocv_failed, Toast.LENGTH_LONG).show();
+ }else{
+ cameraMatrix = new Mat();
+ distortionCoeffs = new Mat();
+ }
}
+ AndroidApplicationConfiguration cfg = new AndroidApplicationConfiguration();
+ cfg.useGL20 = true;
+ cfg.useAccelerometer = false;
+ cfg.useCompass = false;
+ cfg.useWakelock = true;
+
initialize(new NxtARCore(this), cfg);
}
@@ -166,7 +175,15 @@ public class MainActivity extends AndroidApplication implements OSFunctionalityP
////////////////////////////////////
/**
+ *
Implementation of the findMarkersInFrame method.
*
+ *
This implementation finds up to 15 markers in the input
+ * image and returns their codes and locations in the CVMarkerData
+ * structure. The markers are higlihted in the input image.
+ *
+ * @param frame The JPEG encoded input image.
+ * @return A data structure containing the processed output image, the
+ * detected marker codes and their respective locations.
*/
@Override
public CVMarkerData findMarkersInFrame(byte[] frame){
@@ -303,9 +320,18 @@ public class MainActivity extends AndroidApplication implements OSFunctionalityP
Utils.bitmapToMat(tFrame, inImg);
calibrateCameraParameters(cameraMatrix.getNativeObjAddr(), distortionCoeffs.getNativeObjAddr(), inImg.getNativeObjAddr(), calibrationPoints);
+ cameraCalibrated = true;
}else{
Gdx.app.debug(TAG, CLASS_NAME + ".calibrateCamera(): OpenCV is not ready or failed to load.");
}
}
+
+ /**
+ *
+ */
+ @Override
+ public boolean cameraIsCalibrated() {
+ return ocvOn && cameraCalibrated;
+ }
}
From a3431937d00423b73056b62fc05103a1bbf32ae2 Mon Sep 17 00:00:00 2001
From: unknown
Date: Fri, 2 May 2014 13:59:58 -0430
Subject: [PATCH 08/18] Camera calibration successfully ported.
---
src/ve/ucv/ciens/ccg/nxtar/MainActivity.java | 7 +++++--
1 file changed, 5 insertions(+), 2 deletions(-)
diff --git a/src/ve/ucv/ciens/ccg/nxtar/MainActivity.java b/src/ve/ucv/ciens/ccg/nxtar/MainActivity.java
index 25b4291..cab216c 100644
--- a/src/ve/ucv/ciens/ccg/nxtar/MainActivity.java
+++ b/src/ve/ucv/ciens/ccg/nxtar/MainActivity.java
@@ -319,14 +319,17 @@ public class MainActivity extends AndroidApplication implements OSFunctionalityP
tFrame = BitmapFactory.decodeByteArray(frame, 0, frame.length);
Utils.bitmapToMat(tFrame, inImg);
- calibrateCameraParameters(cameraMatrix.getNativeObjAddr(), distortionCoeffs.getNativeObjAddr(), inImg.getNativeObjAddr(), calibrationPoints);
+ double error = calibrateCameraParameters(cameraMatrix.getNativeObjAddr(), distortionCoeffs.getNativeObjAddr(), inImg.getNativeObjAddr(), calibrationPoints);
+
+ Gdx.app.log(TAG, CLASS_NAME + "calibrateCamera(): calibrateCameraParameters retured " + Double.toString(error));
+
cameraCalibrated = true;
}else{
Gdx.app.debug(TAG, CLASS_NAME + ".calibrateCamera(): OpenCV is not ready or failed to load.");
}
}
-
+
/**
*
*/
From f04c5806d1bd55fd1241bc3284005b7ae25055ca Mon Sep 17 00:00:00 2001
From: unknown
Date: Mon, 5 May 2014 12:32:50 -0430
Subject: [PATCH 09/18] Added intradocumentation.
---
src/ve/ucv/ciens/ccg/nxtar/MainActivity.java | 179 ++++++++++++++-----
1 file changed, 130 insertions(+), 49 deletions(-)
diff --git a/src/ve/ucv/ciens/ccg/nxtar/MainActivity.java b/src/ve/ucv/ciens/ccg/nxtar/MainActivity.java
index cab216c..9d4b2fa 100644
--- a/src/ve/ucv/ciens/ccg/nxtar/MainActivity.java
+++ b/src/ve/ucv/ciens/ccg/nxtar/MainActivity.java
@@ -43,6 +43,12 @@ import com.badlogic.gdx.backends.android.AndroidApplication;
import com.badlogic.gdx.backends.android.AndroidApplicationConfiguration;
import com.badlogic.gdx.controllers.mappings.Ouya;
+/**
+ *
The main activity of the application.
+ *
+ *
Provides operating system services to the LibGDX platform
+ * independant code, and handles OpenCV initialization and api calls.
+ */
public class MainActivity extends AndroidApplication implements OSFunctionalityProvider, CVProcessor{
private static final String TAG = "NXTAR_ANDROID_MAIN";
private static final String CLASS_NAME = MainActivity.class.getSimpleName();
@@ -62,6 +68,10 @@ public class MainActivity extends AndroidApplication implements OSFunctionalityP
public native boolean findCalibrationPattern(long inMat, long outMat, float[] points);
public native double calibrateCameraParameters(long camMat, long distMat, long frame, float[] calibrationPoints);
+ /**
+ *
Static block. Tries to load OpenCV and the native method implementations
+ * statically if running on an OUYA device.
Disables the transmision and reception of multicast network messages.
+ */
@Override
public void disableMulticast(){
Gdx.app.log(TAG, CLASS_NAME + ".disableMulticast() :: Releasing multicast lock.");
@@ -190,31 +240,33 @@ public class MainActivity extends AndroidApplication implements OSFunctionalityP
if(ocvOn){
int codes[] = new int[15];
Bitmap tFrame, mFrame;
-
- tFrame = BitmapFactory.decodeByteArray(frame, 0, frame.length);
-
Mat inImg = new Mat();
Mat outImg = new Mat();
+
+ // Decode the input image and convert it to an OpenCV matrix.
+ tFrame = BitmapFactory.decodeByteArray(frame, 0, frame.length);
Utils.bitmapToMat(tFrame, inImg);
+ // Find up to 15 markers in the input image.
getMarkerCodesAndLocations(inImg.getNativeObjAddr(), outImg.getNativeObjAddr(), codes);
- //Mat temp = new Mat();
- //Imgproc.cvtColor(outImg, temp, Imgproc.COLOR_BGR2RGB);
-
+ // Encode the output image as a JPEG image.
mFrame = Bitmap.createBitmap(outImg.cols(), outImg.rows(), Bitmap.Config.RGB_565);
Utils.matToBitmap(outImg, mFrame);
mFrame.compress(CompressFormat.JPEG, 100, outputStream);
+ // Create the output data structure.
CVMarkerData data = new CVMarkerData();
data.outFrame = outputStream.toByteArray();
data.markerCodes = codes;
+ // Clean up memory.
tFrame.recycle();
mFrame.recycle();
outputStream.reset();
return data;
+
}else{
Gdx.app.debug(TAG, CLASS_NAME + ".findMarkersInFrame(): OpenCV is not ready or failed to load.");
return null;
@@ -222,7 +274,17 @@ public class MainActivity extends AndroidApplication implements OSFunctionalityP
}
/**
+ *
Implementation of the findCalibrationPattern method.
*
+ *
Attempts to detect a checkerboard calibration pattern in the input image.
+ * If the pattenr is found the method returns an image with the pattern
+ * highlighted and the spatial location of the calibration points in the
+ * output data structure.
+ *
+ * @param frame The JPEG encoded input image.
+ * @return A data structure containing the processed output image and the
+ * location of the calibration points. If the pattern was not found, the returnd
+ * calibration points array is null.
*/
@Override
public CVCalibrationData findCalibrationPattern(byte[] frame){
@@ -255,6 +317,7 @@ public class MainActivity extends AndroidApplication implements OSFunctionalityP
outputStream.reset();
return data;
+
}else{
Gdx.app.debug(TAG, CLASS_NAME + ".findCalibrationPattern(): OpenCV is not ready or failed to load.");
return null;
@@ -262,44 +325,9 @@ public class MainActivity extends AndroidApplication implements OSFunctionalityP
}
/**
+ *
Implementation of the calibrateCamera method.
*
- */
- @Override
- public byte[] undistortFrame(byte[] frame){
- if(ocvOn){
- byte undistortedFrame[];
- Bitmap tFrame, mFrame;
- Mat inImg = new Mat(), outImg = new Mat();
-
- // Decode the input frame and convert it to an OpenCV Matrix.
- tFrame = BitmapFactory.decodeByteArray(frame, 0, frame.length);
- Utils.bitmapToMat(tFrame, inImg);
-
- // Apply the undistort correction to the input frame.
- Imgproc.undistort(inImg, outImg, cameraMatrix, distortionCoeffs);
-
- // Encode the output image as a JPEG image.
- mFrame = Bitmap.createBitmap(outImg.cols(), outImg.rows(), Bitmap.Config.RGB_565);
- Utils.matToBitmap(outImg, mFrame);
- mFrame.compress(CompressFormat.JPEG, 100, outputStream);
-
- // Prepare the return frame.
- undistortedFrame = outputStream.toByteArray();
-
- // Clean up memory.
- tFrame.recycle();
- mFrame.recycle();
- outputStream.reset();
-
- return undistortedFrame;
- }else{
- Gdx.app.debug(TAG, CLASS_NAME + ".undistortFrame(): OpenCV is not ready or failed to load.");
- return null;
- }
- }
-
- /**
- *
+ *
Obtains the intrinsic camera parameters necesary for calibration.
*/
@Override
public void calibrateCamera(float[][] calibrationSamples, byte[] frame) {
@@ -309,6 +337,8 @@ public class MainActivity extends AndroidApplication implements OSFunctionalityP
Bitmap tFrame;
Mat inImg = new Mat();
+ // Save the calibration points on a one dimensional array for easier parameter passing
+ // to the native code.
for(int i = 0; i < ProjectConstants.CALIBRATION_SAMPLES; i++){
for(int j = 0, p = 0; j < ProjectConstants.CALIBRATION_PATTERN_POINTS; j++, p += 2){
calibrationPoints[p + (w * i)] = calibrationSamples[i][p];
@@ -316,13 +346,13 @@ public class MainActivity extends AndroidApplication implements OSFunctionalityP
}
}
+ // Decode the input image and convert it to an OpenCV matrix.
tFrame = BitmapFactory.decodeByteArray(frame, 0, frame.length);
Utils.bitmapToMat(tFrame, inImg);
+ // Attempt to obtain the camera parameters.
double error = calibrateCameraParameters(cameraMatrix.getNativeObjAddr(), distortionCoeffs.getNativeObjAddr(), inImg.getNativeObjAddr(), calibrationPoints);
-
Gdx.app.log(TAG, CLASS_NAME + "calibrateCamera(): calibrateCameraParameters retured " + Double.toString(error));
-
cameraCalibrated = true;
}else{
@@ -330,8 +360,59 @@ public class MainActivity extends AndroidApplication implements OSFunctionalityP
}
}
+
/**
+ *
Implementation of the undistorFrame method.
*
+ *
Removes camera lens distortion from the input image using the
+ * camera parameters obtained by the calibrateCamera method.
+ *
+ * @return A JPEG encoded image that is the input image after distortion correction. If the
+ * camera has not been calibrated or OpenCV failed to load returns null.
+ */
+ @Override
+ public byte[] undistortFrame(byte[] frame){
+ if(ocvOn){
+ if(cameraCalibrated){
+ byte undistortedFrame[];
+ Bitmap tFrame, mFrame;
+ Mat inImg = new Mat(), outImg = new Mat();
+
+ // Decode the input frame and convert it to an OpenCV Matrix.
+ tFrame = BitmapFactory.decodeByteArray(frame, 0, frame.length);
+ Utils.bitmapToMat(tFrame, inImg);
+
+ // Apply the undistort correction to the input frame.
+ Imgproc.undistort(inImg, outImg, cameraMatrix, distortionCoeffs);
+
+ // Encode the output image as a JPEG image.
+ mFrame = Bitmap.createBitmap(outImg.cols(), outImg.rows(), Bitmap.Config.RGB_565);
+ Utils.matToBitmap(outImg, mFrame);
+ mFrame.compress(CompressFormat.JPEG, 100, outputStream);
+
+ // Prepare the return frame.
+ undistortedFrame = outputStream.toByteArray();
+
+ // Clean up memory.
+ tFrame.recycle();
+ mFrame.recycle();
+ outputStream.reset();
+
+ return undistortedFrame;
+
+ }else{
+ Gdx.app.debug(TAG, CLASS_NAME + ".undistortFrame(): Camera has not been calibrated.");
+ return null;
+ }
+ }else{
+ Gdx.app.debug(TAG, CLASS_NAME + ".undistortFrame(): OpenCV is not ready or failed to load.");
+ return null;
+ }
+ }
+
+ /**
+ *
Indicates if OpenCV has been sucessfully initialized and used
+ * to obtain the camera parameters for calibration.
*/
@Override
public boolean cameraIsCalibrated() {
From 31dd89f30eed0357d1e9c28977e7f41730abda8b Mon Sep 17 00:00:00 2001
From: unknown
Date: Tue, 6 May 2014 16:36:46 -0430
Subject: [PATCH 10/18] Assorted edits. Created new shaders.
---
.../shaders/bckg/{bckg.frag => bckg_frag.glsl} | 0
.../shaders/bckg/{bckg.vert => bckg_vert.glsl} | 0
.../singleDiffuseLight_frag.glsl | 17 +++++++++++++++++
.../singleDiffuseLight_vert.glsl | 9 +++++++++
jni/cv_proc.cpp | 2 +-
jni/marker.cpp | 2 +-
jni/marker.hpp | 4 ++--
7 files changed, 30 insertions(+), 4 deletions(-)
rename assets/shaders/bckg/{bckg.frag => bckg_frag.glsl} (100%)
rename assets/shaders/bckg/{bckg.vert => bckg_vert.glsl} (100%)
create mode 100644 assets/shaders/singleDiffuseLight/singleDiffuseLight_frag.glsl
create mode 100644 assets/shaders/singleDiffuseLight/singleDiffuseLight_vert.glsl
diff --git a/assets/shaders/bckg/bckg.frag b/assets/shaders/bckg/bckg_frag.glsl
similarity index 100%
rename from assets/shaders/bckg/bckg.frag
rename to assets/shaders/bckg/bckg_frag.glsl
diff --git a/assets/shaders/bckg/bckg.vert b/assets/shaders/bckg/bckg_vert.glsl
similarity index 100%
rename from assets/shaders/bckg/bckg.vert
rename to assets/shaders/bckg/bckg_vert.glsl
diff --git a/assets/shaders/singleDiffuseLight/singleDiffuseLight_frag.glsl b/assets/shaders/singleDiffuseLight/singleDiffuseLight_frag.glsl
new file mode 100644
index 0000000..d1cb398
--- /dev/null
+++ b/assets/shaders/singleDiffuseLight/singleDiffuseLight_frag.glsl
@@ -0,0 +1,17 @@
+#ifdef GL_ES
+precision mediump float;
+#endif
+
+// Light color.
+uniform vec3 u_lightColor;
+// Diffuse surface color.
+uniform vec3 u_diffColor;
+// Specular color.
+uniform vec3 u_specColor;
+// Ambient light color.
+uniform vec3 u_ambColor;
+
+void main(){
+ // TODO: Implement per pixel diffuse lighting.
+ gl_FragColor = vec4(u_diffColor, 1.0);
+}
diff --git a/assets/shaders/singleDiffuseLight/singleDiffuseLight_vert.glsl b/assets/shaders/singleDiffuseLight/singleDiffuseLight_vert.glsl
new file mode 100644
index 0000000..7098fe7
--- /dev/null
+++ b/assets/shaders/singleDiffuseLight/singleDiffuseLight_vert.glsl
@@ -0,0 +1,9 @@
+// Model-view matrix.
+uniform mat4 u_projTrans;
+
+// Vertex position in world coordinates.
+attribute vec4 a_position;
+
+void main(){
+ gl_Position = u_projTrans * a_position;
+}
diff --git a/jni/cv_proc.cpp b/jni/cv_proc.cpp
index d42eaa9..5d6b631 100644
--- a/jni/cv_proc.cpp
+++ b/jni/cv_proc.cpp
@@ -143,4 +143,4 @@ JNIEXPORT jdouble JNICALL Java_ve_ucv_ciens_ccg_nxtar_MainActivity_calibrateCame
return error;
}
-}
+} // extern "C"
diff --git a/jni/marker.cpp b/jni/marker.cpp
index 660b4dc..f2a4618 100644
--- a/jni/marker.cpp
+++ b/jni/marker.cpp
@@ -500,4 +500,4 @@ float perimeter(points_vector & p){
Marker::~Marker(){
points.clear();
}
-}
+} // namespace nxtar
diff --git a/jni/marker.hpp b/jni/marker.hpp
index 001e825..2f0c02b 100644
--- a/jni/marker.hpp
+++ b/jni/marker.hpp
@@ -51,6 +51,6 @@ bool findCalibrationPattern(points_vector &, cv::Mat &);
*/
double getCameraParameters(cv::Mat &, cv::Mat &, std::vector &, cv::Size);
-}
+} // namespace nxtar
-#endif
+#endif // MARKER_HPP
From e5b9bd681b7de81f8cb82b93a5f7ce2ebd08b98e Mon Sep 17 00:00:00 2001
From: unknown
Date: Tue, 6 May 2014 18:28:18 -0430
Subject: [PATCH 11/18] Added more comments.
---
jni/cv_proc.cpp | 36 +++++--
src/ve/ucv/ciens/ccg/nxtar/MainActivity.java | 101 ++++++++++++++++---
2 files changed, 112 insertions(+), 25 deletions(-)
diff --git a/jni/cv_proc.cpp b/jni/cv_proc.cpp
index 5d6b631..3ecd1ff 100644
--- a/jni/cv_proc.cpp
+++ b/jni/cv_proc.cpp
@@ -20,11 +20,11 @@
#include "marker.hpp"
-//#define CAN_LOG
+//#define LOG_ENABLED
#define POINTS_PER_CALIBRATION_SAMPLE 54
#define CALIBRATION_SAMPLES 10
-#ifdef CAN_LOG
+#ifdef LOG_ENABLED
#define log(TAG, MSG) (__android_log_write(ANDROID_LOG_DEBUG, TAG, MSG))
#else
#define log(TAG, MSG) ;
@@ -35,65 +35,79 @@ const char * TAG = "CVPROC_NATIVE";
extern "C"{
/**
- *
+ * JNI wrapper around the nxtar::getAllMarkers() method.
*/
JNIEXPORT void JNICALL Java_ve_ucv_ciens_ccg_nxtar_MainActivity_getMarkerCodesAndLocations(JNIEnv* env, jobject jobj, jlong addrMatIn, jlong addrMatOut, jintArray codes){
char codeMsg[128];
std::vector vCodes;
+ cv::Mat temp;
log(TAG, "getMarkerCodesAndLocations(): Requesting native data.");
+ // Get the native object addresses.
cv::Mat& myuv = *(cv::Mat*)addrMatIn;
cv::Mat& mbgr = *(cv::Mat*)addrMatOut;
jint * _codes = env->GetIntArrayElements(codes, 0);
- cv::Mat temp;
+ // Convert the input image to the BGR color space.
log(TAG, "getMarkerCodesAndLocations(): Converting color space before processing.");
cv::cvtColor(myuv, temp, CV_RGB2BGR);
+ // Find all markers in the input image.
log(TAG, "getMarkerCodesAndLocations(): Finding markers.");
nxtar::getAllMarkers(vCodes, temp);
+ // Copy the marker codes to the output vector.
log(TAG, "getMarkerCodesAndLocations(): Copying marker codes.");
for(int i = 0; i < vCodes.size() && i < 15; i++){
_codes[i] = (jint)vCodes[i];
}
vCodes.clear();
+ // Convert the output image back to the RGB color space.
cv::cvtColor(temp, mbgr, CV_BGR2RGB);
+ // Release native data.
log(TAG, "getMarkerCodesAndLocations(): Releasing native data.");
env->ReleaseIntArrayElements(codes, _codes, 0);
}
/**
- *
+ * JNI wrapper around the nxtar::findCalibrationPattern() method.
*/
JNIEXPORT jboolean JNICALL Java_ve_ucv_ciens_ccg_nxtar_MainActivity_findCalibrationPattern(JNIEnv* env, jobject jobj, jlong addrMatIn, jlong addrMatOut, jfloatArray points){
nxtar::points_vector v_points;
bool found;
+ cv::Mat temp;
log(TAG, "findCalibrationPattern(): Requesting native data.");
+ // Get the native object addresses.
cv::Mat& myuv = *(cv::Mat*)addrMatIn;
cv::Mat& mbgr = *(cv::Mat*)addrMatOut;
jfloat * _points = env->GetFloatArrayElements(points, 0);
- cv::Mat temp;
+ // Convert the input image to the BGR color space.
log(TAG, "findCalibrationPattern(): Converting color space before processing.");
cv::cvtColor(myuv, temp, CV_RGB2BGR);
+ // Find the calibration points in the input image.
log(TAG, "findCalibrationPattern(): Finding calibration pattern.");
found = nxtar::findCalibrationPattern(v_points, temp);
- log(TAG, "findCalibrationPattern(): Copying calibration points.");
- for(size_t i = 0, p = 0; i < v_points.size(); i++, p += 2){
- _points[p] = (jfloat)v_points[i].x;
- _points[p + 1] = (jfloat)v_points[i].y;
+ // If the points were found then save them to the output array.
+ if(found){
+ log(TAG, "findCalibrationPattern(): Copying calibration points.");
+ for(size_t i = 0, p = 0; i < v_points.size(); i++, p += 2){
+ _points[p] = (jfloat)v_points[i].x;
+ _points[p + 1] = (jfloat)v_points[i].y;
+ }
}
+ // Convert the output image back to the RGB color space.
cv::cvtColor(temp, mbgr, CV_BGR2RGB);
+ // Release native data.
log(TAG, "findCalibrationPattern(): Releasing native data.");
env->ReleaseFloatArrayElements(points, _points, 0);
@@ -101,7 +115,7 @@ JNIEXPORT jboolean JNICALL Java_ve_ucv_ciens_ccg_nxtar_MainActivity_findCalibrat
}
/**
- *
+ * JNI wrapper around the nxtar::getCameraParameters() method.
*/
JNIEXPORT jdouble JNICALL Java_ve_ucv_ciens_ccg_nxtar_MainActivity_calibrateCameraParameters(JNIEnv* env, jobject jobj, jlong addrMatIn, jlong addrMatOut, jlong addrMatFrame, jfloatArray points){
double error;
diff --git a/src/ve/ucv/ciens/ccg/nxtar/MainActivity.java b/src/ve/ucv/ciens/ccg/nxtar/MainActivity.java
index 9d4b2fa..7c12214 100644
--- a/src/ve/ucv/ciens/ccg/nxtar/MainActivity.java
+++ b/src/ve/ucv/ciens/ccg/nxtar/MainActivity.java
@@ -50,23 +50,95 @@ import com.badlogic.gdx.controllers.mappings.Ouya;
* independant code, and handles OpenCV initialization and api calls.
*/
public class MainActivity extends AndroidApplication implements OSFunctionalityProvider, CVProcessor{
+ /**
+ * Tag used for logging.
+ */
private static final String TAG = "NXTAR_ANDROID_MAIN";
+
+ /**
+ * Class name used for logging.
+ */
private static final String CLASS_NAME = MainActivity.class.getSimpleName();
+ /**
+ * Output stream used to codify images as JPEG using Android's Bitmap class.
+ */
private static final ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
- private static boolean ocvOn = false;
- private static Mat cameraMatrix, distortionCoeffs;
+ /**
+ * Indicates if OpenCV was initialized sucessfully.
+ */
+ private static boolean ocvOn = false;
+
+ /**
+ * Intrinsic camera matrix.
+ */
+ private static Mat cameraMatrix;
+
+ /**
+ * Distortion coeffitients matrix.
+ */
+ private static Mat distortionCoeffs;
+
+ /**
+ * Used to set and release multicast locks.
+ */
private WifiManager wifiManager;
+
+ /**
+ * Used to maintain the multicast lock during the service discovery procedure.
+ */
private MulticastLock multicastLock;
+
+ /**
+ * Handler used for requesting toast messages from the core LibGDX code.
+ */
private Handler uiHandler;
+
+ /**
+ * User interface context used to show the toast messages.
+ */
private Context uiContext;
+
+ /**
+ * OpenCV asynchronous initializer callback for mobile devices.
+ */
private BaseLoaderCallback loaderCallback;
+
+ /**
+ * Indicates if the current video streaming camera has been calibrated.
+ */
private boolean cameraCalibrated;
- public native void getMarkerCodesAndLocations(long inMat, long outMat, int[] codes);
- public native boolean findCalibrationPattern(long inMat, long outMat, float[] points);
- public native double calibrateCameraParameters(long camMat, long distMat, long frame, float[] calibrationPoints);
+ /**
+	 * Wrapper for the getAllMarkers native function.
+ *
+ * @param inMat INPUT. The image to analize.
+ * @param outMat OUTPUT. The image with the markers highlighted.
+ * @param codes OUTPUT. The codes for each marker detected. Must be 15 elements long.
+ */
+ private native void getMarkerCodesAndLocations(long inMat, long outMat, int[] codes);
+
+ /**
+	 * Wrapper for the findCalibrationPattern native function.
+ *
+ * @param inMat INPUT. The image to analize.
+ * @param outMat OUTPUT. The image with the calibration pattern highlighted.
+ * @param points OUTPUT. The spatial location of the calibration points if found.
+ * @return True if the calibration pattern was found. False otherwise.
+ */
+ private native boolean findCalibrationPattern(long inMat, long outMat, float[] points);
+
+ /**
+	 * Wrapper around the getCameraParameters native function.
+ *
+ * @param camMat OUTPUT. The intrinsic camera matrix.
+ * @param distMat OUTPUT. The distortion coeffitients matrix.
+ * @param frame INPUT. A sample input image from the camera to calibrate.
+ * @param calibrationPoints INPUT. The calibration points of all samples.
+ * @return The calibration error as returned by OpenCV.
+ */
+ private native double calibrateCameraParameters(long camMat, long distMat, long frame, float[] calibrationPoints);
/**
	 * Static block. Tries to load OpenCV and the native method implementations
@@ -90,7 +162,9 @@ public class MainActivity extends AndroidApplication implements OSFunctionalityP
	 * Initializes this activity.
	 *
	 * This method handles the initialization of LibGDX and OpenCV. OpenCV is
- * loaded the asynchronous method if the devices is not an OUYA console.
+ * loaded the asynchronous method if the devices is not an OUYA console.
+ *
+ * @param savedInstanceState The application state if it was saved in a previous run.
*/
@Override
public void onCreate(Bundle savedInstanceState){
@@ -126,6 +200,7 @@ public class MainActivity extends AndroidApplication implements OSFunctionalityP
cameraMatrix = new Mat();
distortionCoeffs = new Mat();
break;
+
default:
Toast.makeText(uiContext, R.string.ocv_failed, Toast.LENGTH_LONG).show();
ocvOn = false;
@@ -164,9 +239,9 @@ public class MainActivity extends AndroidApplication implements OSFunctionalityP
////////////////////////////////////////////////
/**
-	 * Implementation of the showShortToast method.
-	 *
	 * Shows a short message on screen using Android's toast mechanism.
+ *
+ * @param msg The message to show.
*/
@Override
public void showShortToast(final String msg){
@@ -179,9 +254,9 @@ public class MainActivity extends AndroidApplication implements OSFunctionalityP
}
/**
-	 * Implementation of the showLongToast method.
-	 *
	 * Shows a long message on screen using Android's toast mechanism.
+ *
+ * @param msg The message to show.
*/
@Override
public void showLongToast(final String msg){
@@ -194,8 +269,6 @@ public class MainActivity extends AndroidApplication implements OSFunctionalityP
}
/**
-	 * Implementation of the enableMulticast method.
-	 *
	 * Enable the transmision and reception of multicast network messages.
*/
@Override
@@ -207,8 +280,6 @@ public class MainActivity extends AndroidApplication implements OSFunctionalityP
}
/**
-	 * Implementation of the disableMulticast method.
-	 *
	 * Disables the transmision and reception of multicast network messages.
*/
@Override
@@ -413,6 +484,8 @@ public class MainActivity extends AndroidApplication implements OSFunctionalityP
/**
	 * Indicates if OpenCV has been sucessfully initialized and used
* to obtain the camera parameters for calibration.
+ *
+ * @return True if and only if OpenCV initialized succesfully and calibrateCamera has been called previously.
*/
@Override
public boolean cameraIsCalibrated() {
From 51183a2cf10bfb8b161d4672eff56ef1a75dcf52 Mon Sep 17 00:00:00 2001
From: unknown
Date: Wed, 7 May 2014 16:42:14 -0430
Subject: [PATCH 12/18] Added vertex attributes.
---
.../singleDiffuseLight/singleDiffuseLight_frag.glsl | 13 +++----------
.../singleDiffuseLight/singleDiffuseLight_vert.glsl | 7 +++++++
2 files changed, 10 insertions(+), 10 deletions(-)
diff --git a/assets/shaders/singleDiffuseLight/singleDiffuseLight_frag.glsl b/assets/shaders/singleDiffuseLight/singleDiffuseLight_frag.glsl
index d1cb398..aed8177 100644
--- a/assets/shaders/singleDiffuseLight/singleDiffuseLight_frag.glsl
+++ b/assets/shaders/singleDiffuseLight/singleDiffuseLight_frag.glsl
@@ -2,16 +2,9 @@
precision mediump float;
#endif
-// Light color.
-uniform vec3 u_lightColor;
-// Diffuse surface color.
-uniform vec3 u_diffColor;
-// Specular color.
-uniform vec3 u_specColor;
-// Ambient light color.
-uniform vec3 u_ambColor;
+// Fragment color received from the vertex shader.
+varying vec4 v_color;
void main(){
- // TODO: Implement per pixel diffuse lighting.
- gl_FragColor = vec4(u_diffColor, 1.0);
+ gl_FragColor = v_color;
}
diff --git a/assets/shaders/singleDiffuseLight/singleDiffuseLight_vert.glsl b/assets/shaders/singleDiffuseLight/singleDiffuseLight_vert.glsl
index 7098fe7..2a2b274 100644
--- a/assets/shaders/singleDiffuseLight/singleDiffuseLight_vert.glsl
+++ b/assets/shaders/singleDiffuseLight/singleDiffuseLight_vert.glsl
@@ -4,6 +4,13 @@ uniform mat4 u_projTrans;
// Vertex position in world coordinates.
attribute vec4 a_position;
+// Vertex color.
+attribute vec4 a_color;
+
+// Vertex color to pass to the fragment shader.
+varying vec4 v_color;
+
void main(){
+ v_color = a_color;
gl_Position = u_projTrans * a_position;
}
From caa06310040ef18dd4374e93a187eef963a9fced Mon Sep 17 00:00:00 2001
From: unknown
Date: Thu, 8 May 2014 16:22:11 -0430
Subject: [PATCH 13/18] Single light textureless phong shading complete.
---
.../singleDiffuseLight_frag.glsl | 43 ++++++++++++++++++-
.../singleDiffuseLight_vert.glsl | 17 +++++++-
2 files changed, 58 insertions(+), 2 deletions(-)
diff --git a/assets/shaders/singleDiffuseLight/singleDiffuseLight_frag.glsl b/assets/shaders/singleDiffuseLight/singleDiffuseLight_frag.glsl
index aed8177..ef07f44 100644
--- a/assets/shaders/singleDiffuseLight/singleDiffuseLight_frag.glsl
+++ b/assets/shaders/singleDiffuseLight/singleDiffuseLight_frag.glsl
@@ -2,9 +2,50 @@
precision mediump float;
#endif
+// Camera world space position.
+uniform vec3 u_cameraPos;
+
+// Light source position
+uniform vec4 u_lightPos;
+
+// Diffuse light color.
+uniform vec4 u_lightDiffuse;
+
+// Ambient light color.
+uniform vec4 u_ambient;
+
+// Shininess.
+uniform float u_shiny;
+
+// Fragment position.
+varying vec4 v_position;
+
+// Fragment normal.
+varying vec3 v_normal;
+
// Fragment color received from the vertex shader.
varying vec4 v_color;
void main(){
- gl_FragColor = v_color;
+ vec3 normal = normalize(v_normal);
+ vec3 lightVector = normalize(u_lightPos.xyz - v_position.xyz);
+ vec3 eyeVector = normalize(u_cameraPos.xyz - v_position.xyz);
+ vec3 reflectedVector = normalize(-reflect(lightVector, normal));
+
+ // Ambient Term.
+ vec4 ambient = u_ambient;
+
+ // Diffuse Term.
+ vec4 diffuse = u_lightDiffuse * max(dot(normal, lightVector), 0.0);
+ diffuse = clamp(diffuse, 0.0, 1.0);
+
+ // Specular Term:
+ vec4 specular = vec4(1.0) * pow(max(dot(reflectedVector, eyeVector), 0.0), 0.3 * u_shiny);
+ specular = clamp(specular, 0.0, 1.0);
+
+ // Aggregate light color.
+ vec4 lightColor = vec4(ambient.rgb + diffuse.rgb + specular.rgb, 1.0);
+
+ // Final color.
+ gl_FragColor = clamp(lightColor * v_color, 0.0, 1.0);
}
diff --git a/assets/shaders/singleDiffuseLight/singleDiffuseLight_vert.glsl b/assets/shaders/singleDiffuseLight/singleDiffuseLight_vert.glsl
index 2a2b274..1598b8f 100644
--- a/assets/shaders/singleDiffuseLight/singleDiffuseLight_vert.glsl
+++ b/assets/shaders/singleDiffuseLight/singleDiffuseLight_vert.glsl
@@ -1,16 +1,31 @@
// Model-view matrix.
uniform mat4 u_projTrans;
+// Normal matrix.
+uniform mat4 u_normalMat;
+
// Vertex position in world coordinates.
attribute vec4 a_position;
+// Vertex normal.
+attribute vec4 a_normal;
+
// Vertex color.
attribute vec4 a_color;
+// Vertex position to pass to the fragment shader.
+varying vec4 v_position;
+
+// Vertex normal to pass to the fragment shader.
+varying vec3 v_normal;
+
// Vertex color to pass to the fragment shader.
varying vec4 v_color;
void main(){
+ v_position = u_projTrans * a_position;
v_color = a_color;
- gl_Position = u_projTrans * a_position;
+ v_normal = vec3(u_normalMat * a_normal);
+
+ gl_Position = v_position;
}
From 8d13493c2afe04d4d414fa452e7c0c2305c66e9f Mon Sep 17 00:00:00 2001
From: unknown
Date: Fri, 9 May 2014 10:47:42 -0430
Subject: [PATCH 14/18] Moved the diffuse color calculation to the vertex
shader.
---
.../singleDiffuseLight_frag.glsl | 50 +++++++++-------
.../singleDiffuseLight_vert.glsl | 59 +++++++++++++++----
2 files changed, 74 insertions(+), 35 deletions(-)
diff --git a/assets/shaders/singleDiffuseLight/singleDiffuseLight_frag.glsl b/assets/shaders/singleDiffuseLight/singleDiffuseLight_frag.glsl
index ef07f44..e5ba28f 100644
--- a/assets/shaders/singleDiffuseLight/singleDiffuseLight_frag.glsl
+++ b/assets/shaders/singleDiffuseLight/singleDiffuseLight_frag.glsl
@@ -1,16 +1,23 @@
+/*
+ * Copyright (C) 2014 Miguel Angel Astor Romero
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
#ifdef GL_ES
precision mediump float;
#endif
-// Camera world space position.
-uniform vec3 u_cameraPos;
-
-// Light source position
-uniform vec4 u_lightPos;
-
-// Diffuse light color.
-uniform vec4 u_lightDiffuse;
-
// Ambient light color.
uniform vec4 u_ambient;
@@ -26,25 +33,24 @@ varying vec3 v_normal;
// Fragment color received from the vertex shader.
varying vec4 v_color;
+// Fragment shaded diffuse color.
+varying vec4 v_diffuse;
+
+varying vec3 v_lightVector;
+varying vec3 v_eyeVector;
+varying vec3 v_reflectedVector;
+
void main(){
- vec3 normal = normalize(v_normal);
- vec3 lightVector = normalize(u_lightPos.xyz - v_position.xyz);
- vec3 eyeVector = normalize(u_cameraPos.xyz - v_position.xyz);
- vec3 reflectedVector = normalize(-reflect(lightVector, normal));
-
- // Ambient Term.
- vec4 ambient = u_ambient;
-
- // Diffuse Term.
- vec4 diffuse = u_lightDiffuse * max(dot(normal, lightVector), 0.0);
- diffuse = clamp(diffuse, 0.0, 1.0);
+ // Normalize the input varyings.
+ vec3 lightVector = normalize(v_lightVector);
+ vec3 eyeVector = normalize(v_eyeVector);
+ vec3 reflectedVector = normalize(v_reflectedVector);
// Specular Term:
vec4 specular = vec4(1.0) * pow(max(dot(reflectedVector, eyeVector), 0.0), 0.3 * u_shiny);
- specular = clamp(specular, 0.0, 1.0);
// Aggregate light color.
- vec4 lightColor = vec4(ambient.rgb + diffuse.rgb + specular.rgb, 1.0);
+ vec4 lightColor = clamp(vec4(u_ambient.rgb + v_diffuse.rgb + specular.rgb, 1.0), 0.0, 1.0);
// Final color.
gl_FragColor = clamp(lightColor * v_color, 0.0, 1.0);
diff --git a/assets/shaders/singleDiffuseLight/singleDiffuseLight_vert.glsl b/assets/shaders/singleDiffuseLight/singleDiffuseLight_vert.glsl
index 1598b8f..6301ac7 100644
--- a/assets/shaders/singleDiffuseLight/singleDiffuseLight_vert.glsl
+++ b/assets/shaders/singleDiffuseLight/singleDiffuseLight_vert.glsl
@@ -1,8 +1,30 @@
+/*
+ * Copyright (C) 2014 Miguel Angel Astor Romero
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
// Model-view matrix.
uniform mat4 u_projTrans;
-// Normal matrix.
-uniform mat4 u_normalMat;
+// Light source position
+uniform vec4 u_lightPos;
+
+// Diffuse light color.
+uniform vec4 u_lightDiffuse;
+
+// Camera world space position.
+uniform vec3 u_cameraPos;
// Vertex position in world coordinates.
attribute vec4 a_position;
@@ -13,19 +35,30 @@ attribute vec4 a_normal;
// Vertex color.
attribute vec4 a_color;
-// Vertex position to pass to the fragment shader.
-varying vec4 v_position;
-
-// Vertex normal to pass to the fragment shader.
-varying vec3 v_normal;
-
// Vertex color to pass to the fragment shader.
varying vec4 v_color;
-void main(){
- v_position = u_projTrans * a_position;
- v_color = a_color;
- v_normal = vec3(u_normalMat * a_normal);
+// Diffuse shaded color to pass to the fragment shader.
+varying vec4 v_diffuse;
- gl_Position = v_position;
+// The vector from the vertex to the light source.
+varying vec3 v_lightVector;
+
+// The vector from the vertex to the camera.
+varying vec3 v_eyeVector;
+
+// The light vector reflected around the vertex normal.
+varying vec3 v_reflectedVector;
+
+void main(){
+ // Set the varyings.
+ v_lightVector = normalize(u_lightPos.xyz - a_position.xyz);
+ v_eyeVector = normalize(u_cameraPos.xyz - a_position.xyz);
+ v_reflectedVector = normalize(-reflect(v_lightVector, a_normal.xyz));
+ v_color = a_color;
+
+ // Diffuse Term.
+ v_diffuse = u_lightDiffuse * max(dot(a_normal.xyz, v_lightVector), 0.0);
+
+ gl_Position = u_projTrans * a_position;
}
From ea33f1e725436f87d222151b1458d10830e28893 Mon Sep 17 00:00:00 2001
From: unknown
Date: Mon, 12 May 2014 11:36:55 -0430
Subject: [PATCH 15/18] Added specular color and geometric transformations.
---
.../singleDiffuseLight/singleDiffuseLight_frag.glsl | 5 ++++-
.../singleDiffuseLight/singleDiffuseLight_vert.glsl | 12 +++++++++---
2 files changed, 13 insertions(+), 4 deletions(-)
diff --git a/assets/shaders/singleDiffuseLight/singleDiffuseLight_frag.glsl b/assets/shaders/singleDiffuseLight/singleDiffuseLight_frag.glsl
index e5ba28f..04fc4c6 100644
--- a/assets/shaders/singleDiffuseLight/singleDiffuseLight_frag.glsl
+++ b/assets/shaders/singleDiffuseLight/singleDiffuseLight_frag.glsl
@@ -21,6 +21,9 @@ precision mediump float;
// Ambient light color.
uniform vec4 u_ambient;
+// Specular light color.
+uniform vec4 u_specular;
+
// Shininess.
uniform float u_shiny;
@@ -47,7 +50,7 @@ void main(){
vec3 reflectedVector = normalize(v_reflectedVector);
// Specular Term:
- vec4 specular = vec4(1.0) * pow(max(dot(reflectedVector, eyeVector), 0.0), 0.3 * u_shiny);
+ vec4 specular = u_specular * pow(max(dot(reflectedVector, eyeVector), 0.0), 0.3 * u_shiny);
// Aggregate light color.
vec4 lightColor = clamp(vec4(u_ambient.rgb + v_diffuse.rgb + specular.rgb, 1.0), 0.0, 1.0);
diff --git a/assets/shaders/singleDiffuseLight/singleDiffuseLight_vert.glsl b/assets/shaders/singleDiffuseLight/singleDiffuseLight_vert.glsl
index 6301ac7..f125146 100644
--- a/assets/shaders/singleDiffuseLight/singleDiffuseLight_vert.glsl
+++ b/assets/shaders/singleDiffuseLight/singleDiffuseLight_vert.glsl
@@ -17,6 +17,9 @@
// Model-view matrix.
uniform mat4 u_projTrans;
+// The world space geometric transformation to apply to this vertex.
+uniform mat4 u_geomTrans;
+
// Light source position
uniform vec4 u_lightPos;
@@ -51,14 +54,17 @@ varying vec3 v_eyeVector;
varying vec3 v_reflectedVector;
void main(){
+ // Apply the geometric transformation to the original position of the vertex.
+ vec4 transformedPosition = u_geomTrans * a_position;
+
// Set the varyings.
- v_lightVector = normalize(u_lightPos.xyz - a_position.xyz);
- v_eyeVector = normalize(u_cameraPos.xyz - a_position.xyz);
+ v_lightVector = normalize(u_lightPos.xyz - transformedPosition.xyz);
+ v_eyeVector = normalize(u_cameraPos.xyz - transformedPosition.xyz);
v_reflectedVector = normalize(-reflect(v_lightVector, a_normal.xyz));
v_color = a_color;
// Diffuse Term.
v_diffuse = u_lightDiffuse * max(dot(a_normal.xyz, v_lightVector), 0.0);
- gl_Position = u_projTrans * a_position;
+ gl_Position = u_projTrans * transformedPosition;
}
From b0151e85e9a48364f3e78f8f4bfb9b6cd7293596 Mon Sep 17 00:00:00 2001
From: unknown
Date: Tue, 13 May 2014 18:30:32 -0430
Subject: [PATCH 16/18] Added marker pose estimation. Not tested yet.
---
jni/cv_proc.cpp | 68 +++++++++-----
jni/marker.cpp | 40 ++++++--
jni/marker.hpp | 24 ++++-
src/ve/ucv/ciens/ccg/nxtar/MainActivity.java | 97 +++++++++++++-------
4 files changed, 166 insertions(+), 63 deletions(-)
diff --git a/jni/cv_proc.cpp b/jni/cv_proc.cpp
index 3ecd1ff..9c38d88 100644
--- a/jni/cv_proc.cpp
+++ b/jni/cv_proc.cpp
@@ -21,6 +21,9 @@
#include "marker.hpp"
//#define LOG_ENABLED
+#define MAX_MARKERS 5
+#define TRANSLATION_VECTOR_POINTS 3
+#define ROTATION_MATRIX_SIZE 9
#define POINTS_PER_CALIBRATION_SAMPLE 54
#define CALIBRATION_SAMPLES 10
@@ -37,17 +40,21 @@ extern "C"{
/**
* JNI wrapper around the nxtar::getAllMarkers() method.
*/
-JNIEXPORT void JNICALL Java_ve_ucv_ciens_ccg_nxtar_MainActivity_getMarkerCodesAndLocations(JNIEnv* env, jobject jobj, jlong addrMatIn, jlong addrMatOut, jintArray codes){
- char codeMsg[128];
- std::vector vCodes;
- cv::Mat temp;
-
- log(TAG, "getMarkerCodesAndLocations(): Requesting native data.");
+JNIEXPORT void JNICALL Java_ve_ucv_ciens_ccg_nxtar_MainActivity_getMarkerCodesAndLocations(JNIEnv* env, jobject jobj, jlong addrMatIn, jlong addrMatOut, jintArray codes, jlong camMat, jlong distMat, jfloatArray translations, jfloatArray rotations){
+ char codeMsg[128];
+ std::vector vCodes;
+ nxtar::markers_vector vMarkers;
+ cv::Mat temp;
// Get the native object addresses.
- cv::Mat& myuv = *(cv::Mat*)addrMatIn;
- cv::Mat& mbgr = *(cv::Mat*)addrMatOut;
+ log(TAG, "getMarkerCodesAndLocations(): Requesting native data.");
+ cv::Mat& myuv = *(cv::Mat*)addrMatIn;
+ cv::Mat& mbgr = *(cv::Mat*)addrMatOut;
+ cv::Mat& mCam = *(cv::Mat*)camMat;
+ cv::Mat& mDist = *(cv::Mat*)distMat;
jint * _codes = env->GetIntArrayElements(codes, 0);
+ jfloat * _tr = env->GetFloatArrayElements(translations, 0);
+ jfloat * _rt = env->GetFloatArrayElements(rotations, 0);
// Convert the input image to the BGR color space.
log(TAG, "getMarkerCodesAndLocations(): Converting color space before processing.");
@@ -55,14 +62,32 @@ JNIEXPORT void JNICALL Java_ve_ucv_ciens_ccg_nxtar_MainActivity_getMarkerCodesAn
// Find all markers in the input image.
log(TAG, "getMarkerCodesAndLocations(): Finding markers.");
- nxtar::getAllMarkers(vCodes, temp);
+ nxtar::getAllMarkers(vMarkers, temp);
+ nxtar::estimateMarkerPosition(vMarkers, mCam, mDist);
// Copy the marker codes to the output vector.
log(TAG, "getMarkerCodesAndLocations(): Copying marker codes.");
- for(int i = 0; i < vCodes.size() && i < 15; i++){
- _codes[i] = (jint)vCodes[i];
+ for(size_t i = 0; i < vMarkers.size() && i < MAX_MARKERS; i++){
+ _codes[i] = (jint)vMarkers[i].code;
}
- vCodes.clear();
+
+ // Copy the geometric transformations to the output vectors.
+ for(int i = 0, p = 0; i < vMarkers.size(); i++, p += 3){
+ _tr[p ] = vMarkers[i].translation.at(0);
+ _tr[p + 1] = vMarkers[i].translation.at(1);
+ _tr[p + 2] = vMarkers[i].translation.at(2);
+ }
+
+ for(int k = 0; k < vMarkers.size(); k++){
+ for(int row = 0; row < 3; row++){
+ for(int col = 0; col < 3; col++){
+ _rt[col + (row * 3) + (9 * k)] = vMarkers[k].rotation.at(row, col);
+ }
+ }
+ }
+
+ // Clear marker memory.
+ vMarkers.clear();
// Convert the output image back to the RGB color space.
cv::cvtColor(temp, mbgr, CV_BGR2RGB);
@@ -70,6 +95,8 @@ JNIEXPORT void JNICALL Java_ve_ucv_ciens_ccg_nxtar_MainActivity_getMarkerCodesAn
// Release native data.
log(TAG, "getMarkerCodesAndLocations(): Releasing native data.");
env->ReleaseIntArrayElements(codes, _codes, 0);
+ env->ReleaseFloatArrayElements(translations, _tr, 0);
+ env->ReleaseFloatArrayElements(rotations, _rt, 0);
}
/**
@@ -77,14 +104,13 @@ JNIEXPORT void JNICALL Java_ve_ucv_ciens_ccg_nxtar_MainActivity_getMarkerCodesAn
*/
JNIEXPORT jboolean JNICALL Java_ve_ucv_ciens_ccg_nxtar_MainActivity_findCalibrationPattern(JNIEnv* env, jobject jobj, jlong addrMatIn, jlong addrMatOut, jfloatArray points){
nxtar::points_vector v_points;
- bool found;
- cv::Mat temp;
-
- log(TAG, "findCalibrationPattern(): Requesting native data.");
+ bool found;
+ cv::Mat temp;
// Get the native object addresses.
- cv::Mat& myuv = *(cv::Mat*)addrMatIn;
- cv::Mat& mbgr = *(cv::Mat*)addrMatOut;
+ log(TAG, "findCalibrationPattern(): Requesting native data.");
+ cv::Mat& myuv = *(cv::Mat*)addrMatIn;
+ cv::Mat& mbgr = *(cv::Mat*)addrMatOut;
jfloat * _points = env->GetFloatArrayElements(points, 0);
// Convert the input image to the BGR color space.
@@ -118,13 +144,13 @@ JNIEXPORT jboolean JNICALL Java_ve_ucv_ciens_ccg_nxtar_MainActivity_findCalibrat
* JNI wrapper around the nxtar::getCameraParameters() method.
*/
JNIEXPORT jdouble JNICALL Java_ve_ucv_ciens_ccg_nxtar_MainActivity_calibrateCameraParameters(JNIEnv* env, jobject jobj, jlong addrMatIn, jlong addrMatOut, jlong addrMatFrame, jfloatArray points){
- double error;
+ double error;
std::vector imagePoints;
// Get native object addresses.
log(TAG, "calibrateCameraParameters(): Requesting native data.");
- cv::Mat& mIn = *(cv::Mat*)addrMatIn;
- cv::Mat& mOut = *(cv::Mat*)addrMatOut;
+ cv::Mat& mIn = *(cv::Mat*)addrMatIn;
+ cv::Mat& mOut = *(cv::Mat*)addrMatOut;
cv::Mat& mFrame = *(cv::Mat*)addrMatFrame;
jfloat * _points = env->GetFloatArrayElements(points, 0);
diff --git a/jni/marker.cpp b/jni/marker.cpp
index f2a4618..ba51285 100644
--- a/jni/marker.cpp
+++ b/jni/marker.cpp
@@ -27,7 +27,6 @@ namespace nxtar{
typedef std::vector points_vector_3D;
typedef std::vector > contours_vector;
-typedef std::vector markers_vector;
/******************************************************************************
* PRIVATE CONSTANTS *
@@ -92,16 +91,15 @@ void warpMarker(Marker &, cv::Mat &, cv::Mat &);
* PUBLIC API *
******************************************************************************/
-void getAllMarkers(std::vector & codes, cv::Mat & img){
+void getAllMarkers(markers_vector & valid_markers, cv::Mat & img){
cv::Mat gray, thresh, cont, mark;
contours_vector contours;
markers_vector markers;
- markers_vector valid_markers;
#ifdef DESKTOP
std::ostringstream oss;
#endif
- codes.clear();
+ valid_markers.clear();
// Find all marker candidates in the input image.
// 1) First, convert the image to grayscale.
@@ -147,7 +145,6 @@ void getAllMarkers(std::vector & codes, cv::Mat & img){
// Fix the detected corners to better approximate the markers. And
// push their codes to the output vector.
cv::cornerSubPix(gray, valid_markers[i].points, cvSize(10, 10), cvSize(-1, -1), TERM_CRITERIA);
- codes.push_back(valid_markers[i].code);
}
// Render the detected markers on top of the input image.
@@ -158,7 +155,6 @@ void getAllMarkers(std::vector & codes, cv::Mat & img){
// Clear the local vectors.
markers.clear();
contours.clear();
- valid_markers.clear();
}
bool findCalibrationPattern(points_vector & corners, cv::Mat & img){
@@ -204,6 +200,35 @@ double getCameraParameters(cv::Mat & camera_matrix, cv::Mat & dist_coeffs, std::
return cv::calibrateCamera(object_points, image_points, image_size, camera_matrix, dist_coeffs, rvecs, tvecs, 0, TERM_CRITERIA);
}
+void estimateMarkerPosition(markers_vector & markers, cv::Mat & camMatrix, cv::Mat & distCoeff){
+ cv::Mat rVec, rAux, tAux;
+ cv::Mat_ tVec, rotation(3,3);
+ points_vector_3D objectPoints;
+
+ // Assemble a unitary CCW oriented reference polygon.
+ objectPoints.push_back(cv::Point3f(-0.5f, -0.5f, 0.0f));
+ objectPoints.push_back(cv::Point3f(-0.5f, 0.5f, 0.0f));
+ objectPoints.push_back(cv::Point3f( 0.5f, 0.5f, 0.0f));
+ objectPoints.push_back(cv::Point3f( 0.5f, -0.5f, 0.0f));
+
+ for(size_t i = 0; i < markers.size(); i++){
+ // Obtain the translation and rotation vectors.
+ cv::solvePnP(objectPoints, markers[i].points, camMatrix, distCoeff, rAux, tAux);
+
+ // Convert the obtained vectors to float.
+ rAux.convertTo(rVec, CV_32F);
+ tAux.convertTo(tVec, CV_32F);
+
+ // Convert the rotation vector to a rotation matrix.
+ cv::Rodrigues(rVec, rotation);
+
+ // Make the rotation and translation relative to the "camera" and save
+ // the results to the output marker.
+ markers[i].rotation = cv::Mat(rotation.t());
+ markers[i].translation = cv::Mat(-tVec);
+ }
+}
+
/******************************************************************************
* PRIVATE HELPER FUNCTIONS *
******************************************************************************/
@@ -357,6 +382,7 @@ void warpMarker(Marker & m, cv::Mat & in, cv::Mat & out){
/**
* Calculate the hamming distance of a 5x5 bit matrix.
+ * Function by Daniel Lelis Baggio for "Mastering OpenCV with Practical Computer Vision Projects".
*/
int hammDistMarker(cv::Mat bits){
int ids[4][5] = {
@@ -397,7 +423,7 @@ cv::Mat rotate(cv::Mat in){
in.copyTo(out);
for (int i = 0; i < in.rows; i++){
for (int j = 0; j < in.cols; j++){
- out.at(i, j)=in.at(in.cols-j - 1, i);
+ out.at(i, j) = in.at(in.cols-j - 1, i);
}
}
diff --git a/jni/marker.hpp b/jni/marker.hpp
index 2f0c02b..2bae666 100644
--- a/jni/marker.hpp
+++ b/jni/marker.hpp
@@ -22,20 +22,30 @@
namespace nxtar{
-typedef std::vector points_vector;
+class Marker;
+typedef std::vector points_vector;
+typedef std::vector markers_vector;
+
+/**
+ * A class representing a marker with the geometric transformations needed to
+ * render something on top of it.
+ */
class Marker{
public:
~Marker();
+
+ int code;
points_vector points;
- int code;
+ cv::Mat translation;
+ cv::Mat rotation;
};
/**
* Detect all 5x5 markers in the input image and return their codes in the
* output vector.
*/
-void getAllMarkers(std::vector &, cv::Mat &);
+void getAllMarkers(markers_vector &, cv::Mat &);
/**
* Find a chessboard calibration pattern in the input image. Returns true
@@ -51,6 +61,14 @@ bool findCalibrationPattern(points_vector &, cv::Mat &);
*/
double getCameraParameters(cv::Mat &, cv::Mat &, std::vector &, cv::Size);
+/**
+ * Obtains the geometric transformations necessary to move a reference
+ * unit polygon to the position and rotation of the markers passed as input.
+ * The obtained transformations are given relative to a camera centered in the
+ * origin and are saved inside the input markers.
+ */
+void estimateMarkerPosition(markers_vector &, cv::Mat &, cv::Mat &);
+
} // namespace nxtar
#endif // MARKER_HPP
diff --git a/src/ve/ucv/ciens/ccg/nxtar/MainActivity.java b/src/ve/ucv/ciens/ccg/nxtar/MainActivity.java
index 7c12214..e777b79 100644
--- a/src/ve/ucv/ciens/ccg/nxtar/MainActivity.java
+++ b/src/ve/ucv/ciens/ccg/nxtar/MainActivity.java
@@ -24,8 +24,8 @@ import org.opencv.android.Utils;
import org.opencv.core.Mat;
import org.opencv.imgproc.Imgproc;
-import ve.ucv.ciens.ccg.nxtar.interfaces.CVProcessor;
-import ve.ucv.ciens.ccg.nxtar.interfaces.OSFunctionalityProvider;
+import ve.ucv.ciens.ccg.nxtar.interfaces.AndroidFunctionalityWrapper;
+import ve.ucv.ciens.ccg.nxtar.interfaces.ImageProcessor;
import ve.ucv.ciens.ccg.nxtar.utils.ProjectConstants;
import android.content.Context;
import android.content.pm.ActivityInfo;
@@ -42,6 +42,8 @@ import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.backends.android.AndroidApplication;
import com.badlogic.gdx.backends.android.AndroidApplicationConfiguration;
import com.badlogic.gdx.controllers.mappings.Ouya;
+import com.badlogic.gdx.math.Matrix3;
+import com.badlogic.gdx.math.Vector3;
/**
*
Provides operating system services to the LibGDX platform
* independant code, and handles OpenCV initialization and api calls.
*/
-public class MainActivity extends AndroidApplication implements OSFunctionalityProvider, CVProcessor{
+public class MainActivity extends AndroidApplication implements AndroidFunctionalityWrapper, ImageProcessor{
/**
* Tag used for logging.
*/
@@ -115,9 +117,13 @@ public class MainActivity extends AndroidApplication implements OSFunctionalityP
*
* @param inMat INPUT. The image to analize.
* @param outMat OUTPUT. The image with the markers highlighted.
- * @param codes OUTPUT. The codes for each marker detected. Must be 15 elements long.
+ * @param codes OUTPUT. The codes for each marker detected. Must be {@link ProjectConstants#MAXIMUM_NUMBER_OF_MARKERS} elements long.
+ * @param camMat INPUT. The intrinsic camera matrix.
+ * @param distMat INPUT. The distortion coefficients of the camera.
+ * @param translations OUTPUT. The markers pose translations. Must be {@link ProjectConstants#MAXIMUM_NUMBER_OF_MARKERS} * 3 elements long.
+ * @param rotations OUTPUT. The markers pose rotations. Must be {@link ProjectConstants#MAXIMUM_NUMBER_OF_MARKERS} * 9 elements long.
*/
- private native void getMarkerCodesAndLocations(long inMat, long outMat, int[] codes);
+ private native void getMarkerCodesAndLocations(long inMat, long outMat, int[] codes, long camMat, long distMat, float[] translations, float[] rotations);
/**
*
Wrapper for the findCalibrationPattern native function.
@@ -298,7 +304,7 @@ public class MainActivity extends AndroidApplication implements OSFunctionalityP
/**
*
Implementation of the findMarkersInFrame method.
*
- *
This implementation finds up to 15 markers in the input
+ * <p>This implementation finds up to {@link ProjectConstants#MAXIMUM_NUMBER_OF_MARKERS} markers in the input
* image and returns their codes and locations in the CVMarkerData
* structure. The markers are higlihted in the input image.
*
@@ -307,37 +313,64 @@ public class MainActivity extends AndroidApplication implements OSFunctionalityP
* detected marker codes and their respective locations.
*/
@Override
- public CVMarkerData findMarkersInFrame(byte[] frame){
+ public MarkerData findMarkersInFrame(byte[] frame){
if(ocvOn){
- int codes[] = new int[15];
- Bitmap tFrame, mFrame;
- Mat inImg = new Mat();
- Mat outImg = new Mat();
+ if(cameraCalibrated){
+ int[] codes = new int[ProjectConstants.MAXIMUM_NUMBER_OF_MARKERS];
+ float[] translations = new float[ProjectConstants.MAXIMUM_NUMBER_OF_MARKERS * 3];
+ float[] rotations = new float[ProjectConstants.MAXIMUM_NUMBER_OF_MARKERS * 9];
+ MarkerData data;
+ Bitmap tFrame, mFrame;
+ Mat inImg = new Mat();
+ Mat outImg = new Mat();
- // Decode the input image and convert it to an OpenCV matrix.
- tFrame = BitmapFactory.decodeByteArray(frame, 0, frame.length);
- Utils.bitmapToMat(tFrame, inImg);
+ // Fill the codes array with -1 to indicate markers that were not found.
+ for(int i = 0; i < codes.length; i++)
+ codes[i] = -1;
- // Find up to 15 markers in the input image.
- getMarkerCodesAndLocations(inImg.getNativeObjAddr(), outImg.getNativeObjAddr(), codes);
+ // Decode the input image and convert it to an OpenCV matrix.
+ tFrame = BitmapFactory.decodeByteArray(frame, 0, frame.length);
+ Utils.bitmapToMat(tFrame, inImg);
- // Encode the output image as a JPEG image.
- mFrame = Bitmap.createBitmap(outImg.cols(), outImg.rows(), Bitmap.Config.RGB_565);
- Utils.matToBitmap(outImg, mFrame);
- mFrame.compress(CompressFormat.JPEG, 100, outputStream);
+ // Find the markers in the input image.
+ getMarkerCodesAndLocations(inImg.getNativeObjAddr(), outImg.getNativeObjAddr(), codes, cameraMatrix.getNativeObjAddr(), distortionCoeffs.getNativeObjAddr(), translations, rotations);
- // Create the output data structure.
- CVMarkerData data = new CVMarkerData();
- data.outFrame = outputStream.toByteArray();
- data.markerCodes = codes;
+ // Encode the output image as a JPEG image.
+ mFrame = Bitmap.createBitmap(outImg.cols(), outImg.rows(), Bitmap.Config.RGB_565);
+ Utils.matToBitmap(outImg, mFrame);
+ mFrame.compress(CompressFormat.JPEG, 100, outputStream);
- // Clean up memory.
- tFrame.recycle();
- mFrame.recycle();
- outputStream.reset();
+ // Create and fill the output data structure.
+ data = new MarkerData();
+ data.outFrame = outputStream.toByteArray();
+ data.markerCodes = codes;
+ data.rotationMatrices = new Matrix3[ProjectConstants.MAXIMUM_NUMBER_OF_MARKERS];
+ data.translationVectors = new Vector3[ProjectConstants.MAXIMUM_NUMBER_OF_MARKERS];
- return data;
+ for(int i = 0, p = 0; i < ProjectConstants.MAXIMUM_NUMBER_OF_MARKERS; i++, p += 3){
+ data.translationVectors[i] = new Vector3(translations[p], translations[p + 1], translations[p + 2]);
+ }
+ // TODO: Check that the matrix is being copied correctly.
+ for(int k = 0; k < ProjectConstants.MAXIMUM_NUMBER_OF_MARKERS; k++){
+ data.rotationMatrices[k] = new Matrix3();
+ for(int row = 0; row < 3; row++){
+ for(int col = 0; col < 3; col++){
+ data.rotationMatrices[k].val[col + (row * 3)] = rotations[col + (row * 3) + (9 * k)];
+ }
+ }
+ }
+
+ // Clean up memory.
+ tFrame.recycle();
+ mFrame.recycle();
+ outputStream.reset();
+
+ return data;
+ }else{
+ Gdx.app.debug(TAG, CLASS_NAME + ".findMarkersInFrame(): The camera has not been calibrated.");
+ return null;
+ }
}else{
Gdx.app.debug(TAG, CLASS_NAME + ".findMarkersInFrame(): OpenCV is not ready or failed to load.");
return null;
@@ -358,13 +391,13 @@ public class MainActivity extends AndroidApplication implements OSFunctionalityP
* calibration points array is null.
*/
@Override
- public CVCalibrationData findCalibrationPattern(byte[] frame){
+ public CalibrationData findCalibrationPattern(byte[] frame){
if(ocvOn){
boolean found;
float points[] = new float[ProjectConstants.CALIBRATION_PATTERN_POINTS * 2];
Bitmap tFrame, mFrame;
Mat inImg = new Mat(), outImg = new Mat();
- CVCalibrationData data = new CVCalibrationData();
+ CalibrationData data = new CalibrationData();
// Decode the input frame and convert it to an OpenCV Matrix.
tFrame = BitmapFactory.decodeByteArray(frame, 0, frame.length);
@@ -488,7 +521,7 @@ public class MainActivity extends AndroidApplication implements OSFunctionalityP
* @return True if and only if OpenCV initialized succesfully and calibrateCamera has been called previously.
*/
@Override
- public boolean cameraIsCalibrated() {
+ public boolean isCameraCalibrated() {
return ocvOn && cameraCalibrated;
}
}
From 2e176aae471d906285a12d9275cc1ec0a1bc7a01 Mon Sep 17 00:00:00 2001
From: unknown
Date: Wed, 14 May 2014 16:57:36 -0430
Subject: [PATCH 17/18] Transposed the output rotation matrix.
---
jni/cv_proc.cpp | 7 +++++--
1 file changed, 5 insertions(+), 2 deletions(-)
diff --git a/jni/cv_proc.cpp b/jni/cv_proc.cpp
index 9c38d88..6550821 100644
--- a/jni/cv_proc.cpp
+++ b/jni/cv_proc.cpp
@@ -20,7 +20,7 @@
#include "marker.hpp"
-//#define LOG_ENABLED
+#define LOG_ENABLED
#define MAX_MARKERS 5
#define TRANSLATION_VECTOR_POINTS 3
#define ROTATION_MATRIX_SIZE 9
@@ -79,9 +79,12 @@ JNIEXPORT void JNICALL Java_ve_ucv_ciens_ccg_nxtar_MainActivity_getMarkerCodesAn
}
for(int k = 0; k < vMarkers.size(); k++){
+ log(TAG, "getMarkerCodesAndLocations(): Rotation matrix:");
for(int row = 0; row < 3; row++){
for(int col = 0; col < 3; col++){
- _rt[col + (row * 3) + (9 * k)] = vMarkers[k].rotation.at<float>(row, col);
+ sprintf(codeMsg, "%f ", vMarkers[k].rotation.at<float>(row, col));
+ log(TAG, codeMsg);
+ _rt[row + (col * 3) + (9 * k)] = vMarkers[k].rotation.at<float>(row, col);
}
}
}
From 67c4c99cc532765df93c08d7956874586b40789d Mon Sep 17 00:00:00 2001
From: unknown
Date: Thu, 15 May 2014 12:13:43 -0430
Subject: [PATCH 18/18] Fixed rotation matrix. Added camera parameters getter
methods.
---
jni/cv_proc.cpp | 15 ++---
src/ve/ucv/ciens/ccg/nxtar/MainActivity.java | 66 ++++++--------------
2 files changed, 26 insertions(+), 55 deletions(-)
diff --git a/jni/cv_proc.cpp b/jni/cv_proc.cpp
index 6550821..5ca822e 100644
--- a/jni/cv_proc.cpp
+++ b/jni/cv_proc.cpp
@@ -20,7 +20,7 @@
#include "marker.hpp"
-#define LOG_ENABLED
+//#define LOG_ENABLED
#define MAX_MARKERS 5
#define TRANSLATION_VECTOR_POINTS 3
#define ROTATION_MATRIX_SIZE 9
@@ -52,9 +52,9 @@ JNIEXPORT void JNICALL Java_ve_ucv_ciens_ccg_nxtar_MainActivity_getMarkerCodesAn
cv::Mat& mbgr = *(cv::Mat*)addrMatOut;
cv::Mat& mCam = *(cv::Mat*)camMat;
cv::Mat& mDist = *(cv::Mat*)distMat;
- jint * _codes = env->GetIntArrayElements(codes, 0);
- jfloat * _tr = env->GetFloatArrayElements(translations, 0);
- jfloat * _rt = env->GetFloatArrayElements(rotations, 0);
+ jint * _codes = env->GetIntArrayElements(codes, 0);
+ jfloat * _tr = env->GetFloatArrayElements(translations, 0);
+ jfloat * _rt = env->GetFloatArrayElements(rotations, 0);
// Convert the input image to the BGR color space.
log(TAG, "getMarkerCodesAndLocations(): Converting color space before processing.");
@@ -79,12 +79,9 @@ JNIEXPORT void JNICALL Java_ve_ucv_ciens_ccg_nxtar_MainActivity_getMarkerCodesAn
}
for(int k = 0; k < vMarkers.size(); k++){
- log(TAG, "getMarkerCodesAndLocations(): Rotation matrix:");
for(int row = 0; row < 3; row++){
for(int col = 0; col < 3; col++){
- sprintf(codeMsg, "%f ", vMarkers[k].rotation.at<float>(row, col));
- log(TAG, codeMsg);
- _rt[row + (col * 3) + (9 * k)] = vMarkers[k].rotation.at<float>(row, col);
+ _rt[col + (row * 3) + (9 * k)] = vMarkers[k].rotation.at<float>(row, col);
}
}
}
@@ -128,7 +125,7 @@ JNIEXPORT jboolean JNICALL Java_ve_ucv_ciens_ccg_nxtar_MainActivity_findCalibrat
if(found){
log(TAG, "findCalibrationPattern(): Copying calibration points.");
for(size_t i = 0, p = 0; i < v_points.size(); i++, p += 2){
- _points[p] = (jfloat)v_points[i].x;
+ _points[p ] = (jfloat)v_points[i].x;
_points[p + 1] = (jfloat)v_points[i].y;
}
}
diff --git a/src/ve/ucv/ciens/ccg/nxtar/MainActivity.java b/src/ve/ucv/ciens/ccg/nxtar/MainActivity.java
index e777b79..ee04529 100644
--- a/src/ve/ucv/ciens/ccg/nxtar/MainActivity.java
+++ b/src/ve/ucv/ciens/ccg/nxtar/MainActivity.java
@@ -301,17 +301,6 @@ public class MainActivity extends AndroidApplication implements AndroidFunctiona
// CVProcessor interface methods. //
////////////////////////////////////
- /**
- *
Implementation of the findMarkersInFrame method.
- *
- *
This implementation finds up to {@link ProjectConstants.MAXIMUM_NUMBER_OF_MARKERS} markers in the input
- * image and returns their codes and locations in the CVMarkerData
- * structure. The markers are higlihted in the input image.
- *
- * @param frame The JPEG encoded input image.
- * @return A data structure containing the processed output image, the
- * detected marker codes and their respective locations.
- */
@Override
public MarkerData findMarkersInFrame(byte[] frame){
if(ocvOn){
@@ -351,7 +340,6 @@ public class MainActivity extends AndroidApplication implements AndroidFunctiona
data.translationVectors[i] = new Vector3(translations[p], translations[p + 1], translations[p + 2]);
}
- // TODO: Check that the matrix is being copied correctly.
for(int k = 0; k < ProjectConstants.MAXIMUM_NUMBER_OF_MARKERS; k++){
data.rotationMatrices[k] = new Matrix3();
for(int row = 0; row < 3; row++){
@@ -377,19 +365,6 @@ public class MainActivity extends AndroidApplication implements AndroidFunctiona
}
}
- /**
- *
Implementation of the findCalibrationPattern method.
- *
- *
Attempts to detect a checkerboard calibration pattern in the input image.
- * If the pattenr is found the method returns an image with the pattern
- * highlighted and the spatial location of the calibration points in the
- * output data structure.
- *
- * @param frame The JPEG encoded input image.
- * @return A data structure containing the processed output image and the
- * location of the calibration points. If the pattern was not found, the returnd
- * calibration points array is null.
- */
@Override
public CalibrationData findCalibrationPattern(byte[] frame){
if(ocvOn){
@@ -428,11 +403,6 @@ public class MainActivity extends AndroidApplication implements AndroidFunctiona
}
}
- /**
- *
Implementation of the calibrateCamera method.
- *
- *
Obtains the intrinsic camera parameters necesary for calibration.
- */
@Override
public void calibrateCamera(float[][] calibrationSamples, byte[] frame) {
if(ocvOn){
@@ -464,16 +434,6 @@ public class MainActivity extends AndroidApplication implements AndroidFunctiona
}
}
-
- /**
- *
Implementation of the undistorFrame method.
- *
- *
Removes camera lens distortion from the input image using the
- * camera parameters obtained by the calibrateCamera method.
- *
- * @return A JPEG encoded image that is the input image after distortion correction. If the
- * camera has not been calibrated or OpenCV failed to load returns null.
- */
@Override
public byte[] undistortFrame(byte[] frame){
if(ocvOn){
@@ -514,14 +474,28 @@ public class MainActivity extends AndroidApplication implements AndroidFunctiona
}
}
- /**
- *
Indicates if OpenCV has been sucessfully initialized and used
- * to obtain the camera parameters for calibration.
- *
- * @return True if and only if OpenCV initialized succesfully and calibrateCamera has been called previously.
- */
@Override
public boolean isCameraCalibrated() {
return ocvOn && cameraCalibrated;
}
+
+ @Override
+ public float getFocalPointX() {
+ return ocvOn && cameraCalibrated ? (float)cameraMatrix.get(0, 0)[0] : 0.0f;
+ }
+
+ @Override
+ public float getFocalPointY() {
+ return ocvOn && cameraCalibrated ? (float)cameraMatrix.get(1, 1)[0] : 0.0f;
+ }
+
+ @Override
+ public float getCameraCenterX() {
+ return ocvOn && cameraCalibrated ? (float)cameraMatrix.get(0, 2)[0] : 0.0f;
+ }
+
+ @Override
+ public float getCameraCenterY() {
+ return ocvOn && cameraCalibrated ? (float)cameraMatrix.get(1, 2)[0] : 0.0f;
+ }
}