PageRenderTime 53ms CodeModel.GetById 24ms RepoModel.GetById 1ms app.codeStats 0ms

/test/ARToolkitTest/src/ARTestApp.cpp

https://github.com/rabidpraxis/Cinder
C++ | 341 lines | 250 code | 60 blank | 31 comment | 42 complexity | 2c236c2f207a0d57df8fcda0415baf39 MD5 | raw file
  1. #include "flint/app/AppBasic.h"
  2. #include "flint/Capture.h"
  3. #include "flint/Matrix.h"
  4. #include "HodginParticlesApp.h"
  5. #include "QuickTimeApp.h"
  6. #include "BoxSpinApp.h"
  7. #include "CarModule.h"
  8. #include "object.h"
  9. #include <stdio.h>
  10. #include <stdlib.h> // malloc(), free()
  11. #include <AR/config.h>
  12. #include <AR/video.h>
  13. #include <AR/param.h> // arParamDisp()
  14. #include <AR/ar.h>
  15. #include <AR/gsub_lite.h>
  16. using namespace fli;
  17. using namespace fli::app;
  18. #include <list>
  19. using std::list;
  20. using std::vector;
  21. //#define USE_AR_VIDEO
// Application class: grabs camera frames, runs ARToolKit marker detection on
// them, and drives a list of visual Modules keyed to the detected markers.
class ARTestApp : public AppBasic {
public:
	~ARTestApp();
	void prepareSettings( Settings *settings );
	void setup();
	void keyDown( KeyEvent event );
	void update();
	void draw();
	// Opens the capture source, loads + rescales the camera parameter file and
	// allocates the video texture. Returns TRUE on success, FALSE on failure.
	int setupCamera( const char *cparam_name, char *vconf, ARParam *cparam );

	gl::Texture          *mTexture;      // camera image, re-uploaded every frame in update()
	std::vector<Module*> mModules;       // visual modules; 3 are pushed in setup()
	ObjectData_T         *object;        // marker/pattern descriptors loaded via read_ObjData()
	int                  objectnum;      // number of entries in 'object'
	float                mCurrentAlpha;  // eased toward 0 while a marker is visible, 1 otherwise
	int                  mLockedMode;    // object index forced visible, or -1 when unlocked
#if ! defined( USE_AR_VIDEO )
	fli::Capture         *mCapture;      // Flint capture used instead of the AR video library
#endif
};
// ============================================================================
// Constants
// ============================================================================
#define VIEW_SCALEFACTOR 2.5 // 1.0 ARToolKit unit becomes 0.025 of my OpenGL units.
#define VIEW_DISTANCE_MIN 2 // Objects closer to the camera than this will not be displayed.
#define VIEW_DISTANCE_MAX 4000.0 // Objects further away from the camera than this will not be displayed.
// ============================================================================
// Global variables
// ============================================================================
// Preferences.
// Image acquisition: points at the most recently captured frame while
// update() runs marker detection on it.
static ARUint8 *gARTImage = NULL;
// Marker detection: binarization threshold handed to arDetectMarker().
static int gARTThreshhold = 80;
static long gCallCountMarkerDetect = 0;
// Transformation matrix retrieval — single-marker variant, currently disabled
// (the active code path uses the per-object data loaded from object_data2).
#if 0
static double gPatt_width = 80.0; // Per-marker, but we are using only 1 marker.
static double gPatt_centre[2] = {0.0, 0.0}; // Per-marker, but we are using only 1 marker.
static double gPatt_trans[3][4]; // Per-marker, but we are using only 1 marker.
static int gPatt_found = FALSE; // Per-marker, but we are using only 1 marker.
static int gPatt_id; // Per-marker, but we are using only 1 marker.
#endif
// Drawing: camera calibration and argl (ARToolKit OpenGL helper) context.
static ARParam gARTCparam;
static ARGL_CONTEXT_SETTINGS_REF gArglSettings = NULL;
  67. int ARTestApp::setupCamera( const char *cparam_name, char *vconf, ARParam *cparam )
  68. {
  69. ARParam wparam;
  70. int xsize, ysize;
  71. #if defined( USE_AR_VIDEO )
  72. if( arVideoOpen( vconf ) < 0 ) {
  73. fprintf(stderr, "setupCamera(): Unable to open connection to camera.\n");
  74. return (FALSE);
  75. }
  76. if( arVideoInqSize(&xsize, &ysize) < 0 ) return (FALSE);
  77. fprintf(stdout, "Camera image size (x,y) = (%d,%d)\n", xsize, ysize);
  78. #else
  79. xsize = 800;
  80. ysize = 600;
  81. mCapture = new fli::Capture( xsize, ysize, SurfaceChannelOrder::ARGB );
  82. mCapture->startCapture();
  83. #endif
  84. if (arParamLoad( cparam_name, 1, &wparam ) < 0) {
  85. fprintf(stderr, "setupCamera(): Error loading parameter file %s for camera.\n", cparam_name);
  86. return (FALSE);
  87. }
  88. arParamChangeSize( &wparam, xsize, ysize, cparam );
  89. fprintf( stdout, "*** Camera Parameter ***\n" );
  90. arParamDisp( cparam );
  91. arInitCparam( cparam );
  92. #if defined( USE_AR_VIDEO )
  93. if( arVideoCapStart() != 0 ) {
  94. fprintf(stderr, "setupCamera(): Unable to begin camera data capture.\n");
  95. return (FALSE);
  96. }
  97. #endif
  98. mTexture = new gl::Texture( xsize, ysize, GL_RGBA, false, false );
  99. return (TRUE);
  100. }
  101. static int setupMarker(const char *patt_name, int *patt_id)
  102. {
  103. // Loading only 1 pattern in this example.
  104. if ((*patt_id = arLoadPatt(patt_name)) < 0) {
  105. fprintf(stderr, "setupMarker(): pattern load error !!\n");
  106. return (FALSE);
  107. }
  108. return (TRUE);
  109. }
  110. ARTestApp::~ARTestApp()
  111. {
  112. arglCleanup(gArglSettings);
  113. arVideoCapStop();
  114. arVideoClose();
  115. }
  116. void ARTestApp::prepareSettings( Settings *settings )
  117. {
  118. settings->setFrameRate( 24 );
  119. }
  120. void ARTestApp::setup()
  121. {
  122. const char *cparam_name = "camera_para.dat";
  123. #ifdef _WIN32
  124. char *vconf = "Data\\WDM_camera_flipV.xml";
  125. #else
  126. char *vconf = "";
  127. #endif
  128. // const char *patt_name = "patt.kanji";
  129. const char *model_name = "object_data2";
  130. std::string modelPath = getAppPath() + std::string( "/Contents/Resources/" ) + std::string( model_name );
  131. std::cout << "Reading model at " << modelPath << std::endl;
  132. if( (object=read_ObjData( ( getAppPath() + std::string( "/Contents/Resources/" ) ).c_str(), modelPath.c_str(), &objectnum)) == NULL )
  133. exit(0);
  134. printf("Objectfile num = %d\n", objectnum);
  135. std::string camPath = getAppPath() + std::string( "/Contents/Resources/" ) + std::string( cparam_name );
  136. if( ! setupCamera( camPath.c_str(), vconf, &gARTCparam) ) {
  137. fprintf(stderr, "main(): Unable to set up AR camera.\n");
  138. exit(-1);
  139. }
  140. // Setup argl library for current context.
  141. if ((gArglSettings = arglSetupForCurrentContext()) == NULL) {
  142. fprintf(stderr, "main(): arglSetupForCurrentContext() returned error.\n");
  143. exit(-1);
  144. }
  145. arUtilTimerReset();
  146. /* std::string pattPath = getAppPath() + std::string( "/Contents/Resources/" ) + std::string( patt_name );
  147. if( ! setupMarker( pattPath.c_str(), &gPatt_id) ) {
  148. fprintf(stderr, "main(): Unable to set up AR marker.\n");
  149. quit();
  150. }*/
  151. mCurrentAlpha = 1.0f;
  152. mLockedMode = -1;
  153. mModules.push_back( new QuickTimeApp() );
  154. mModules.push_back( new HodginParticlesApp() );
  155. mModules.push_back( new CarModule() );
  156. for( vector<Module*>::iterator modIt = mModules.begin(); modIt != mModules.end(); ++modIt )
  157. (*modIt)->setup( this );
  158. }
  159. void ARTestApp::keyDown( KeyEvent event )
  160. {
  161. switch( event.getChar() ) {
  162. case 'f':
  163. setFullScreen( ! isFullScreen() );
  164. break;
  165. case 'l':
  166. if( mLockedMode >= 0 )
  167. mLockedMode = -1;
  168. else {
  169. for( int i = 0; i < objectnum; ++i )
  170. if( object[i].visible )
  171. mLockedMode = i;
  172. }
  173. break;
  174. }
  175. }
// Per-frame logic: grab a new camera frame (AR video or Flint capture,
// depending on USE_AR_VIDEO), run ARToolKit marker detection on it, refresh
// each object's visibility and transform, then tick every module.
void ARTestApp::update()
{
	ARMarkerInfo *marker_info; // Pointer to array holding the details of detected markers.
	int marker_num; // Count of number of markers detected.
	int j, k;

	// Grab a video frame. Note: the body below only runs when a NEW frame is
	// available; on frames with no new capture, detection is skipped entirely.
#if defined( USE_AR_VIDEO )
	ARUint8 *image;
	if ((image = arVideoGetImage()) != NULL) {
#else
	if( mCapture->checkNewFrame() ) {
#endif
#if defined( USE_AR_VIDEO )
	gARTImage = image; // Save the fetched image.
	mTexture->enableAndBind();
#else
	// Upload the frame to the GL texture, then hand ARToolKit a raw pointer
	// into the capture surface. NOTE(review): the const_cast assumes
	// arDetectMarker() never writes through gARTImage — confirm.
	const fli::Surface8u &surface( mCapture->getSurface() );
	mTexture->update( surface );
	gARTImage = const_cast<uint8_t*>( surface.getData() );
#endif
	gCallCountMarkerDetect++; // Increment ARToolKit FPS counter.

	// Detect the markers in the video frame.
	if (arDetectMarker(gARTImage, gARTThreshhold, &marker_info, &marker_num) < 0) {
		exit(-1);
	}

	// check for known patterns: for each object, find the detected marker with
	// a matching id, keeping the candidate with the highest confidence factor.
	for( int i = 0; i < objectnum; i++ ) {
		k = -1;
		for( j = 0; j < marker_num; j++ ) {
			if( object[i].id == marker_info[j].id) {
				/* you've found a pattern */
				if( k == -1 ) k = j;
				else /* make sure you have the best pattern (highest confidence factor) */
					if( marker_info[k].cf < marker_info[j].cf ) k = j;
			}
		}
		if( k == -1 ) {
			object[i].visible = 0;
			continue;
		}

		/* calculate the transform for each marker: first sighting uses the
		   from-scratch solver; an already-visible object uses the
		   history-based continuous solver for temporal stability. */
		if( object[i].visible == 0 ) {
			arGetTransMat(&marker_info[k],
				object[i].marker_center, object[i].marker_width,
				object[i].trans);
		}
		else {
			arGetTransMatCont(&marker_info[k], object[i].trans,
				object[i].marker_center, object[i].marker_width,
				object[i].trans);
		}
		object[i].visible = 1;
	}
	}

	// Locked mode overrides detection: only the chosen object is visible.
	if( mLockedMode >= 0 ) {
		for( int i = 0; i < objectnum; i++ ) {
			object[i].visible = 0;
		}
		object[mLockedMode].visible = 1;
	}

	// NOTE(review): mModules is indexed by object index — this assumes
	// objectnum == mModules.size() (3 modules are pushed in setup()); confirm
	// the object_data2 file defines exactly that many patterns.
	for( int mod = 0; mod < objectnum; ++mod )
		mModules[mod]->update( this, object[mod].visible );
}
  239. void ARTestApp::draw()
  240. {
  241. GLdouble p[16];
  242. GLdouble m[16];
  243. // Select correct buffer for this context.
  244. glClearColor( 0, 0, 0, 1 ); // Clear the buffers for new frame.
  245. gl::enableDepthWrite();
  246. glClear( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT ); // Clear the buffers for new frame.
  247. gl::disableDepthRead();
  248. gl::disableDepthWrite();
  249. gl::enableAlphaBlending();
  250. if( object[0].visible || object[1].visible || object[2].visible )
  251. mCurrentAlpha += ( 0.0f - mCurrentAlpha ) * 0.05f;
  252. else
  253. mCurrentAlpha += ( 1.0f - mCurrentAlpha ) * 0.05f;
  254. gl::setMatricesScreenOrtho( getWindowWidth(), getWindowHeight() );
  255. // draw the camera image centered
  256. glColor4f( 1, 1, 1, 1 );//0.2f + mCurrentAlpha * 0.8f );
  257. float width = ( getWindowHeight() * ( mTexture->getWidth() / (float)mTexture->getHeight() ) );
  258. mTexture->draw( ( getWindowWidth() - width ) / 2.0f, 0, width, getWindowHeight() );
  259. glDisable( mTexture->getTarget() );
  260. #if defined( USE_AR_VIDEO )
  261. arVideoCapNext();
  262. gARTImage = NULL; // Image data is no longer valid after calling arVideoCapNext().
  263. #endif
  264. // Projection transformation.
  265. arglCameraFrustumRH( &gARTCparam, VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, p );
  266. glMatrixMode( GL_PROJECTION );
  267. glLoadMatrixd( p );
  268. // Calculate the camera position relative to the marker.
  269. // Replace VIEW_SCALEFACTOR with 1.0 to make one drawing unit equal to 1.0 ARToolKit units (usually millimeters).
  270. for( int mod = 0; mod < objectnum; ++mod ) {
  271. if( object[mod].visible ) {
  272. arglCameraViewRH( object[mod].trans, m, VIEW_SCALEFACTOR );
  273. glMatrixMode(GL_MODELVIEW);
  274. glLoadMatrixd( m );
  275. fli::Matrix44d mvd( m );
  276. mModules[mod]->draw( this, mvd * Vec4d( 0, 0, 0, 1 ) );
  277. }
  278. }
  279. }
// This macro tells Flint to actually create the application (it expands to
// the platform entry point that instantiates ARTestApp).
FLI_APP_BASIC( ARTestApp )