Commit 9627c6e6331753dbc0903ba73cc4371e30c9f421

Authored by Jiaming Guo
1 parent bf7e73b2

add splitting and mapping

Showing 2 changed files with 510 additions and 31 deletions   Show diff stats
@@ -32,16 +32,37 @@ stim::camera cam; //camera object @@ -32,16 +32,37 @@ stim::camera cam; //camera object
32 unsigned num_nets = 0; 32 unsigned num_nets = 0;
33 stim::gl_network<float> GT; //ground truth network 33 stim::gl_network<float> GT; //ground truth network
34 stim::gl_network<float> T; //test network 34 stim::gl_network<float> T; //test network
  35 +stim::gl_network<float> _GT; //split GT
  36 +stim::gl_network<float> _T; //split T
  37 +
  38 +unsigned ind = 0; //nonzero when the split networks (_GT/_T) are used for rendering (mapping available)
  39 +
  40 +std::vector<unsigned> _gt_t; // store indices of nearest edge points in _T for _GT
  41 +std::vector<unsigned> _t_gt; // store indices of nearest edge points in _GT for _T
35 42
36 //hard-coded parameters 43 //hard-coded parameters
37 float resample_rate = 0.5f; //sample rate for the network (fraction of sigma used as the maximum sample rate) 44 float resample_rate = 0.5f; //sample rate for the network (fraction of sigma used as the maximum sample rate)
38 float camera_factor = 1.2f; //start point of the camera as a function of X and Y size 45 float camera_factor = 1.2f; //start point of the camera as a function of X and Y size
39 float orbit_factor = 0.01f; //degrees per pixel used to orbit the camera 46 float orbit_factor = 0.01f; //degrees per pixel used to orbit the camera
40 47
  48 +//mouse click
  49 +bool LButtonDown = false; // true when left button down
  50 +bool RButtonDown = false;
  51 +
41 //mouse position tracking 52 //mouse position tracking
42 int mouse_x; 53 int mouse_x;
43 int mouse_y; 54 int mouse_y;
44 55
  56 +bool compareMode = true; // default mode is compare mode
  57 +bool mappingMode = false;
  58 +
  59 +// random color map
  60 +std::vector<float> colormap;
  61 +
  62 +// create display lists
  63 +GLuint dlist1;
  64 +GLuint dlist2;
  65 +
45 //OpenGL objects 66 //OpenGL objects
46 GLuint cmap_tex = 0; //texture name for the color map 67 GLuint cmap_tex = 0; //texture name for the color map
47 68
@@ -92,40 +113,107 @@ void glut_render_modelview(){ @@ -92,40 +113,107 @@ void glut_render_modelview(){
92 gluLookAt(eye[0], eye[1], eye[2], focus[0], focus[1], focus[2], up[0], up[1], up[2]); //set up the OpenGL camera 113 gluLookAt(eye[0], eye[1], eye[2], focus[0], focus[1], focus[2], up[0], up[1], up[2]); //set up the OpenGL camera
93 } 114 }
94 115
  116 +
95 //draws the network(s) 117 //draws the network(s)
96 void glut_render(void) { 118 void glut_render(void) {
97 119
98 - if(num_nets == 1){ //if a single network is loaded  
99 - glut_render_single_projection(); //fill the entire viewport  
100 - glut_render_modelview(); //set up the modelview matrix with camera details  
101 - glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); //clear the screen  
102 - GT.glCenterline(GT.nmags() - 1); //render the GT network (the only one loaded)  
103 - } 120 + if(ind == 0){
  121 + if(num_nets == 1){ //if a single network is loaded
  122 + glut_render_single_projection(); //fill the entire viewport
  123 + glut_render_modelview(); //set up the modelview matrix with camera details
  124 + glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); //clear the screen
  125 + GT.glCenterline(GT.nmags() - 1); //render the GT network (the only one loaded)
  126 + }
104 127
105 - if(num_nets == 2){ //if two networks are loaded 128 + if(num_nets == 2){ //if two networks are loaded
106 129
107 - glut_render_left_projection(); //set up a projection for the left half of the window  
108 - glut_render_modelview(); //set up the modelview matrix using camera details  
109 - glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); //clear the screen 130 + glut_render_left_projection(); //set up a projection for the left half of the window
  131 + glut_render_modelview(); //set up the modelview matrix using camera details
  132 + glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); //clear the screen
110 133
111 - glEnable(GL_TEXTURE_1D); //enable texture mapping  
112 - glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE); //texture map will be used as the network color  
113 - glBindTexture(GL_TEXTURE_1D, cmap_tex); //bind the Brewer texture map 134 + glEnable(GL_TEXTURE_1D); //enable texture mapping
  135 + glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE); //texture map will be used as the network color
  136 + glBindTexture(GL_TEXTURE_1D, cmap_tex); //bind the Brewer texture map
114 137
115 - GT.glCenterline(GT.nmags() - 1); //render the GT network  
116 -  
117 - glut_render_right_projection(); //set up a projection for the right half of the window  
118 - glut_render_modelview(); //set up the modelview matrix using camera details  
119 - T.glCenterline(T.nmags() - 1); //render the T network 138 + GT.glCenterline(GT.nmags() - 1); //render the GT network
120 139
  140 + glut_render_right_projection(); //set up a projection for the right half of the window
  141 + glut_render_modelview(); //set up the modelview matrix using camera details
  142 + T.glCenterline(T.nmags() - 1); //render the T network
  143 + }
  144 + }
  145 + else{
  146 + if(num_nets == 1){ //if a single network is loaded
  147 + glut_render_single_projection(); //fill the entire viewport
  148 + glut_render_modelview(); //set up the modelview matrix with camera details
  149 + glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); //clear the screen
  150 + _GT.glCenterline(_GT.nmags() - 1); //render the GT network (the only one loaded)
  151 + }
  152 + if(num_nets == 2){ //if two networks are loaded
  153 + if(compareMode){
  154 + glut_render_left_projection(); //set up a projection for the left half of the window
  155 + glut_render_modelview(); //set up the modelview matrix using camera details
  156 + glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); //clear the screen
  157 +
  158 + glEnable(GL_TEXTURE_1D); //enable texture mapping
  159 + glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE); //texture map will be used as the network color
  160 + glBindTexture(GL_TEXTURE_1D, cmap_tex); //bind the Brewer texture map
  161 +
  162 + _GT.glCenterline(_GT.nmags() - 1); //render the GT network
  163 +
  164 + glut_render_right_projection(); //set up a projection for the right half of the window
  165 + glut_render_modelview(); //set up the modelview matrix using camera details
  166 + _T.glCenterline(_T.nmags() - 1); //render the T network
  167 +
  168 + }
  169 + else{
  170 + glut_render_left_projection(); //set up a projection for the left half of the window
  171 + glut_render_modelview(); //set up the modelview matrix using camera details
  172 + glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); //clear the screen
  173 +
  174 + _GT.glRandColorCenterlineGT(dlist1, _gt_t, colormap);
  175 +
  176 + glut_render_right_projection(); //set up a projection for the right half of the window
  177 + glut_render_modelview(); //set up the modelview matrix using camera details
  178 + _T.glRandColorCenterlineT(dlist2, _t_gt, colormap);
  179 + }
  180 + }
121 } 181 }
122 182
  183 + std::ostringstream ss;
  184 + if(mappingMode) // if it is in mapping mode
  185 + ss<< "Mapping Mode";
  186 + else
  187 + ss<< "Compare Mode"; // default mode is compare mode
  188 +
  189 + glDisable(GL_TEXTURE_1D);
  190 + glMatrixMode(GL_PROJECTION); //Set up the 2d viewport for text printing
  191 + glPushMatrix();
  192 + glLoadIdentity();
  193 + int X = glutGet(GLUT_WINDOW_WIDTH);
  194 + int Y = glutGet(GLUT_WINDOW_HEIGHT);
  195 + glViewport(0, 0, X/2, Y);
  196 + gluOrtho2D(0, X, 0, Y);
  197 + glColor3f(0.0, 1.0, 0.0); // using green to show mode
  198 +
  199 + glMatrixMode(GL_MODELVIEW);
  200 + glPushMatrix();
  201 + glLoadIdentity();
  202 +
  203 + glRasterPos2f(0, 5); //print text in the bottom left corner
  204 + glutBitmapString(GLUT_BITMAP_TIMES_ROMAN_24, (const unsigned char*) (ss.str().c_str()));
  205 +
  206 + glPopMatrix();
  207 + glMatrixMode(GL_PROJECTION);
  208 + glPopMatrix();
  209 +
123 glutSwapBuffers(); 210 glutSwapBuffers();
124 } 211 }
125 212
126 // defines camera motion based on mouse dragging 213 // defines camera motion based on mouse dragging
127 void glut_motion(int x, int y){ 214 void glut_motion(int x, int y){
128 215
  216 + if(LButtonDown == true && RButtonDown == false){
129 217
130 float theta = orbit_factor * (mouse_x - x); //determine the number of degrees along the x-axis to rotate 218 float theta = orbit_factor * (mouse_x - x); //determine the number of degrees along the x-axis to rotate
131 float phi = orbit_factor * (y - mouse_y); //number of degrees along the y-axis to rotate 219 float phi = orbit_factor * (y - mouse_y); //number of degrees along the y-axis to rotate
@@ -136,12 +224,47 @@ void glut_motion(int x, int y){ @@ -136,12 +224,47 @@ void glut_motion(int x, int y){
136 mouse_y = y; 224 mouse_y = y;
137 225
138 glutPostRedisplay(); //re-draw the visualization 226 glutPostRedisplay(); //re-draw the visualization
  227 + }
139 } 228 }
140 229
141 // sets the mouse position when clicked 230 // sets the mouse position when clicked
142 void glut_mouse(int button, int state, int x, int y){ 231 void glut_mouse(int button, int state, int x, int y){
143 - mouse_x = x;  
144 - mouse_y = y; 232 +
  233 + if(button == GLUT_LEFT_BUTTON && state == GLUT_DOWN){
  234 + mouse_x = x;
  235 + mouse_y = y;
  236 + LButtonDown = true;
  237 + }
  238 + else if(button == GLUT_RIGHT_BUTTON && state == GLUT_DOWN){
  239 + mouse_x = x;
  240 + mouse_y = y;
  241 + RButtonDown = true;
  242 + }
  243 + else if(button == GLUT_LEFT_BUTTON && state == GLUT_UP){
  244 + mouse_x = x;
  245 + mouse_y = y;
  246 + LButtonDown = false;
  247 + }
  248 + else if(button == GLUT_RIGHT_BUTTON && state == GLUT_UP){
  249 + mouse_x = x;
  250 + mouse_y = y;
  251 + RButtonDown = false;
  252 + }
  253 +}
  254 +
  255 +void glut_keyboard(unsigned char key, int x, int y){
  256 + if(key == 'm') // press m to change mode
  257 + {
  258 + if(compareMode && !mappingMode){ // if it is in compare mode
  259 + compareMode = false;
  260 + mappingMode = true;
  261 + }
  262 + else{ // if it is in mapping mode
  263 + compareMode = true;
  264 + mappingMode = false;
  265 + }
  266 + }
  267 + glutPostRedisplay();
145 } 268 }
146 269
147 #define BREWER_CTRL_PTS 11 //number of control points in the Brewer map 270 #define BREWER_CTRL_PTS 11 //number of control points in the Brewer map
@@ -190,16 +313,16 @@ void glut_initialize(){ @@ -190,16 +313,16 @@ void glut_initialize(){
190 glutDisplayFunc(glut_render); //function executed for rendering - renders networks 313 glutDisplayFunc(glut_render); //function executed for rendering - renders networks
191 glutMouseFunc(glut_mouse); //executed on a mouse click - sets starting mouse positions for rotations 314 glutMouseFunc(glut_mouse); //executed on a mouse click - sets starting mouse positions for rotations
192 glutMotionFunc(glut_motion); //executed when the mouse is moved while a button is pressed 315 glutMotionFunc(glut_motion); //executed when the mouse is moved while a button is pressed
  316 + if(ind == 1) //register the keyboard callback only when mapping mode is available
  317 + glutKeyboardFunc(glut_keyboard);
193 318
194 - texture_initialize(); //set up texture mapping (create texture maps, enable features) 319 + texture_initialize(); //set up texture mapping (create texture maps, enable features)
195 320
196 stim::vec3<float> c = bb.center(); //get the center of the network bounding box 321 stim::vec3<float> c = bb.center(); //get the center of the network bounding box
197 322
198 //place the camera along the z-axis at a distance determined by the network size along x and y 323 //place the camera along the z-axis at a distance determined by the network size along x and y
199 cam.setPosition(c + stim::vec<float>(0, 0, camera_factor * std::max(bb.size()[0], bb.size()[1]))); 324 cam.setPosition(c + stim::vec<float>(0, 0, camera_factor * std::max(bb.size()[0], bb.size()[1])));
200 cam.LookAt(c[0], c[1], c[2]); //look at the center of the network 325 cam.LookAt(c[0], c[1], c[2]); //look at the center of the network
201 -  
202 - glClearColor(1, 1, 1, 1);  
203 } 326 }
204 327
205 #ifdef __CUDACC__ 328 #ifdef __CUDACC__
@@ -231,6 +354,27 @@ void compare(float sigma, int device){ @@ -231,6 +354,27 @@ void compare(float sigma, int device){
231 std::cout << "FPR: " << FNR << std::endl; 354 std::cout << "FPR: " << FNR << std::endl;
232 } 355 }
233 356
  357 +void map(float sigma, int device){
  358 +
  359 + _GT.split(GT, T, sigma, device);
  360 + _T.split(T, GT, sigma, device);
  361 +
  362 + _GT.mapping(_T, _gt_t, device);
  363 + _T.mapping(_GT, _t_gt, device);
  364 +
  365 + size_t num = _gt_t.size(); // also create random colors for unmapped edges, although they won't be used
  366 + colormap.resize(3*num);
  367 + for(int i = 0; i < 3*num; i++)
  368 + colormap[i] = rand()/(float)RAND_MAX;
  369 +
  370 + //calculate the metrics
  371 + float FPR = _GT.average(0); //calculate the metrics
  372 + float FNR = _T.average(0);
  373 +
  374 + std::cout << "FNR: " << FPR << std::endl; //print false alarms and misses
  375 + std::cout << "FPR: " << FNR << std::endl;
  376 +}
  377 +
234 // writes features of the networks i.e average segment length, tortuosity, branching index, contraction, fractal dimension, number of end and branch points to a csv file 378 // writes features of the networks i.e average segment length, tortuosity, branching index, contraction, fractal dimension, number of end and branch points to a csv file
235 // Pranathi wrote this - saves network features to a CSV file 379 // Pranathi wrote this - saves network features to a CSV file
236 void features(std::string filename){ 380 void features(std::string filename){
@@ -265,7 +409,7 @@ void advertise(){ @@ -265,7 +409,7 @@ void advertise(){
265 std::cout<<"Thank you for using the NetMets network comparison tool!"<<std::endl; 409 std::cout<<"Thank you for using the NetMets network comparison tool!"<<std::endl;
266 std::cout<<"Scalable Tissue Imaging and Modeling (STIM) Lab, University of Houston"<<std::endl; 410 std::cout<<"Scalable Tissue Imaging and Modeling (STIM) Lab, University of Houston"<<std::endl;
267 std::cout<<"Developers: Pranathi Vemuri, David Mayerich"<<std::endl; 411 std::cout<<"Developers: Pranathi Vemuri, David Mayerich"<<std::endl;
268 - std::cout<<"Source: https://git.stim.ee.uh.edu/segmentation/netmets"<<std::endl; 412 + std::cout<<"Source: https://git.stim.ee.uh.edu/segmentation/netmets" <<std::endl;
269 std::cout<<"========================================================================="<<std::endl<<std::endl; 413 std::cout<<"========================================================================="<<std::endl<<std::endl;
270 414
271 std::cout<<"usage: netmets file1 file2 --sigma 3"<<std::endl; 415 std::cout<<"usage: netmets file1 file2 --sigma 3"<<std::endl;
@@ -286,6 +430,7 @@ int main(int argc, char* argv[]) @@ -286,6 +430,7 @@ int main(int argc, char* argv[])
286 args.add("gui", "display the network or network comparison using OpenGL"); 430 args.add("gui", "display the network or network comparison using OpenGL");
287 args.add("device", "choose specific device to run", "0"); 431 args.add("device", "choose specific device to run", "0");
288 args.add("features", "save features to a CSV file, specify file name"); 432 args.add("features", "save features to a CSV file, specify file name");
  433 + args.add("mapping", "mapping input according to similarity");
289 434
290 args.parse(argc, argv); //parse the user arguments 435 args.parse(argc, argv); //parse the user arguments
291 436
@@ -310,14 +455,25 @@ int main(int argc, char* argv[]) @@ -310,14 +455,25 @@ int main(int argc, char* argv[])
310 features(args["features"].as_string()); 455 features(args["features"].as_string());
311 GT = GT.resample(resample_rate * sigma); //resample both networks based on the sigma value 456 GT = GT.resample(resample_rate * sigma); //resample both networks based on the sigma value
312 T = T.resample(resample_rate * sigma); 457 T = T.resample(resample_rate * sigma);
313 - setdevice(device);  
314 - compare(sigma, device); //run the comparison algorithm 458 + if(args["mapping"].is_set()){
  459 + map(sigma, device);
  460 + }
  461 + else
  462 + compare(sigma, device); //run the comparison algorithm
315 } 463 }
316 464
317 //if a GUI is requested, display the network using OpenGL 465 //if a GUI is requested, display the network using OpenGL
318 - if(args["gui"].is_set()){  
319 - bb = GT.boundingbox(); //generate a bounding volume  
320 - glut_initialize(); //create the GLUT window and set callback functions  
321 - glutMainLoop(); // enter GLUT event processing cycle  
322 - } 466 + if(args["gui"].is_set()){
  467 + if(args["mapping"].is_set()){
  468 + ind = 1;
  469 + bb = _GT.boundingbox(); //generate a bounding volume
  470 + glut_initialize(); //create the GLUT window and set callback functions
  471 + glutMainLoop(); // enter GLUT event processing cycle
  472 + }
  473 + else{
  474 + bb = GT.boundingbox(); //generate a bounding volume
  475 + glut_initialize(); //create the GLUT window and set callback functions
  476 + glutMainLoop(); // enter GLUT event processing cycle
  477 + }
  478 + }
323 } 479 }
main_dep.cu 0 → 100644
  1 +#include <stdlib.h>
  2 +#include <string>
  3 +#include <fstream>
  4 +#include <algorithm>
  5 +
  6 +//OpenGL includes
  7 +#include <GL/glut.h>
  8 +#include <GL/freeglut.h>
  9 +
  10 +//STIM includes
  11 +#include <stim/visualization/gl_network.h>
  12 +#include <stim/biomodels/network.h>
  13 +#include <stim/visualization/gl_aaboundingbox.h>
  14 +#include <stim/parser/arguments.h>
  15 +#include <stim/visualization/camera.h>
  16 +
  17 +#ifdef __CUDACC__
  18 +//CUDA includes
  19 +#include <cuda.h>
  20 +#endif
  21 +
  22 +//ANN includes
  23 +//#include <ANN/ANN.h>
  24 +
  25 +//BOOST includes
  26 +#include <boost/tuple/tuple.hpp>
  27 +
  28 +//visualization objects
  29 +stim::gl_aaboundingbox<float> bb; //axis-aligned bounding box object
  30 +stim::camera cam; //camera object
  31 +
  32 +unsigned num_nets = 0;
  33 +stim::gl_network<float> GT; //ground truth network
  34 +stim::gl_network<float> T; //test network
  35 +
  36 +//hard-coded parameters
  37 +float resample_rate = 0.5f; //sample rate for the network (fraction of sigma used as the maximum sample rate)
  38 +float camera_factor = 1.2f; //start point of the camera as a function of X and Y size
  39 +float orbit_factor = 0.01f; //degrees per pixel used to orbit the camera
  40 +
  41 +//mouse position tracking
  42 +int mouse_x;
  43 +int mouse_y;
  44 +
  45 +//OpenGL objects
  46 +GLuint cmap_tex = 0; //texture name for the color map
  47 +
  48 +//sets an OpenGL viewport taking up the entire window
  49 +void glut_render_single_projection(){
  50 +
  51 + glMatrixMode(GL_PROJECTION); //load the projection matrix for editing
  52 + glLoadIdentity(); //start with the identity matrix
  53 + int X = glutGet(GLUT_WINDOW_WIDTH); //use the whole screen for rendering
  54 + int Y = glutGet(GLUT_WINDOW_HEIGHT);
  55 + glViewport(0, 0, X, Y); //specify a viewport for the entire window
  56 + float aspect = (float)X / (float)Y; //calculate the aspect ratio
  57 + gluPerspective(60, aspect, 0.1, 1000000); //set up a perspective projection
  58 +}
  59 +
  60 +//sets an OpenGL viewport taking up the left half of the window
  61 +void glut_render_left_projection(){
  62 +
  63 + glMatrixMode(GL_PROJECTION); //load the projection matrix for editing
  64 + glLoadIdentity(); //start with the identity matrix
  65 + int X = glutGet(GLUT_WINDOW_WIDTH) / 2; //only use half of the screen for the viewport
  66 + int Y = glutGet(GLUT_WINDOW_HEIGHT);
  67 + glViewport(0, 0, X, Y); //specify the viewport on the left
  68 + float aspect = (float)X / (float)Y; //calculate the aspect ratio
  69 + gluPerspective(60, aspect, 0.1, 1000000); //set up a perspective projection
  70 +}
  71 +
  72 +//sets an OpenGL viewport taking up the right half of the window
  73 +void glut_render_right_projection(){
  74 +
  75 + glMatrixMode(GL_PROJECTION); //load the projection matrix for editing
  76 + glLoadIdentity(); //start with the identity matrix
  77 + int X = glutGet(GLUT_WINDOW_WIDTH) / 2; //only use half of the screen for the viewport
  78 + int Y = glutGet(GLUT_WINDOW_HEIGHT);
  79 + glViewport(X, 0, X, Y); //specify the viewport on the right
  80 + float aspect = (float)X / (float)Y; //calculate the aspect ratio
  81 + gluPerspective(60, aspect, 0.1, 1000000); //set up a perspective projection
  82 +}
  83 +
  84 +void glut_render_modelview(){
  85 +
  86 + glMatrixMode(GL_MODELVIEW); //load the modelview matrix for editing
  87 + glLoadIdentity(); //start with the identity matrix
  88 + stim::vec3<float> eye = cam.getPosition(); //get the camera position (eye point)
  89 + stim::vec3<float> focus = cam.getLookAt(); //get the camera focal point
  90 + stim::vec3<float> up = cam.getUp(); //get the camera "up" orientation
  91 +
  92 + gluLookAt(eye[0], eye[1], eye[2], focus[0], focus[1], focus[2], up[0], up[1], up[2]); //set up the OpenGL camera
  93 +}
  94 +
  95 +//draws the network(s)
  96 +void glut_render(void) {
  97 +
  98 + if(num_nets == 1){ //if a single network is loaded
  99 + glut_render_single_projection(); //fill the entire viewport
  100 + glut_render_modelview(); //set up the modelview matrix with camera details
  101 + glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); //clear the screen
  102 + GT.glCenterline(GT.nmags() - 1); //render the GT network (the only one loaded)
  103 + }
  104 +
  105 + if(num_nets == 2){ //if two networks are loaded
  106 +
  107 + glut_render_left_projection(); //set up a projection for the left half of the window
  108 + glut_render_modelview(); //set up the modelview matrix using camera details
  109 + glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); //clear the screen
  110 +
  111 + glEnable(GL_TEXTURE_1D); //enable texture mapping
  112 + glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE); //texture map will be used as the network color
  113 + glBindTexture(GL_TEXTURE_1D, cmap_tex); //bind the Brewer texture map
  114 +
  115 + GT.glCenterline(GT.nmags() - 1); //render the GT network
  116 +
  117 + glut_render_right_projection(); //set up a projection for the right half of the window
  118 + glut_render_modelview(); //set up the modelview matrix using camera details
  119 + T.glCenterline(T.nmags() - 1); //render the T network
  120 +
  121 + }
  122 +
  123 + glutSwapBuffers();
  124 +}
  125 +
  126 +// defines camera motion based on mouse dragging
  127 +void glut_motion(int x, int y){
  128 +
  129 +
  130 + float theta = orbit_factor * (mouse_x - x); //determine the number of degrees along the x-axis to rotate
  131 + float phi = orbit_factor * (y - mouse_y); //number of degrees along the y-axis to rotate
  132 +
  133 + cam.OrbitFocus(theta, phi); //rotate the camera around the focal point
  134 +
  135 + mouse_x = x; //update the mouse position
  136 + mouse_y = y;
  137 +
  138 + glutPostRedisplay(); //re-draw the visualization
  139 +}
  140 +
  141 +// sets the mouse position when clicked
  142 +void glut_mouse(int button, int state, int x, int y){
  143 + mouse_x = x;
  144 + mouse_y = y;
  145 +}
  146 +
  147 +#define BREWER_CTRL_PTS 11 //number of control points in the Brewer map
  148 +void texture_initialize(){
  149 +
  150 + //define the colormap
  151 + static float brewer_map[BREWER_CTRL_PTS][3] = { //generate a Brewer color map (blue to red)
  152 + {0.192157f, 0.211765f, 0.584314f},
  153 + {0.270588f, 0.458824f, 0.705882f},
  154 + {0.454902f, 0.678431f, 0.819608f},
  155 + {0.670588f, 0.85098f, 0.913725f},
  156 + {0.878431f, 0.952941f, 0.972549f},
  157 + {1.0f, 1.0f, 0.74902f},
  158 + {0.996078f, 0.878431f, 0.564706f},
  159 + {0.992157f, 0.682353f, 0.380392f},
  160 + {0.956863f, 0.427451f, 0.262745f},
  161 + {0.843137f, 0.188235f, 0.152941f},
  162 + {0.647059f, 0.0f, 0.14902f}
  163 + };
  164 +
  165 + glGenTextures(1, &cmap_tex); //generate a texture map name
  166 + glBindTexture(GL_TEXTURE_1D, cmap_tex); //bind the texture map
  167 +
  168 + glTexParameteri(GL_TEXTURE_1D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); //enable linear interpolation
  169 + glTexParameteri(GL_TEXTURE_1D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
  170 + glTexParameteri(GL_TEXTURE_1D, GL_TEXTURE_WRAP_S, GL_CLAMP); //clamp the values at the minimum and maximum
  171 + glTexImage1D(GL_TEXTURE_1D, 0, 3, BREWER_CTRL_PTS, 0, GL_RGB, GL_FLOAT, //upload the texture map to the GPU
  172 + brewer_map);
  173 +}
  174 +
  175 +//Initialize the OpenGL (GLUT) window, including starting resolution, callbacks, texture maps, and camera
  176 +void glut_initialize(){
  177 +
  178 + int myargc = 1; //GLUT requires arguments, so create some bogus ones
  179 + char* myargv[1];
  180 + myargv [0]=strdup ("netmets");
  181 +
  182 + glutInit(&myargc, myargv); //pass bogus arguments to glutInit()
  183 + glutInitDisplayMode(GLUT_DEPTH | GLUT_DOUBLE | GLUT_RGBA); //generate a color buffer, depth buffer, and enable double buffering
  184 + glutInitWindowPosition(100,100); //set the initial window position
  185 + glutInitWindowSize(320,320); //set the initial window size
  186 + glutCreateWindow("NetMets - STIM Lab, UH"); //set the dialog box title
  187 +
  188 +
  189 + // register callback functions
  190 + glutDisplayFunc(glut_render); //function executed for rendering - renders networks
  191 + glutMouseFunc(glut_mouse); //executed on a mouse click - sets starting mouse positions for rotations
  192 + glutMotionFunc(glut_motion); //executed when the mouse is moved while a button is pressed
  193 +
  194 + texture_initialize(); //set up texture mapping (create texture maps, enable features)
  195 +
  196 + stim::vec3<float> c = bb.center(); //get the center of the network bounding box
  197 +
  198 + //place the camera along the z-axis at a distance determined by the network size along x and y
  199 + cam.setPosition(c + stim::vec<float>(0, 0, camera_factor * std::max(bb.size()[0], bb.size()[1])));
  200 + cam.LookAt(c[0], c[1], c[2]); //look at the center of the network
  201 +
  202 + glClearColor(1, 1, 1, 1);
  203 +}
  204 +
#ifdef __CUDACC__
//verify that the requested CUDA device index exists; abort the program if it does not
void setdevice(int &device){
	int count;
	cudaGetDeviceCount(&count);		//number of CUDA devices available on this machine
	if(device >= count){			//requested index is out of range (same test as count < device + 1)
		std::cout<<"No such device available, please set another device"<<std::endl;
		exit(1);
	}
}
#else
//without CUDA, force the device index to -1 so downstream code falls back to the CPU
void setdevice(int &device){
	device = -1;
}
#endif
  219 +
  220 +//compare both networks and fill the networks with error information
  221 +void compare(float sigma, int device){
  222 +
  223 + GT = GT.compare(T, sigma, device); //compare the ground truth to the test case - store errors in GT
  224 + T = T.compare(GT, sigma, device); //compare the test case to the ground truth - store errors in T
  225 +
  226 + //calculate the metrics
  227 + float FPR = GT.average(0); //calculate the metrics
  228 + float FNR = T.average(0);
  229 +
  230 + std::cout << "FNR: " << FPR << std::endl; //print false alarms and misses
  231 + std::cout << "FPR: " << FNR << std::endl;
  232 +}
  233 +
  234 +// writes features of the networks i.e average segment length, tortuosity, branching index, contraction, fractal dimension, number of end and branch points to a csv file
  235 +// Pranathi wrote this - saves network features to a CSV file
  236 +void features(std::string filename){
  237 + double avgL_t, avgL_gt, avgT_t, avgT_gt, avgB_t, avgB_gt, avgC_t, avgC_gt, avgFD_t, avgFD_gt;
  238 + unsigned int e_t, e_gt, b_gt, b_t;
  239 + avgL_gt = GT.Lengths();
  240 + avgT_gt = GT.Tortuosities();
  241 + avgL_t = T.Lengths();
  242 + avgT_t = T.Tortuosities();
  243 + avgB_gt = GT.BranchingIndex();
  244 + avgB_t = T.BranchingIndex();
  245 + avgC_gt = GT.Contractions();
  246 + avgFD_gt = GT.FractalDimensions();
  247 + avgC_t = T.Contractions();
  248 + avgFD_t = T.FractalDimensions();
  249 + e_gt = GT.EndP();
  250 + e_t = T.EndP();
  251 + b_gt = GT.BranchP();
  252 + b_t = T.BranchP();
  253 + std::ofstream myfile;
  254 + myfile.open (filename.c_str());
  255 + myfile << "Length, Tortuosity, Contraction, Fractal Dimension, Branch Points, End points, Branching Index, \n";
  256 + myfile << avgL_gt << "," << avgT_gt << "," << avgC_gt << "," << avgFD_gt << "," << b_gt << "," << e_gt << "," << avgB_gt <<std::endl;
  257 + myfile << avgL_t << "," << avgT_t << "," << avgC_t << "," << avgFD_t << "," << b_t << "," << e_t << "," << avgB_t <<std::endl;
  258 + myfile.close();
  259 +}
  260 +
// Output an advertisement for the lab, authors, and usage information
void advertise(){
	//banner with credits and source location
	std::cout << std::endl << std::endl
	          << "=========================================================================" << std::endl
	          << "Thank you for using the NetMets network comparison tool!" << std::endl
	          << "Scalable Tissue Imaging and Modeling (STIM) Lab, University of Houston" << std::endl
	          << "Developers: Pranathi Vemuri, David Mayerich" << std::endl
	          << "Source: https://git.stim.ee.uh.edu/segmentation/netmets" << std::endl
	          << "=========================================================================" << std::endl << std::endl;

	//usage examples
	std::cout << "usage: netmets file1 file2 --sigma 3" << std::endl
	          << " compare two files with a tolerance of 3 (units defined by the network)" << std::endl << std::endl
	          << " netmets file1 --gui" << std::endl
	          << " load a file and display it using OpenGL" << std::endl << std::endl
	          << " netmets file1 file2 --device 0" << std::endl
	          << " compare two files using device 0 (if there isn't a gpu, use cpu)" << std::endl << std::endl;
}
  278 +
  279 +int main(int argc, char* argv[])
  280 +{
  281 + stim::arglist args; //create an instance of arglist
  282 +
  283 + //add arguments
  284 + args.add("help", "prints this help");
  285 + args.add("sigma", "force a sigma value to specify the tolerance of the network comparison", "3");
  286 + args.add("gui", "display the network or network comparison using OpenGL");
  287 + args.add("device", "choose specific device to run", "0");
  288 + args.add("features", "save features to a CSV file, specify file name");
  289 +
  290 + args.parse(argc, argv); //parse the user arguments
  291 +
  292 + if(args["help"].is_set() || args.nargs() == 0){ //test for help
  293 + advertise(); //output the advertisement
  294 + std::cout<<args.str(); //output arguments
  295 + exit(1); //exit
  296 + }
  297 +
  298 + if(args.nargs() >= 1){ //if at least one network file is specified
  299 + num_nets = 1; //set the number of networks to one
  300 + GT.load_obj(args.arg(0)); //load the specified file as the ground truth
  301 + /*GT.to_txt("Graph.txt");*/
  302 + }
  303 +
  304 + if(args.nargs() == 2){ //if two files are specified, they will be displayed in neighboring viewports and compared
  305 + int device = args["device"].as_int(); //get the device value from the user
  306 + num_nets = 2; //set the number of networks to two
  307 + float sigma = args["sigma"].as_float(); //get the sigma value from the user
  308 + T.load_obj(args.arg(1)); //load the second (test) network
  309 + if(args["features"].is_set()) //if the user wants to save features
  310 + features(args["features"].as_string());
  311 + GT = GT.resample(resample_rate * sigma); //resample both networks based on the sigma value
  312 + T = T.resample(resample_rate * sigma);
  313 + setdevice(device);
  314 + compare(sigma, device); //run the comparison algorithm
  315 + }
  316 +
  317 + //if a GUI is requested, display the network using OpenGL
  318 + if(args["gui"].is_set()){
  319 + bb = GT.boundingbox(); //generate a bounding volume
  320 + glut_initialize(); //create the GLUT window and set callback functions
  321 + glutMainLoop(); // enter GLUT event processing cycle
  322 + }
  323 +}