#include <stdio.h>
#include <stdlib.h> // exit(), rand(), RAND_MAX
#include <string.h> // memset()
#include <math.h>   // sqrt()

#include "PCASplit.h"

#define WANT_STREAM // include.h will get stream fns
#define WANT_MATH // include.h will get math fns
// newmatap.h will get include.h
#include <newmatap.h> // need matrix applications
#include <newmatio.h> // need matrix output routines
#include <newmatrc.h>

#ifdef use_namespace
using namespace NEWMAT; // access NEWMAT namespace
#endif


14 


15  namespace PCA {


16 


17  PCASplit::PCASplit()


18  : _maxLevel(4), _minDistance(0.5f), _distanceScale(0.2f), _finalMaxLevel(0), _indexCount(0)


19  {


20  _indices = new unsigned int[MAX_INDEX];


21  _memClusterChunk1 = new ClusterListNode[MAX_INDEX];


22  _memClusterChunk2 = new ClusterListNode[MAX_INDEX];


23  _curMemClusterChunk = _memClusterChunk1;


24  _memClusterChunkIndex = 0;


25  }


26 


27  PCASplit::~PCASplit()


28  {


29  delete [] _indices;


30  delete [] _memClusterChunk1;


31  delete [] _memClusterChunk2;


32  }


33 


34  void PCASplit::computeCentroid(Point* data, int count, Vector3& mean/*, float& minValue, float& maxValue*/)


35  {


36  float sumx= 0, sumy = 0, sumz = 0;


37  float size = 0;


38  float sumsize = 0;


39  for (int i = 0; i < count; ++i)


40  {


41  size = data[i].size;


42  sumx += data[i].position.x * size;


43  sumy += data[i].position.y * size;


44  sumz += data[i].position.z * size;


45  sumsize += size;


46  }


47 


48  sumsize = 1.0f / sumsize;


49  mean.set(sumx * sumsize, sumy * sumsize, sumz * sumsize);


50  }


51 


52  void PCASplit::computeCovariant(Point* data, int count, const Vector3& mean, float* m)


53  {


54  memset(m, 0, sizeof(float) * 9);


55 


56  for (int i = 0; i < count; ++i)


57  {


58  m[0] += (data[i].position.x  mean.x) * (data[i].position.x  mean.x);


59  m[1] += (data[i].position.x  mean.x) * (data[i].position.y  mean.y);


60  m[2] += (data[i].position.x  mean.x) * (data[i].position.z  mean.z);


61  m[4] += (data[i].position.y  mean.y) * (data[i].position.y  mean.y);


62  m[5] += (data[i].position.y  mean.y) * (data[i].position.z  mean.z);


63  m[8] += (data[i].position.z  mean.z) * (data[i].position.z  mean.z);


64  }


65 


66  float invCount = 1.0f / (count  1);


67  m[0] *= invCount;


68  m[1] *= invCount;


69  m[2] *= invCount;


70  m[4] *= invCount;


71  m[5] *= invCount;


72  m[8] *= invCount;


73  m[3] = m[1];


74  m[6] = m[2];


75  m[7] = m[5];


76  }


77 


78  void PCASplit::computeDistortion(Point* data, int count, const Vector3& mean, float& distortion, float& finalSize)


79  {


80  distortion = 0.0f;


81  finalSize = 0.0f;


82 


83  float maxSize = 0.0f;


84  float distance;


85  for (int i = 0; i < count; ++i)


86  {


87  // sum


88  distance = mean.distanceSquare(data[i].position.x, data[i].position.y, data[i].position.z);


89  distortion += distance;


90 


91  if (data[i].size > maxSize)


92  {


93  maxSize = data[i].size;


94  }


95 


96  /*


97  finalSize += data[i].size * sqrt(distance);


98  */


99  if (distance > finalSize) finalSize = distance;


100  }


101 


102  // equation 2


103  //finalSize = 0.5f * sqrt (finalSize) / (float) (count  1) + maxSize;


104  finalSize = sqrt (finalSize) + maxSize;


105  }


106 


107  void PCASplit::init()


108  {


109  _curClusterNode = 0;


110  _curClusterCount = 0;


111  }


112 


113  Cluster* PCASplit::createClusterBlock(ClusterListNode* clusterList, int count, int level)


114  {


115  static int cc = 0;


116  cc += count;


117  Cluster* clusterBlock = new Cluster[count];


118 


119  _clusterHeader>numOfClusters[level  1] = count;


120  _clusterHeader>startPointerCluster[level  1] = clusterBlock;


121 


122  printf("Cluster created %d [in level %d]:total %d\n", count, level, cc);


123  fflush(stdout);


124 


125  int i = 0;


126  ClusterListNode* clusterNode = clusterList;


127  while (clusterNode)


128  {


129  clusterBlock[i].centroid = clusterList>data>centroid_t;


130  clusterBlock[i].level = level;


131  clusterBlock[i].scale = clusterList>data>scale_t;


132 


133  clusterNode = clusterNode>next;


134  ++i;


135  }


136  if (count != i)


137  {


138  printf("ERROR\n");


139  }


140  return clusterBlock;


141  }


142 


143  ClusterAccel* PCASplit::doIt(Point* data, int count)


144  {


145  init();


146 


147  _clusterHeader = new ClusterAccel(_maxLevel);


148 


149  Cluster* root = new Cluster;


150  Cluster_t* cluster_t = new Cluster_t();


151  cluster_t>points_t = data;


152  cluster_t>numOfPoints_t = count;


153  root>level = 1;


154 


155  _clusterHeader>root = root;


156  _clusterHeader>numOfClusters[0] = 1;


157  _clusterHeader>startPointerCluster[0] = root;


158 


159  Vector3 mean;


160  float distortion, scale;


161 


162  computeCentroid(cluster_t>points_t, cluster_t>numOfPoints_t, mean);


163  computeDistortion(cluster_t>points_t, cluster_t>numOfPoints_t, mean, distortion, scale);


164 


165  cluster_t>centroid_t = mean;


166  cluster_t>scale_t = scale;


167 


168  float mindistance = _minDistance;


169  int level = 2;


170 


171  ClusterListNode* clustNodeList = &(_memClusterChunk2[0]);


172  clustNodeList>data = cluster_t;


173  clustNodeList>next = 0;


174 


175  _curRoot = root;


176  do {


177  analyze(clustNodeList, _curRoot, level, mindistance);


178 


179  mindistance *= _distanceScale;


180  ++level;


181 


182  // swap memory buffer & init


183  _curMemClusterChunk = (_curMemClusterChunk == _memClusterChunk1)?


184  _memClusterChunk2 : _memClusterChunk1;


185  _memClusterChunkIndex = 0;


186 


187  clustNodeList = _curClusterNode;


188 


189  } while (level <= _maxLevel);


190 


191  return _clusterHeader;


192  }


193 


194  void PCASplit::addLeafCluster(Cluster_t* cluster)


195  {


196  ClusterListNode* clusterNode = &_curMemClusterChunk[_memClusterChunkIndex++];


197  clusterNode>data = cluster;


198  clusterNode>next = _curClusterNode;


199  _curClusterNode = clusterNode;


200  ++_curClusterCount;


201  }


202 


203  void PCASplit::split(Point* data, int count, float limit)


204  {


205  Vector3 mean;


206  float m[9];


207 


208  computeCentroid(data, count, mean);


209  float scale, distortion;


210  computeDistortion(data, count, mean, distortion, scale);


211 


212  //if (distortion < limit)


213  if (scale < limit)


214  {


215  Cluster_t* cluster_t = new Cluster_t();


216  cluster_t>centroid_t = mean;


217  cluster_t>points_t = data;


218  cluster_t>numOfPoints_t = count;


219  cluster_t>scale_t = scale;


220  addLeafCluster(cluster_t);


221  return;


222  }


223 


224  computeCovariant(data, count, mean, m);


225 


226  SymmetricMatrix A(3);


227  for (int i = 1, index = 0; i <= 3; ++i)


228  for (int j = 1; j <= i; ++j)


229  {


230  A(i, j) = m[(i  1) * 3 + j  1];


231  }


232 


233 


234  Matrix U; DiagonalMatrix D;


235  eigenvalues(A,D,U);


236  Vector3 emax(U(1, 3), U(2, 3), U(3, 3));


237 


238  int index1Count = 0, index2Count = 0;


239 


240  int left = 0, right = count  1;


241 


242  Point p;


243  for (;left < right;)


244  {


245  while (left < count && emax.dot(data[left].position  mean) >= 0.0f) ++left;


246  while (right >= 0 && emax.dot(data[right].position  mean) < 0.0f) right;


247 


248  if (left > right) break;


249 


250  p = data[left];


251  data[left] = data[right];


252  data[right] = p;


253  ++left, right;


254 


255  }


256 


257  if (left == 0  right == count  1)


258  {


259  printf("error\n");


260  exit(1);


261  }


262  else


263  {


264  split(data, left, limit);


265  split(data + left, count  left, limit);


266  }


267  }


268 


269  void PCASplit::analyze(ClusterListNode* clusterNode, Cluster* parent, int level, float limit)


270  {


271  fflush(stdout);


272  if (level > _maxLevel) return;


273  else if (level > _finalMaxLevel) _finalMaxLevel = level;


274 


275  init();


276 


277  ClusterListNode* clNode = clusterNode;


278 


279  // initialize the indexCount of indices


280  _indexCount = 0;


281 


282  int i = 0;


283  while (clNode)


284  {


285  if (clNode>data)


286  {


287  split(clNode>data>points_t, clNode>data>numOfPoints_t, limit);


288  _indices[_indexCount++] = _curClusterCount;


289  }


290 


291  clNode = clNode>next;


292  }


293 


294  //Vector3 mean;


295  //computeCentroid(cluster>points, cluster>numOfPoints, mean);


296 


297  // the process values of split are in _curClusterNode and _curClusterCount


298  ClusterListNode* curClusterNode = _curClusterNode;


299  int curClusterNodeCount = _curClusterCount;


300 


301  if (curClusterNodeCount)


302  {


303 


304  // create and init centroid


305  Cluster* retClusterBlock = createClusterBlock(curClusterNode, curClusterNodeCount, level);


306 


307  _curRoot = retClusterBlock;


308 


309  if (level == _maxLevel)


310  {


311  ClusterListNode* node = curClusterNode;


312  if (_indexCount > 0)


313  {


314  // for parent


315  Point* points = new Point[curClusterNodeCount];


316 


317  parent[0].setChildren(retClusterBlock, _indices[0]);


318  parent[0].setPoints(points, _indices[0]);


319 


320  for (int i = 0, in = 0; i < curClusterNodeCount; ++i)


321  {


322  if (i >= _indices[in])


323  {


324  in++;


325  parent[in].setChildren(retClusterBlock + _indices[in  1], _indices[in]  _indices[in  1]);


326  parent[in].setPoints(points + _indices[in  1], _indices[in]  _indices[in  1]);


327  }


328 


329  retClusterBlock[i].scale = node>data>scale_t;


330  retClusterBlock[i].centroid = node>data>centroid_t;


331  retClusterBlock[i].points = node>data>points_t;


332  retClusterBlock[i].numOfPoints = node>data>numOfPoints_t;


333  retClusterBlock[i].color.set(


334  float(rand()) / RAND_MAX, float(rand()) / RAND_MAX, float(rand()) / RAND_MAX,


335  0.2);


336 


337  points[i].position = node>data>centroid_t;


338  points[i].color.set(1.0f, 1.0f, 1.0f, 0.2f);


339  points[i].size = node>data>scale_t;


340 


341  node = node>next;


342 


343  //::glGenBuffers(1, &(cluster[i].vbo));


344  //glBindBuffer(GL_ARRAY_BUFFER, cluster[i].vbo);


345  // glBufferData(GL_ARRAY_BUFFER,


346  // sizeof(Point) * cluster[i].numOfPoints,


347  // cluster[i].points,


348  // GL_STATIC_DRAW);


349  //glBindBuffer(GL_ARRAY_BUFFER, 0);


350 


351  }


352 


353  /*


354  ::glGenBuffers(1, &(_clusterHeader>_vbo[level  1]));


355  ::glBindBuffer(GL_ARRAY_BUFFER, _clusterHeader>_vbo[level  1]);


356  ::glBufferData(GL_ARRAY_BUFFER,


357  sizeof(Point) * curClusterNodeCount,


358  points,


359  GL_STATIC_DRAW);


360  ::glBindBuffer(GL_ARRAY_BUFFER, 0);


361  */


362  }


363 


364 


365 


366  }


367  else


368  {


369  Point* points = new Point[curClusterNodeCount];


370  ClusterListNode* node = curClusterNode;


371 


372  if (_indexCount > 0)


373  {


374  parent[0].setPoints(points, _indices[0]);


375  parent[0].setChildren(retClusterBlock, _indices[0]);


376  for (int k = 1; k < _indexCount; ++k)


377  {


378  parent[k].setPoints(points + _indices[k  1], _indices[k]  _indices[k  1]);


379  parent[k].setChildren(retClusterBlock + _indices[k  1], _indices[k]  _indices[k  1]);


380  }


381 


382  // set points of subclusters


383  for (int i = 0; i < curClusterNodeCount; ++i)


384  {


385  points[i].position = node>data>centroid_t;


386  points[i].color.set(1.0f, 1.0f, 1.0f, 0.2f);


387  points[i].size = node>data>scale_t;


388  node = node>next;


389  }


390 


391  }


392  }


393  }


394  }


395 


396  }


397 

