source: trunk/packages/vizservers/nanovis/PCASplit.cpp @ 3596

Last change on this file since 3596 was 3502, checked in by ldelgass, 12 years ago

Add basic VTK structured points reader to nanovis, update copyright dates.

  • Property svn:eol-style set to native
File size: 10.8 KB
RevLine 
[2798]1/* -*- mode: c++; c-basic-offset: 4; indent-tabs-mode: nil -*- */
[3502]2/*
3 * Copyright (c) 2004-2013  HUBzero Foundation, LLC
4 *
5 */
[1028]6#include <stdio.h>
7
8#define WANT_STREAM                  // include.h will get stream fns
9#define WANT_MATH                    // include.h will get math fns
10                                     // newmatap.h will get include.h
11#include <newmatap.h>                // need matrix applications
12#include <newmatio.h>                // need matrix output routines
13#include <newmatrc.h>
14
[2832]15#include "PCASplit.h"
16
[3492]17using namespace vrmath;
18
[1028]19#ifdef use_namespace
20using namespace NEWMAT;              // access NEWMAT namespace
21#endif
22
[2832]23using namespace PCA;
[1028]24
/**
 * Construct a PCASplit with default tuning parameters:
 * up to 4 levels of recursive splitting, an initial distance
 * threshold of 0.5 that shrinks by a factor of 0.2 per level.
 *
 * Pre-allocates the index array and the two ClusterListNode scratch
 * chunks (MAX_INDEX entries each) that doIt()/analyze() ping-pong
 * between when building per-level cluster lists.
 */
PCASplit::PCASplit() :
    _maxLevel(4),
    _minDistance(0.5f),
    _distanceScale(0.2f),
    _indexCount(0),
    _finalMaxLevel(0)
{
    _indices = new unsigned int[MAX_INDEX];
    _memClusterChunk1 = new ClusterListNode[MAX_INDEX];
    _memClusterChunk2 = new ClusterListNode[MAX_INDEX];
    // Start allocating list nodes from chunk 1; doIt() swaps chunks
    // between levels and resets the index.
    _curMemClusterChunk = _memClusterChunk1;
    _memClusterChunkIndex = 0;
}
38
39PCASplit::~PCASplit()
40{
41    delete [] _indices;
42    delete [] _memClusterChunk1;
43    delete [] _memClusterChunk2;
44}
45
46void
[3492]47PCASplit::computeCentroid(Point *data, int count, Vector3f& mean)
[1028]48{
[2832]49    float sumx = 0, sumy = 0, sumz = 0;
[1028]50    float size = 0;
51    float sumsize = 0;
52    for (int i = 0; i < count; ++i) {
53        size = data[i].size;
54        sumx += data[i].position.x * size;
55        sumy += data[i].position.y * size;
56        sumz += data[i].position.z * size;
57        sumsize += size;
58    }
59    sumsize = 1.0f / sumsize;
60    mean.set(sumx * sumsize, sumy * sumsize, sumz * sumsize);
61}
62
63void
[3492]64PCASplit::computeCovariant(Point *data, int count, const Vector3f& mean,
[2832]65                           float *m)
[1028]66{
67    memset(m, 0, sizeof(float) * 9);
68
69    for (int i = 0; i < count; ++i) {
70        m[0] += (data[i].position.x - mean.x) * (data[i].position.x - mean.x);
71        m[1] += (data[i].position.x - mean.x) * (data[i].position.y - mean.y);
72        m[2] += (data[i].position.x - mean.x) * (data[i].position.z - mean.z);
73        m[4] += (data[i].position.y - mean.y) * (data[i].position.y - mean.y);
74        m[5] += (data[i].position.y - mean.y) * (data[i].position.z - mean.z);
75        m[8] += (data[i].position.z - mean.z) * (data[i].position.z - mean.z);
76    }
77   
78    float invCount = 1.0f / (count - 1);
79    m[0] *= invCount;
80    m[1] *= invCount;
81    m[2] *= invCount;
82    m[4] *= invCount;
83    m[5] *= invCount;
84    m[8] *= invCount;
85    m[3] = m[1];
86    m[6] = m[2];
87    m[7] = m[5];
88}
89
90void
[3492]91PCASplit::computeDistortion(Point *data, int count, const Vector3f& mean,
[1028]92                            float& distortion, float& finalSize)
93{
94    distortion = 0.0f;
95    finalSize = 0.0f;
96       
97    float maxSize = 0.0f;
98    float distance;
99    for (int i = 0; i < count; ++i) {
[2832]100        // sum
101        distance = mean.distanceSquare(data[i].position.x, data[i].position.y, data[i].position.z);
102        distortion += distance;
103
104        if (data[i].size > maxSize) {
105            maxSize = data[i].size;
106        }
107
108        /*
109          finalSize += data[i].size * sqrt(distance);
110        */
111        if (distance > finalSize) {
112            finalSize = distance;
113        }
[1028]114    }
115    // equation 2
116    //finalSize = 0.5f * sqrt (finalSize) / (float) (count - 1) + maxSize;
117    finalSize = sqrt (finalSize) + maxSize;
118}
[2844]119
120void
[1028]121PCASplit::init()
122{
[2844]123    _curClusterNode = NULL;
[1028]124    _curClusterCount = 0;
125}
126
[2844]127Cluster *
[2832]128PCASplit::createClusterBlock(ClusterListNode *clusterList, int count, int level)
[1028]129{
130    static int cc = 0;
131    cc += count;
[2832]132    Cluster *clusterBlock = new Cluster[count];
[1028]133
134    _clusterHeader->numOfClusters[level - 1] = count;
135    _clusterHeader->startPointerCluster[level - 1] = clusterBlock;
136
[3452]137    TRACE("Cluster created %d [in level %d]:total %d", count, level, cc);
[2832]138
[1028]139    int i = 0;
[2832]140    ClusterListNode *clusterNode = clusterList;
[1028]141    while (clusterNode) {
[2832]142        clusterBlock[i].centroid = clusterList->data->centroid_t;
143        clusterBlock[i].level = level;
144        clusterBlock[i].scale = clusterList->data->scale_t;
145
146        clusterNode = clusterNode->next;
147        ++i;
[1028]148    }
149    if (count != i) {
[3452]150        ERROR("Problem walking clusterList: count: %d, i: %d", count, i);
[1028]151    }
152    return clusterBlock;
153}
154
/**
 * Entry point: build the full cluster hierarchy for \a count points.
 *
 * Level 1 is a single root cluster covering all points; each subsequent
 * level (up to _maxLevel) re-splits the previous level's clusters with a
 * distance threshold that shrinks by _distanceScale per level.
 *
 * \return a newly allocated ClusterAccel describing the hierarchy
 *         (caller takes ownership; nothing here frees it).
 */
ClusterAccel *
PCASplit::doIt(Point *data, int count)
{
    init();

    _clusterHeader = new ClusterAccel(_maxLevel);

    // Root cluster (level 1) spans the entire input point set.
    Cluster *root = new Cluster;
    Cluster_t *cluster_t = new Cluster_t();
    cluster_t->points_t = data;
    cluster_t->numOfPoints_t = count;
    root->level = 1;

    _clusterHeader->root = root;
    _clusterHeader->numOfClusters[0] = 1;
    _clusterHeader->startPointerCluster[0] = root;

    Vector3f mean;
    float distortion, scale;

    // Seed the root's centroid and size estimate from the whole set.
    computeCentroid(cluster_t->points_t, cluster_t->numOfPoints_t, mean);
    computeDistortion(cluster_t->points_t, cluster_t->numOfPoints_t, mean,
                      distortion, scale);

    cluster_t->centroid_t = mean;
    cluster_t->scale_t = scale;

    float mindistance = _minDistance;
    int level = 2;

    // Single-node work list holding the root; lives in chunk 2 so the
    // first analyze() pass can allocate its output nodes from chunk 1.
    ClusterListNode *clustNodeList = &(_memClusterChunk2[0]);
    clustNodeList->data = cluster_t;
    clustNodeList->next = NULL;

    _curRoot = root;
    do {
        // Split every cluster on the work list; results accumulate in
        // _curClusterNode/_curClusterCount and the per-level tables.
        analyze(clustNodeList, _curRoot, level, mindistance);

        mindistance *= _distanceScale;
        ++level;

        // swap memory buffer & init
        // Double-buffer the node chunks: the list just produced (in the
        // current chunk) becomes the next pass's input, and new nodes
        // are allocated from the other chunk.
        _curMemClusterChunk = (_curMemClusterChunk == _memClusterChunk1) ?
            _memClusterChunk2 : _memClusterChunk1;
        _memClusterChunkIndex = 0;

        clustNodeList = _curClusterNode;
    } while (level <= _maxLevel);

    return _clusterHeader;
}
206
207void
[2832]208PCASplit::addLeafCluster(Cluster_t *cluster)
[1028]209{
[2832]210    ClusterListNode *clusterNode =
211        &_curMemClusterChunk[_memClusterChunkIndex++];
[1028]212    clusterNode->data = cluster;
213    clusterNode->next = _curClusterNode;
214    _curClusterNode = clusterNode;
215    ++_curClusterCount;
216}
217
/**
 * Recursively split [data, data+count) along its principal axis until
 * each subset's size estimate falls below \a limit; each terminal
 * subset is recorded via addLeafCluster().
 *
 * The partition is done in place, so the input array is reordered.
 */
void
PCASplit::split(Point *data, int count, float limit)
{
    Vector3f mean;
    float m[9];

    computeCentroid(data, count, mean);
    float scale, distortion;
    computeDistortion(data, count, mean, distortion, scale);

    //if (distortion < limit)
    if (scale < limit) {
        // Tight enough: record this subset as a leaf cluster and stop.
        Cluster_t *cluster_t = new Cluster_t();
        cluster_t->centroid_t = mean;
        cluster_t->points_t = data;
        cluster_t->numOfPoints_t = count;
        cluster_t->scale_t = scale;
        addLeafCluster(cluster_t);
        return;
    }

    computeCovariant(data, count, mean, m);

    // Begin newmat11 dependency

    // Load the row-major 3x3 covariance into a newmat SymmetricMatrix
    // (newmat uses 1-based indexing; only the lower triangle is set).
    SymmetricMatrix A(3);
    for (int i = 1; i <= 3; ++i) {
        for (int j = 1; j <= i; ++j) {
            A(i, j) = m[(i - 1) * 3 + j - 1];
        }
    }

    // Eigen-decompose; newmat returns eigenvalues in ascending order,
    // so column 3 of U is the eigenvector of the largest eigenvalue —
    // the principal axis of the point set.
    Matrix U;
    DiagonalMatrix D;
    eigenvalues(A, D ,U);
    Vector3f emax(U(1, 3), U(2, 3), U(3, 3));

    // End newmat11 dependency

    // In-place two-way partition about the plane through `mean` with
    // normal `emax`: points on the non-negative side move to the front.
    int left = 0, right = count - 1;

    Point p;
    for (;left < right;) {
        while (left < count && emax.dot(data[left].position - mean) >= 0.0f) {
            ++left;
        }
        while (right >= 0 && emax.dot(data[right].position - mean) < 0.0f) {
            --right;
        }
        if (left > right) {
            break;
        }

        p = data[left];
        data[left] = data[right];
        data[right] = p;
        ++left, --right;
    }

    if (left == 0 || right == count - 1) {
        // Degenerate partition (all points on one side): recursion could
        // not make progress.  NOTE(review): exit(1) aborts the whole
        // process here — consider a softer failure path.
        TRACE("error");
        exit(1);
    } else {
        // Recurse on the two halves; `left` is the size of the front half.
        split(data, left, limit);
        split(data + left, count - left, limit);
    }
}
285
286void
[2832]287PCASplit::analyze(ClusterListNode *clusterNode, Cluster *parent, int level,
[1028]288                  float limit)
289{
290    if (level > _maxLevel) {
[2832]291        return;
[1028]292    }
293    if (level > _finalMaxLevel) {
[2832]294        _finalMaxLevel = level;
[1028]295    }
[2832]296
[1028]297    init();
298
[2832]299    ClusterListNode *clNode = clusterNode;
300
[1028]301    // initialize the indexCount of indices
302    _indexCount = 0;
303    while (clNode) {
[2832]304        if (clNode->data) {
305            split(clNode->data->points_t, clNode->data->numOfPoints_t, limit);
306            _indices[_indexCount++] = _curClusterCount;
307        }
308        clNode = clNode->next;
[1028]309    }
[2832]310
[3492]311    //Vector3f mean;
[1028]312    //computeCentroid(cluster->points, cluster->numOfPoints, mean);
313
314    // the process values of split are in _curClusterNode and _curClusterCount
[2832]315    ClusterListNode *curClusterNode = _curClusterNode;
[1028]316    unsigned int curClusterNodeCount = _curClusterCount;
317
318    if (curClusterNodeCount) {
319        // create and init centroid
[2832]320        Cluster *retClusterBlock =
321            createClusterBlock(curClusterNode, curClusterNodeCount, level);
322
323        _curRoot = retClusterBlock;
324
325        if (level == _maxLevel) {
326            ClusterListNode *node = curClusterNode;
327            if (_indexCount > 0) {
328                // for parent
329                Point *points = new Point[curClusterNodeCount];
330
331                parent[0].setChildren(retClusterBlock, _indices[0]);
332                parent[0].setPoints(points, _indices[0]);
333
334                for (unsigned int i = 0, in = 0; i < curClusterNodeCount; ++i) {
335                    if (i >= _indices[in]) {
336                        in++;
337                        parent[in].setChildren(retClusterBlock + _indices[in - 1], _indices[in] - _indices[in - 1]);
338                        parent[in].setPoints(points + _indices[in - 1], _indices[in] - _indices[in - 1]);
339                    }
340
341                    retClusterBlock[i].scale = node->data->scale_t;
342                    retClusterBlock[i].centroid = node->data->centroid_t;
343                    retClusterBlock[i].points = node->data->points_t;
344                    retClusterBlock[i].numOfPoints = node->data->numOfPoints_t;
345                    retClusterBlock[i].color.set(float(rand()) / RAND_MAX,
346                                                 float(rand()) / RAND_MAX,
347                                                 float(rand()) / RAND_MAX,
348                                                 0.2);
349
350                    points[i].position = node->data->centroid_t;
351                    points[i].color.set(1.0f, 1.0f, 1.0f, 0.2f);
352                    points[i].size = node->data->scale_t;
353
354                    node = node->next;
355                }
356            }
357        } else {
358            Point *points = new Point[curClusterNodeCount];
359            ClusterListNode *node = curClusterNode;
360
361            if (_indexCount > 0) {
362                parent[0].setPoints(points, _indices[0]);
363                parent[0].setChildren(retClusterBlock, _indices[0]);
364                for (int k = 1; k < _indexCount; ++k) {
365                    parent[k].setPoints(points + _indices[k - 1],
366                                        _indices[k] - _indices[k - 1]);
367                    parent[k].setChildren(retClusterBlock + _indices[k - 1],
368                                          _indices[k] - _indices[k - 1]);
369                }
370
371                // set points of sub-clusters
372                for (unsigned int i = 0; i < curClusterNodeCount; ++i) {
373                    points[i].position = node->data->centroid_t;
374                    points[i].color.set(1.0f, 1.0f, 1.0f, 0.2f);
375                    points[i].size = node->data->scale_t;
376                    node = node->next;
377                }
378            }
379        }
[1028]380    }
381}
Note: See TracBrowser for help on using the repository browser.