source: trunk/packages/vizservers/nanovis/PCASplit.cpp @ 2832

Last change on this file since 2832 was 2832, checked in by ldelgass, 13 years ago

Formatting fixes

  • Property svn:eol-style set to native
File size: 10.6 KB
/* -*- mode: c++; c-basic-offset: 4; indent-tabs-mode: nil -*- */
#include <stdio.h>
#include <stdlib.h>                  // rand(), exit()
#include <string.h>                  // memset()

#define WANT_STREAM                  // include.h will get stream fns
#define WANT_MATH                    // include.h will get math fns
                                     // newmatap.h will get include.h
#include <newmatap.h>                // need matrix applications
#include <newmatio.h>                // need matrix output routines
#include <newmatrc.h>

#include "PCASplit.h"

#ifdef use_namespace
using namespace NEWMAT;              // access NEWMAT namespace
#endif

using namespace PCA;

PCASplit::PCASplit() :
    _maxLevel(4),
    _minDistance(0.5f),
    _distanceScale(0.2f),
    _indexCount(0),
    _finalMaxLevel(0)
{
    _indices = new unsigned int[MAX_INDEX];
    _memClusterChunk1 = new ClusterListNode[MAX_INDEX];
    _memClusterChunk2 = new ClusterListNode[MAX_INDEX];
    _curMemClusterChunk = _memClusterChunk1;
    _memClusterChunkIndex = 0;
}

PCASplit::~PCASplit()
{
    delete [] _indices;
    delete [] _memClusterChunk1;
    delete [] _memClusterChunk2;
}

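// Compute the size-weighted centroid of the given points: each position is
// weighted by its splat size, so larger particles pull the mean toward them.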
void
PCASplit::computeCentroid(Point *data, int count, Vector3& mean)
{
    float sumx = 0, sumy = 0, sumz = 0;
    float size = 0;
    float sumsize = 0;
    for (int i = 0; i < count; ++i) {
        size = data[i].size;
        sumx += data[i].position.x * size;
        sumy += data[i].position.y * size;
        sumz += data[i].position.z * size;
        sumsize += size;
    }
    sumsize = 1.0f / sumsize;
    mean.set(sumx * sumsize, sumy * sumsize, sumz * sumsize);
}

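// Compute the 3x3 sample covariance matrix of the point positions about the
// given mean.  Only the upper triangle (m[0], m[1], m[2], m[4], m[5], m[8])
// is accumulated; the lower triangle is filled in by symmetry at the end.
// The result is stored row-major in m[9].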
void
PCASplit::computeCovariant(Point *data, int count, const Vector3& mean,
                           float *m)
{
    memset(m, 0, sizeof(float) * 9);

    for (int i = 0; i < count; ++i) {
        m[0] += (data[i].position.x - mean.x) * (data[i].position.x - mean.x);
        m[1] += (data[i].position.x - mean.x) * (data[i].position.y - mean.y);
        m[2] += (data[i].position.x - mean.x) * (data[i].position.z - mean.z);
        m[4] += (data[i].position.y - mean.y) * (data[i].position.y - mean.y);
        m[5] += (data[i].position.y - mean.y) * (data[i].position.z - mean.z);
        m[8] += (data[i].position.z - mean.z) * (data[i].position.z - mean.z);
    }

    float invCount = 1.0f / (count - 1);
    m[0] *= invCount;
    m[1] *= invCount;
    m[2] *= invCount;
    m[4] *= invCount;
    m[5] *= invCount;
    m[8] *= invCount;
    m[3] = m[1];
    m[6] = m[2];
    m[7] = m[5];
}

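// Compute the cluster distortion (sum of squared distances from the mean)
// and a representative cluster size: the maximum point-to-mean distance
// plus the largest splat size in the cluster.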
void
PCASplit::computeDistortion(Point *data, int count, const Vector3& mean,
                            float& distortion, float& finalSize)
{
    distortion = 0.0f;
    finalSize = 0.0f;

    float maxSize = 0.0f;
    float distance;
    for (int i = 0; i < count; ++i) {
        // sum
        distance = mean.distanceSquare(data[i].position.x, data[i].position.y, data[i].position.z);
        distortion += distance;

        if (data[i].size > maxSize) {
            maxSize = data[i].size;
        }

        /*
          finalSize += data[i].size * sqrt(distance);
        */
        if (distance > finalSize) {
            finalSize = distance;
        }
    }
    // equation 2
    //finalSize = 0.5f * sqrt(finalSize) / (float) (count - 1) + maxSize;
    finalSize = sqrt(finalSize) + maxSize;
}

void
PCASplit::init()
{
    _curClusterNode = 0;
    _curClusterCount = 0;
}

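// Copy the clusters collected in a ClusterListNode list into a contiguous
// Cluster array for the given level and register that block in the
// ClusterAccel header.  Returns the newly allocated block.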
Cluster *
PCASplit::createClusterBlock(ClusterListNode *clusterList, int count, int level)
{
    static int cc = 0;
    cc += count;
    Cluster *clusterBlock = new Cluster[count];

    _clusterHeader->numOfClusters[level - 1] = count;
    _clusterHeader->startPointerCluster[level - 1] = clusterBlock;

    TRACE("Cluster created %d [in level %d]:total %d\n", count, level, cc);

    int i = 0;
    ClusterListNode *clusterNode = clusterList;
    while (clusterNode) {
        clusterBlock[i].centroid = clusterNode->data->centroid_t;
        clusterBlock[i].level = level;
        clusterBlock[i].scale = clusterNode->data->scale_t;

        clusterNode = clusterNode->next;
        ++i;
    }
    if (count != i) {
        ERROR("Problem walking clusterList: count: %d, i: %d\n", count, i);
    }
    return clusterBlock;
}

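// Build the full cluster hierarchy for the given point set.  The root
// cluster holds all points; each deeper level is produced by analyze(),
// with the split threshold shrinking by _distanceScale per level until
// _maxLevel is reached.  Note that split() reorders the Point array in
// place.
//
// Typical usage (a sketch; assumes the caller owns the Point array and the
// returned ClusterAccel):
//
//     PCA::PCASplit splitter;
//     PCA::ClusterAccel *accel = splitter.doIt(points, nPoints);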
ClusterAccel *
PCASplit::doIt(Point *data, int count)
{
    init();

    _clusterHeader = new ClusterAccel(_maxLevel);

    Cluster *root = new Cluster;
    Cluster_t *cluster_t = new Cluster_t();
    cluster_t->points_t = data;
    cluster_t->numOfPoints_t = count;
    root->level = 1;

    _clusterHeader->root = root;
    _clusterHeader->numOfClusters[0] = 1;
    _clusterHeader->startPointerCluster[0] = root;

    Vector3 mean;
    float distortion, scale;

    computeCentroid(cluster_t->points_t, cluster_t->numOfPoints_t, mean);
    computeDistortion(cluster_t->points_t, cluster_t->numOfPoints_t, mean,
                      distortion, scale);

    cluster_t->centroid_t = mean;
    cluster_t->scale_t = scale;

    float mindistance = _minDistance;
    int level = 2;

    ClusterListNode *clustNodeList = &(_memClusterChunk2[0]);
    clustNodeList->data = cluster_t;
    clustNodeList->next = 0;

    _curRoot = root;
    do {
        analyze(clustNodeList, _curRoot, level, mindistance);

        mindistance *= _distanceScale;
        ++level;

        // swap memory buffer & init
        _curMemClusterChunk = (_curMemClusterChunk == _memClusterChunk1) ?
            _memClusterChunk2 : _memClusterChunk1;
        _memClusterChunkIndex = 0;

        clustNodeList = _curClusterNode;
    } while (level <= _maxLevel);

    return _clusterHeader;
}

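// Append a finished (leaf) cluster to the current cluster list, drawing the
// list node from the active per-level memory chunk.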
void
PCASplit::addLeafCluster(Cluster_t *cluster)
{
    ClusterListNode *clusterNode =
        &_curMemClusterChunk[_memClusterChunkIndex++];
    clusterNode->data = cluster;
    clusterNode->next = _curClusterNode;
    _curClusterNode = clusterNode;
    ++_curClusterCount;
}

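// Recursively split a point set until its cluster size falls below 'limit'.
// If the set is small enough it is emitted as a leaf cluster; otherwise the
// points are partitioned in place about the centroid along the principal
// eigenvector of the covariance matrix (computed with newmat's
// eigenvalues()), and each half is split again.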
void
PCASplit::split(Point *data, int count, float limit)
{
    Vector3 mean;
    float m[9];

    computeCentroid(data, count, mean);
    float scale, distortion;
    computeDistortion(data, count, mean, distortion, scale);

    //if (distortion < limit)
    if (scale < limit) {
        Cluster_t *cluster_t = new Cluster_t();
        cluster_t->centroid_t = mean;
        cluster_t->points_t = data;
        cluster_t->numOfPoints_t = count;
        cluster_t->scale_t = scale;
        addLeafCluster(cluster_t);
        return;
    }

    computeCovariant(data, count, mean, m);

    // Load the symmetric covariance matrix into newmat (1-based indexing).
    SymmetricMatrix A(3);
    for (int i = 1; i <= 3; ++i) {
        for (int j = 1; j <= i; ++j) {
            A(i, j) = m[(i - 1) * 3 + j - 1];
        }
    }

    // Principal axis: the eigenvector for the largest eigenvalue (last column).
    Matrix U;
    DiagonalMatrix D;
    eigenvalues(A, D, U);
    Vector3 emax(U(1, 3), U(2, 3), U(3, 3));

    // Partition the points in place by the sign of their projection onto emax.
    int left = 0, right = count - 1;

    Point p;
    while (left < right) {
        while (left < count && emax.dot(data[left].position - mean) >= 0.0f) {
            ++left;
        }
        while (right >= 0 && emax.dot(data[right].position - mean) < 0.0f) {
            --right;
        }
        if (left > right) {
            break;
        }

        p = data[left];
        data[left] = data[right];
        data[right] = p;
        ++left, --right;
    }

    if (left == 0 || right == count - 1) {
        ERROR("split failed: all points fell on one side of the splitting plane "
              "(left=%d, right=%d, count=%d)\n", left, right, count);
        exit(1);
    } else {
        split(data, left, limit);
        split(data + left, count - left, limit);
    }
}

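// Split every cluster on the incoming list down to the given level's size
// limit, gather the results into a new Cluster block, and attach them to
// the parent clusters using the per-parent counts recorded in _indices.
// Each parent also receives one representative Point (centroid, scale) per
// child; at the final level the child clusters keep their original points
// and are given a random color.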
void
PCASplit::analyze(ClusterListNode *clusterNode, Cluster *parent, int level,
                  float limit)
{
    if (level > _maxLevel) {
        return;
    }
    if (level > _finalMaxLevel) {
        _finalMaxLevel = level;
    }

    init();

    ClusterListNode *clNode = clusterNode;

    // initialize the indexCount of indices
    _indexCount = 0;
    while (clNode) {
        if (clNode->data) {
            split(clNode->data->points_t, clNode->data->numOfPoints_t, limit);
            _indices[_indexCount++] = _curClusterCount;
        }
        clNode = clNode->next;
    }

    //Vector3 mean;
    //computeCentroid(cluster->points, cluster->numOfPoints, mean);

    // the process values of split are in _curClusterNode and _curClusterCount
    ClusterListNode *curClusterNode = _curClusterNode;
    unsigned int curClusterNodeCount = _curClusterCount;

    if (curClusterNodeCount) {
        // create and init centroid
        Cluster *retClusterBlock =
            createClusterBlock(curClusterNode, curClusterNodeCount, level);

        _curRoot = retClusterBlock;

        if (level == _maxLevel) {
            ClusterListNode *node = curClusterNode;
            if (_indexCount > 0) {
                // for parent
                Point *points = new Point[curClusterNodeCount];

                parent[0].setChildren(retClusterBlock, _indices[0]);
                parent[0].setPoints(points, _indices[0]);

                for (unsigned int i = 0, in = 0; i < curClusterNodeCount; ++i) {
                    if (i >= _indices[in]) {
                        in++;
                        parent[in].setChildren(retClusterBlock + _indices[in - 1], _indices[in] - _indices[in - 1]);
                        parent[in].setPoints(points + _indices[in - 1], _indices[in] - _indices[in - 1]);
                    }

                    retClusterBlock[i].scale = node->data->scale_t;
                    retClusterBlock[i].centroid = node->data->centroid_t;
                    retClusterBlock[i].points = node->data->points_t;
                    retClusterBlock[i].numOfPoints = node->data->numOfPoints_t;
                    retClusterBlock[i].color.set(float(rand()) / RAND_MAX,
                                                 float(rand()) / RAND_MAX,
                                                 float(rand()) / RAND_MAX,
                                                 0.2f);

                    points[i].position = node->data->centroid_t;
                    points[i].color.set(1.0f, 1.0f, 1.0f, 0.2f);
                    points[i].size = node->data->scale_t;

                    node = node->next;
                }
            }
        } else {
            Point *points = new Point[curClusterNodeCount];
            ClusterListNode *node = curClusterNode;

            if (_indexCount > 0) {
                parent[0].setPoints(points, _indices[0]);
                parent[0].setChildren(retClusterBlock, _indices[0]);
                for (int k = 1; k < _indexCount; ++k) {
                    parent[k].setPoints(points + _indices[k - 1],
                                        _indices[k] - _indices[k - 1]);
                    parent[k].setChildren(retClusterBlock + _indices[k - 1],
                                          _indices[k] - _indices[k - 1]);
                }

                // set points of sub-clusters
                for (unsigned int i = 0; i < curClusterNodeCount; ++i) {
                    points[i].position = node->data->centroid_t;
                    points[i].color.set(1.0f, 1.0f, 1.0f, 0.2f);
                    points[i].size = node->data->scale_t;
                    node = node->next;
                }
            }
        }
    }
}