source: nanovis/tags/1.1.1/PCASplit.cpp @ 4930

Last change on this file since 4930 was 3502, checked in by ldelgass, 11 years ago

Add basic VTK structured points reader to nanovis, update copyright dates.

  • Property svn:eol-style set to native
File size: 10.8 KB
Line 
1/* -*- mode: c++; c-basic-offset: 4; indent-tabs-mode: nil -*- */
2/*
3 * Copyright (c) 2004-2013  HUBzero Foundation, LLC
4 *
5 */
6#include <stdio.h>
7
8#define WANT_STREAM                  // include.h will get stream fns
9#define WANT_MATH                    // include.h will get math fns
10                                     // newmatap.h will get include.h
11#include <newmatap.h>                // need matrix applications
12#include <newmatio.h>                // need matrix output routines
13#include <newmatrc.h>
14
15#include "PCASplit.h"
16
17using namespace vrmath;
18
19#ifdef use_namespace
20using namespace NEWMAT;              // access NEWMAT namespace
21#endif
22
23using namespace PCA;
24
25PCASplit::PCASplit() :
26    _maxLevel(4),
27    _minDistance(0.5f),
28    _distanceScale(0.2f),
29    _indexCount(0),
30    _finalMaxLevel(0)
31{
32    _indices = new unsigned int[MAX_INDEX];
33    _memClusterChunk1 = new ClusterListNode[MAX_INDEX];
34    _memClusterChunk2 = new ClusterListNode[MAX_INDEX];
35    _curMemClusterChunk = _memClusterChunk1;
36    _memClusterChunkIndex = 0;
37}
38
39PCASplit::~PCASplit()
40{
41    delete [] _indices;
42    delete [] _memClusterChunk1;
43    delete [] _memClusterChunk2;
44}
45
46void
47PCASplit::computeCentroid(Point *data, int count, Vector3f& mean)
48{
49    float sumx = 0, sumy = 0, sumz = 0;
50    float size = 0;
51    float sumsize = 0;
52    for (int i = 0; i < count; ++i) {
53        size = data[i].size;
54        sumx += data[i].position.x * size;
55        sumy += data[i].position.y * size;
56        sumz += data[i].position.z * size;
57        sumsize += size;
58    }
59    sumsize = 1.0f / sumsize;
60    mean.set(sumx * sumsize, sumy * sumsize, sumz * sumsize);
61}
62
63void
64PCASplit::computeCovariant(Point *data, int count, const Vector3f& mean,
65                           float *m)
66{
67    memset(m, 0, sizeof(float) * 9);
68
69    for (int i = 0; i < count; ++i) {
70        m[0] += (data[i].position.x - mean.x) * (data[i].position.x - mean.x);
71        m[1] += (data[i].position.x - mean.x) * (data[i].position.y - mean.y);
72        m[2] += (data[i].position.x - mean.x) * (data[i].position.z - mean.z);
73        m[4] += (data[i].position.y - mean.y) * (data[i].position.y - mean.y);
74        m[5] += (data[i].position.y - mean.y) * (data[i].position.z - mean.z);
75        m[8] += (data[i].position.z - mean.z) * (data[i].position.z - mean.z);
76    }
77   
78    float invCount = 1.0f / (count - 1);
79    m[0] *= invCount;
80    m[1] *= invCount;
81    m[2] *= invCount;
82    m[4] *= invCount;
83    m[5] *= invCount;
84    m[8] *= invCount;
85    m[3] = m[1];
86    m[6] = m[2];
87    m[7] = m[5];
88}
89
90void
91PCASplit::computeDistortion(Point *data, int count, const Vector3f& mean,
92                            float& distortion, float& finalSize)
93{
94    distortion = 0.0f;
95    finalSize = 0.0f;
96       
97    float maxSize = 0.0f;
98    float distance;
99    for (int i = 0; i < count; ++i) {
100        // sum
101        distance = mean.distanceSquare(data[i].position.x, data[i].position.y, data[i].position.z);
102        distortion += distance;
103
104        if (data[i].size > maxSize) {
105            maxSize = data[i].size;
106        }
107
108        /*
109          finalSize += data[i].size * sqrt(distance);
110        */
111        if (distance > finalSize) {
112            finalSize = distance;
113        }
114    }
115    // equation 2
116    //finalSize = 0.5f * sqrt (finalSize) / (float) (count - 1) + maxSize;
117    finalSize = sqrt (finalSize) + maxSize;
118}
119
120void
121PCASplit::init()
122{
123    _curClusterNode = NULL;
124    _curClusterCount = 0;
125}
126
127Cluster *
128PCASplit::createClusterBlock(ClusterListNode *clusterList, int count, int level)
129{
130    static int cc = 0;
131    cc += count;
132    Cluster *clusterBlock = new Cluster[count];
133
134    _clusterHeader->numOfClusters[level - 1] = count;
135    _clusterHeader->startPointerCluster[level - 1] = clusterBlock;
136
137    TRACE("Cluster created %d [in level %d]:total %d", count, level, cc);
138
139    int i = 0;
140    ClusterListNode *clusterNode = clusterList;
141    while (clusterNode) {
142        clusterBlock[i].centroid = clusterList->data->centroid_t;
143        clusterBlock[i].level = level;
144        clusterBlock[i].scale = clusterList->data->scale_t;
145
146        clusterNode = clusterNode->next;
147        ++i;
148    }
149    if (count != i) {
150        ERROR("Problem walking clusterList: count: %d, i: %d", count, i);
151    }
152    return clusterBlock;
153}
154
ClusterAccel *
PCASplit::doIt(Point *data, int count)
{
    // Build the level-of-detail cluster hierarchy for 'data' and return it.
    // Level 1 is a single root cluster covering all points; each analyze()
    // pass splits the previous level's clusters until _maxLevel is reached.
    // 'data' is referenced (and reordered in place by split()), not copied.
    init();

    _clusterHeader = new ClusterAccel(_maxLevel);

    Cluster *root = new Cluster;
    Cluster_t *cluster_t = new Cluster_t();
    cluster_t->points_t = data;
    cluster_t->numOfPoints_t = count;
    root->level = 1;

    _clusterHeader->root = root;
    _clusterHeader->numOfClusters[0] = 1;
    _clusterHeader->startPointerCluster[0] = root;

    Vector3f mean;
    float distortion, scale;

    computeCentroid(cluster_t->points_t, cluster_t->numOfPoints_t, mean);
    computeDistortion(cluster_t->points_t, cluster_t->numOfPoints_t, mean,
                      distortion, scale);

    cluster_t->centroid_t = mean;
    cluster_t->scale_t = scale;

    // The split threshold shrinks by _distanceScale at each level, so
    // deeper levels produce finer clusters.
    float mindistance = _minDistance;
    int level = 2;

    // Seed the work list from chunk2: the constructor points
    // _curMemClusterChunk at chunk1, so the first analyze() pass allocates
    // its output nodes from chunk1 without clobbering this seed node.
    ClusterListNode *clustNodeList = &(_memClusterChunk2[0]);
    clustNodeList->data = cluster_t;
    clustNodeList->next = NULL;

    _curRoot = root;
    do {
        analyze(clustNodeList, _curRoot, level, mindistance);

        mindistance *= _distanceScale;
        ++level;

        // swap memory buffer & init
        _curMemClusterChunk = (_curMemClusterChunk == _memClusterChunk1) ?
            _memClusterChunk2 : _memClusterChunk1;
        _memClusterChunkIndex = 0;

        // The clusters produced by this pass become the next pass's input.
        clustNodeList = _curClusterNode;
    } while (level <= _maxLevel);

    // NOTE(review): cluster_t and the Cluster_t objects allocated by
    // split() are never freed here; presumably they are owned for the
    // lifetime of the returned ClusterAccel -- confirm before changing.
    return _clusterHeader;
}
206
207void
208PCASplit::addLeafCluster(Cluster_t *cluster)
209{
210    ClusterListNode *clusterNode =
211        &_curMemClusterChunk[_memClusterChunkIndex++];
212    clusterNode->data = cluster;
213    clusterNode->next = _curClusterNode;
214    _curClusterNode = clusterNode;
215    ++_curClusterCount;
216}
217
void
PCASplit::split(Point *data, int count, float limit)
{
    // Recursively split 'data' (reordered in place) into clusters whose
    // radius ('scale') is below 'limit'.  A qualifying group is recorded
    // via addLeafCluster(); a larger group is partitioned across the
    // plane through its centroid perpendicular to the principal axis of
    // its covariance matrix, and both halves are split recursively.
    Vector3f mean;
    float m[9];

    computeCentroid(data, count, mean);
    float scale, distortion;
    computeDistortion(data, count, mean, distortion, scale);

    //if (distortion < limit)
    if (scale < limit) {
        // Small enough: record a leaf cluster and stop recursing.
        Cluster_t *cluster_t = new Cluster_t();
        cluster_t->centroid_t = mean;
        cluster_t->points_t = data;
        cluster_t->numOfPoints_t = count;
        cluster_t->scale_t = scale;
        addLeafCluster(cluster_t);
        return;
    }

    computeCovariant(data, count, mean, m);

    // Begin newmat11 dependency

    // Load the row-major covariance into a newmat SymmetricMatrix
    // (1-based indices; only the lower triangle need be assigned).
    SymmetricMatrix A(3);
    for (int i = 1; i <= 3; ++i) {
        for (int j = 1; j <= i; ++j) {
            A(i, j) = m[(i - 1) * 3 + j - 1];
        }
    }

    // eigenvalues() returns eigenvalues in ascending order, so column 3
    // of U is the eigenvector of the largest eigenvalue: the principal
    // axis of the point set.
    Matrix U;
    DiagonalMatrix D;
    eigenvalues(A, D ,U);
    Vector3f emax(U(1, 3), U(2, 3), U(3, 3));

    // End newmat11 dependency

    // Hoare-style partition: points with non-negative projection onto
    // emax (relative to the centroid) go left, the rest go right.
    int left = 0, right = count - 1;

    Point p;
    for (;left < right;) {
        while (left < count && emax.dot(data[left].position - mean) >= 0.0f) {
            ++left;
        }
        while (right >= 0 && emax.dot(data[right].position - mean) < 0.0f) {
            --right;
        }
        if (left > right) {
            break;
        }

        p = data[left];
        data[left] = data[right];
        data[right] = p;
        ++left, --right;
    }

    if (left == 0 || right == count - 1) {
        // Degenerate partition: every point fell on one side, so recursing
        // would not terminate.
        // NOTE(review): this aborts the entire process; consider forcing a
        // leaf cluster instead -- confirm intended failure policy.
        TRACE("error");
        exit(1);
    } else {
        // Recurse on the two halves; 'left' is the size of the first half.
        split(data, left, limit);
        split(data + left, count - left, limit);
    }
}
285
void
PCASplit::analyze(ClusterListNode *clusterNode, Cluster *parent, int level,
                  float limit)
{
    // Split every cluster in 'clusterNode' (the previous level's output)
    // into sub-clusters for 'level', wire the results into the 'parent'
    // Cluster array (children pointers, per-parent point arrays), and
    // leave the new cluster list in _curClusterNode/_curClusterCount for
    // the caller (doIt) to feed into the next pass.
    if (level > _maxLevel) {
        return;
    }
    if (level > _finalMaxLevel) {
        // Track the deepest level actually produced.
        _finalMaxLevel = level;
    }

    init();

    ClusterListNode *clNode = clusterNode;

    // initialize the indexCount of indices
    // _indices[k] records the cumulative sub-cluster count after splitting
    // the k-th input cluster, giving each parent's slice of the output.
    _indexCount = 0;
    while (clNode) {
        if (clNode->data) {
            split(clNode->data->points_t, clNode->data->numOfPoints_t, limit);
            _indices[_indexCount++] = _curClusterCount;
        }
        clNode = clNode->next;
    }

    //Vector3f mean;
    //computeCentroid(cluster->points, cluster->numOfPoints, mean);

    // the process values of split are in _curClusterNode and _curClusterCount
    ClusterListNode *curClusterNode = _curClusterNode;
    unsigned int curClusterNodeCount = _curClusterCount;

    if (curClusterNodeCount) {
        // create and init centroid
        Cluster *retClusterBlock =
            createClusterBlock(curClusterNode, curClusterNodeCount, level);

        _curRoot = retClusterBlock;

        if (level == _maxLevel) {
            // Deepest level: in addition to wiring parents, copy each
            // sub-cluster's point data and color into the Cluster array.
            ClusterListNode *node = curClusterNode;
            if (_indexCount > 0) {
                // for parent
                Point *points = new Point[curClusterNodeCount];

                parent[0].setChildren(retClusterBlock, _indices[0]);
                parent[0].setPoints(points, _indices[0]);

                for (unsigned int i = 0, in = 0; i < curClusterNodeCount; ++i) {
                    // Crossing a cumulative boundary means we moved on to
                    // the next parent's slice of the output.
                    // NOTE(review): after the final boundary, 'in' can
                    // reach _indexCount and _indices[in] is then one past
                    // the last written entry -- confirm this is benign.
                    if (i >= _indices[in]) {
                        in++;
                        parent[in].setChildren(retClusterBlock + _indices[in - 1], _indices[in] - _indices[in - 1]);
                        parent[in].setPoints(points + _indices[in - 1], _indices[in] - _indices[in - 1]);
                    }

                    retClusterBlock[i].scale = node->data->scale_t;
                    retClusterBlock[i].centroid = node->data->centroid_t;
                    retClusterBlock[i].points = node->data->points_t;
                    retClusterBlock[i].numOfPoints = node->data->numOfPoints_t;
                    // Random RGB debug color, fixed alpha.
                    retClusterBlock[i].color.set(float(rand()) / RAND_MAX,
                                                 float(rand()) / RAND_MAX,
                                                 float(rand()) / RAND_MAX,
                                                 0.2);

                    // Representative point for the sub-cluster: its
                    // centroid, sized by its scale.
                    points[i].position = node->data->centroid_t;
                    points[i].color.set(1.0f, 1.0f, 1.0f, 0.2f);
                    points[i].size = node->data->scale_t;

                    node = node->next;
                }
            }
        } else {
            // Intermediate level: wire each parent to its slice of the new
            // Cluster block and build the representative point array.
            Point *points = new Point[curClusterNodeCount];
            ClusterListNode *node = curClusterNode;

            if (_indexCount > 0) {
                parent[0].setPoints(points, _indices[0]);
                parent[0].setChildren(retClusterBlock, _indices[0]);
                for (int k = 1; k < _indexCount; ++k) {
                    parent[k].setPoints(points + _indices[k - 1],
                                        _indices[k] - _indices[k - 1]);
                    parent[k].setChildren(retClusterBlock + _indices[k - 1],
                                          _indices[k] - _indices[k - 1]);
                }

                // set points of sub-clusters
                for (unsigned int i = 0; i < curClusterNodeCount; ++i) {
                    points[i].position = node->data->centroid_t;
                    points[i].color.set(1.0f, 1.0f, 1.0f, 0.2f);
                    points[i].size = node->data->scale_t;
                    node = node->next;
                }
            }
        }
    }
}
Note: See TracBrowser for help on using the repository browser.