source: trunk/packages/vizservers/nanovis/PCASplit.cpp @ 3452

Last change on this file since 3452 was 3452, checked in by ldelgass, 12 years ago

Remove XINETD define from nanovis. We only support server mode now, no glut
interaction loop with mouse/keyboard handlers. Fixes for trace logging to make
output closer to vtkvis: include function name in trace messages, and remove
newlines from format strings in macros since newlines get added by syslog.

  • Property svn:eol-style set to native
File size: 10.7 KB
/* -*- mode: c++; c-basic-offset: 4; indent-tabs-mode: nil -*- */
#include <stdio.h>

#define WANT_STREAM                  // include.h will get stream fns
#define WANT_MATH                    // include.h will get math fns
                                     // newmatap.h will get include.h
#include <newmatap.h>                // need matrix applications
#include <newmatio.h>                // need matrix output routines
#include <newmatrc.h>

#include "PCASplit.h"

#ifdef use_namespace
using namespace NEWMAT;              // access NEWMAT namespace
#endif

using namespace PCA;

PCASplit::PCASplit() :
    _maxLevel(4),
    _minDistance(0.5f),
    _distanceScale(0.2f),
    _indexCount(0),
    _finalMaxLevel(0)
{
    _indices = new unsigned int[MAX_INDEX];
    _memClusterChunk1 = new ClusterListNode[MAX_INDEX];
    _memClusterChunk2 = new ClusterListNode[MAX_INDEX];
    _curMemClusterChunk = _memClusterChunk1;
    _memClusterChunkIndex = 0;
}

PCASplit::~PCASplit()
{
    delete [] _indices;
    delete [] _memClusterChunk1;
    delete [] _memClusterChunk2;
}

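// Compute the size-weighted centroid of the point set.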
void
PCASplit::computeCentroid(Point *data, int count, Vector3& mean)
{
    float sumx = 0, sumy = 0, sumz = 0;
    float size = 0;
    float sumsize = 0;
    for (int i = 0; i < count; ++i) {
        size = data[i].size;
        sumx += data[i].position.x * size;
        sumy += data[i].position.y * size;
        sumz += data[i].position.z * size;
        sumsize += size;
    }
    sumsize = 1.0f / sumsize;
    mean.set(sumx * sumsize, sumy * sumsize, sumz * sumsize);
}

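// Build the 3x3 sample covariance matrix (divided by count - 1) of the point
// positions about the mean, stored row-major in m[9]. Only the upper
// triangle is accumulated and then mirrored into the lower triangle.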
void
PCASplit::computeCovariant(Point *data, int count, const Vector3& mean,
                           float *m)
{
    memset(m, 0, sizeof(float) * 9);

    for (int i = 0; i < count; ++i) {
        m[0] += (data[i].position.x - mean.x) * (data[i].position.x - mean.x);
        m[1] += (data[i].position.x - mean.x) * (data[i].position.y - mean.y);
        m[2] += (data[i].position.x - mean.x) * (data[i].position.z - mean.z);
        m[4] += (data[i].position.y - mean.y) * (data[i].position.y - mean.y);
        m[5] += (data[i].position.y - mean.y) * (data[i].position.z - mean.z);
        m[8] += (data[i].position.z - mean.z) * (data[i].position.z - mean.z);
    }

    float invCount = 1.0f / (count - 1);
    m[0] *= invCount;
    m[1] *= invCount;
    m[2] *= invCount;
    m[4] *= invCount;
    m[5] *= invCount;
    m[8] *= invCount;
    m[3] = m[1];
    m[6] = m[2];
    m[7] = m[5];
}

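// Compute the distortion (sum of squared distances to the mean) and a
// representative size for the cluster: the largest distance from the mean
// plus the largest point size.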
void
PCASplit::computeDistortion(Point *data, int count, const Vector3& mean,
                            float& distortion, float& finalSize)
{
    distortion = 0.0f;
    finalSize = 0.0f;

    float maxSize = 0.0f;
    float distance;
    for (int i = 0; i < count; ++i) {
        // sum of squared distances to the mean
        distance = mean.distanceSquare(data[i].position.x, data[i].position.y, data[i].position.z);
        distortion += distance;

        if (data[i].size > maxSize) {
            maxSize = data[i].size;
        }

        /*
          finalSize += data[i].size * sqrt(distance);
        */
        if (distance > finalSize) {
            finalSize = distance;
        }
    }
    // equation 2
    //finalSize = 0.5f * sqrt(finalSize) / (float) (count - 1) + maxSize;
    finalSize = sqrt(finalSize) + maxSize;
}

void
PCASplit::init()
{
    _curClusterNode = NULL;
    _curClusterCount = 0;
}

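// Allocate a contiguous block of Cluster objects for one level of the
// hierarchy, register it in the cluster header, and copy the centroid and
// scale from each node of the working cluster list.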
Cluster *
PCASplit::createClusterBlock(ClusterListNode *clusterList, int count, int level)
{
    static int cc = 0;
    cc += count;
    Cluster *clusterBlock = new Cluster[count];

    _clusterHeader->numOfClusters[level - 1] = count;
    _clusterHeader->startPointerCluster[level - 1] = clusterBlock;

    TRACE("Cluster created %d [in level %d]:total %d", count, level, cc);

    int i = 0;
    ClusterListNode *clusterNode = clusterList;
    while (clusterNode) {
        // Copy from the node being walked, not from the head of the list.
        clusterBlock[i].centroid = clusterNode->data->centroid_t;
        clusterBlock[i].level = level;
        clusterBlock[i].scale = clusterNode->data->scale_t;

        clusterNode = clusterNode->next;
        ++i;
    }
    if (count != i) {
        ERROR("Problem walking clusterList: count: %d, i: %d", count, i);
    }
    return clusterBlock;
}

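// Entry point: build the cluster hierarchy for the given points. Level 1 is
// a single root cluster; each pass of analyze() splits the clusters of the
// previous level using a progressively smaller distance limit.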
ClusterAccel *
PCASplit::doIt(Point *data, int count)
{
    init();

    _clusterHeader = new ClusterAccel(_maxLevel);

    Cluster *root = new Cluster;
    Cluster_t *cluster_t = new Cluster_t();
    cluster_t->points_t = data;
    cluster_t->numOfPoints_t = count;
    root->level = 1;

    _clusterHeader->root = root;
    _clusterHeader->numOfClusters[0] = 1;
    _clusterHeader->startPointerCluster[0] = root;

    Vector3 mean;
    float distortion, scale;

    computeCentroid(cluster_t->points_t, cluster_t->numOfPoints_t, mean);
    computeDistortion(cluster_t->points_t, cluster_t->numOfPoints_t, mean,
                      distortion, scale);

    cluster_t->centroid_t = mean;
    cluster_t->scale_t = scale;

    float mindistance = _minDistance;
    int level = 2;

    ClusterListNode *clustNodeList = &(_memClusterChunk2[0]);
    clustNodeList->data = cluster_t;
    clustNodeList->next = NULL;

    _curRoot = root;
    do {
        analyze(clustNodeList, _curRoot, level, mindistance);

        mindistance *= _distanceScale;
        ++level;

        // swap memory buffer & init
        _curMemClusterChunk = (_curMemClusterChunk == _memClusterChunk1) ?
            _memClusterChunk2 : _memClusterChunk1;
        _memClusterChunkIndex = 0;

        clustNodeList = _curClusterNode;
    } while (level <= _maxLevel);

    return _clusterHeader;
}

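// Prepend a finished (leaf) cluster to the current cluster list, drawing the
// list node from the preallocated chunk for this pass.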
void
PCASplit::addLeafCluster(Cluster_t *cluster)
{
    ClusterListNode *clusterNode =
        &_curMemClusterChunk[_memClusterChunkIndex++];
    clusterNode->data = cluster;
    clusterNode->next = _curClusterNode;
    _curClusterNode = clusterNode;
    ++_curClusterCount;
}

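// Recursively split a point set with PCA: if the cluster is small enough
// (scale < limit) emit it as a leaf; otherwise partition the points by the
// sign of their projection onto the principal eigenvector of the covariance
// matrix and recurse on both halves.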
void
PCASplit::split(Point *data, int count, float limit)
{
    Vector3 mean;
    float m[9];

    computeCentroid(data, count, mean);
    float scale, distortion;
    computeDistortion(data, count, mean, distortion, scale);

    //if (distortion < limit)
    if (scale < limit) {
        Cluster_t *cluster_t = new Cluster_t();
        cluster_t->centroid_t = mean;
        cluster_t->points_t = data;
        cluster_t->numOfPoints_t = count;
        cluster_t->scale_t = scale;
        addLeafCluster(cluster_t);
        return;
    }

    computeCovariant(data, count, mean, m);

    // Begin newmat11 dependency

    SymmetricMatrix A(3);
    for (int i = 1; i <= 3; ++i) {
        for (int j = 1; j <= i; ++j) {
            A(i, j) = m[(i - 1) * 3 + j - 1];
        }
    }

    Matrix U;
    DiagonalMatrix D;
    eigenvalues(A, D, U);
    // newmat sorts eigenvalues in ascending order, so column 3 of U is the
    // eigenvector of the largest eigenvalue (the principal axis).
    Vector3 emax(U(1, 3), U(2, 3), U(3, 3));

    // End newmat11 dependency

    int left = 0, right = count - 1;

    Point p;
    while (left < right) {
        while (left < count && emax.dot(data[left].position - mean) >= 0.0f) {
            ++left;
        }
        while (right >= 0 && emax.dot(data[right].position - mean) < 0.0f) {
            --right;
        }
        if (left > right) {
            break;
        }

        p = data[left];
        data[left] = data[right];
        data[right] = p;
        ++left, --right;
    }

    if (left == 0 || right == count - 1) {
        ERROR("split failed to partition points: left=%d, right=%d, count=%d",
              left, right, count);
        exit(1);
    } else {
        split(data, left, limit);
        split(data + left, count - left, limit);
    }
}

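// Split every cluster on the current level's list, then build the Cluster
// block for this level and wire each parent cluster to its children (and,
// at the deepest level, to its representative points).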
void
PCASplit::analyze(ClusterListNode *clusterNode, Cluster *parent, int level,
                  float limit)
{
    if (level > _maxLevel) {
        return;
    }
    if (level > _finalMaxLevel) {
        _finalMaxLevel = level;
    }

    init();

    ClusterListNode *clNode = clusterNode;

    // Reset the index array; _indices[i] holds the cumulative child-cluster
    // count after splitting the i-th parent cluster.
    _indexCount = 0;
    while (clNode) {
        if (clNode->data) {
            split(clNode->data->points_t, clNode->data->numOfPoints_t, limit);
            _indices[_indexCount++] = _curClusterCount;
        }
        clNode = clNode->next;
    }

    //Vector3 mean;
    //computeCentroid(cluster->points, cluster->numOfPoints, mean);

    // split() leaves its results in _curClusterNode and _curClusterCount.
    ClusterListNode *curClusterNode = _curClusterNode;
    unsigned int curClusterNodeCount = _curClusterCount;

    if (curClusterNodeCount) {
        // Create the Cluster block for this level.
        Cluster *retClusterBlock =
            createClusterBlock(curClusterNode, curClusterNodeCount, level);

        _curRoot = retClusterBlock;

        if (level == _maxLevel) {
            ClusterListNode *node = curClusterNode;
            if (_indexCount > 0) {
                // Representative points for the parent clusters.
                Point *points = new Point[curClusterNodeCount];

                parent[0].setChildren(retClusterBlock, _indices[0]);
                parent[0].setPoints(points, _indices[0]);

                for (unsigned int i = 0, in = 0; i < curClusterNodeCount; ++i) {
                    if (i >= _indices[in]) {
                        in++;
                        parent[in].setChildren(retClusterBlock + _indices[in - 1], _indices[in] - _indices[in - 1]);
                        parent[in].setPoints(points + _indices[in - 1], _indices[in] - _indices[in - 1]);
                    }

                    retClusterBlock[i].scale = node->data->scale_t;
                    retClusterBlock[i].centroid = node->data->centroid_t;
                    retClusterBlock[i].points = node->data->points_t;
                    retClusterBlock[i].numOfPoints = node->data->numOfPoints_t;
                    retClusterBlock[i].color.set(float(rand()) / RAND_MAX,
                                                 float(rand()) / RAND_MAX,
                                                 float(rand()) / RAND_MAX,
                                                 0.2);

                    points[i].position = node->data->centroid_t;
                    points[i].color.set(1.0f, 1.0f, 1.0f, 0.2f);
                    points[i].size = node->data->scale_t;

                    node = node->next;
                }
            }
        } else {
            Point *points = new Point[curClusterNodeCount];
            ClusterListNode *node = curClusterNode;

            if (_indexCount > 0) {
                parent[0].setPoints(points, _indices[0]);
                parent[0].setChildren(retClusterBlock, _indices[0]);
                for (int k = 1; k < _indexCount; ++k) {
                    parent[k].setPoints(points + _indices[k - 1],
                                        _indices[k] - _indices[k - 1]);
                    parent[k].setChildren(retClusterBlock + _indices[k - 1],
                                          _indices[k] - _indices[k - 1]);
                }

                // Set the representative points of the sub-clusters.
                for (unsigned int i = 0; i < curClusterNodeCount; ++i) {
                    points[i].position = node->data->centroid_t;
                    points[i].color.set(1.0f, 1.0f, 1.0f, 0.2f);
                    points[i].size = node->data->scale_t;
                    node = node->next;
                }
            }
        }
    }
}