1 | /* -*- mode: c++; c-basic-offset: 4; indent-tabs-mode: nil -*- */ |
---|
2 | /* |
---|
3 | * Copyright (c) 2004-2013 HUBzero Foundation, LLC |
---|
4 | * |
---|
5 | */ |
---|
6 | #include <stdio.h> |
---|
7 | |
---|
8 | #define WANT_STREAM // include.h will get stream fns |
---|
9 | #define WANT_MATH // include.h will get math fns |
---|
10 | // newmatap.h will get include.h |
---|
11 | #include <newmatap.h> // need matrix applications |
---|
12 | #include <newmatio.h> // need matrix output routines |
---|
13 | #include <newmatrc.h> |
---|
14 | #ifdef use_namespace |
---|
15 | using namespace NEWMAT; // access NEWMAT namespace |
---|
16 | #endif |
---|
17 | |
---|
18 | #include "PCASplit.h" |
---|
19 | #include "Trace.h" |
---|
20 | |
---|
21 | using namespace vrmath; |
---|
22 | using namespace PCA; |
---|
23 | |
---|
/**
 * Construct a PCASplit with default clustering parameters and
 * pre-allocate the fixed-size working buffers used by doIt()/analyze().
 *
 * _maxLevel      - number of levels in the cluster hierarchy built by doIt()
 * _minDistance   - initial scale threshold below which split() stops recursing
 * _distanceScale - per-level multiplier applied to that threshold in doIt()
 */
PCASplit::PCASplit() :
    _maxLevel(4),
    _minDistance(0.5f),
    _distanceScale(0.2f),
    _indexCount(0),
    _finalMaxLevel(0)
{
    // Scratch array of running cluster counts, filled by analyze().
    _indices = new unsigned int[MAX_INDEX];
    // Two node pools: doIt() ping-pongs between them each level so the
    // list built for the previous level stays valid while the next
    // level's list is being allocated.
    _memClusterChunk1 = new ClusterListNode[MAX_INDEX];
    _memClusterChunk2 = new ClusterListNode[MAX_INDEX];
    _curMemClusterChunk = _memClusterChunk1;
    _memClusterChunkIndex = 0;
}
---|
37 | |
---|
/**
 * Release the three buffers allocated in the constructor.
 *
 * NOTE(review): only the scratch buffers are freed here; Cluster blocks
 * and the ClusterAccel handed out via doIt() are not released by this
 * destructor — confirm their ownership lies with the caller.
 */
PCASplit::~PCASplit()
{
    delete [] _indices;
    delete [] _memClusterChunk1;
    delete [] _memClusterChunk2;
}
---|
44 | |
---|
45 | void |
---|
46 | PCASplit::computeCentroid(Point *data, int count, Vector3f& mean) |
---|
47 | { |
---|
48 | float sumx = 0, sumy = 0, sumz = 0; |
---|
49 | float size = 0; |
---|
50 | float sumsize = 0; |
---|
51 | for (int i = 0; i < count; ++i) { |
---|
52 | size = data[i].size; |
---|
53 | sumx += data[i].position.x * size; |
---|
54 | sumy += data[i].position.y * size; |
---|
55 | sumz += data[i].position.z * size; |
---|
56 | sumsize += size; |
---|
57 | } |
---|
58 | sumsize = 1.0f / sumsize; |
---|
59 | mean.set(sumx * sumsize, sumy * sumsize, sumz * sumsize); |
---|
60 | } |
---|
61 | |
---|
62 | void |
---|
63 | PCASplit::computeCovariant(Point *data, int count, const Vector3f& mean, |
---|
64 | float *m) |
---|
65 | { |
---|
66 | memset(m, 0, sizeof(float) * 9); |
---|
67 | |
---|
68 | for (int i = 0; i < count; ++i) { |
---|
69 | m[0] += (data[i].position.x - mean.x) * (data[i].position.x - mean.x); |
---|
70 | m[1] += (data[i].position.x - mean.x) * (data[i].position.y - mean.y); |
---|
71 | m[2] += (data[i].position.x - mean.x) * (data[i].position.z - mean.z); |
---|
72 | m[4] += (data[i].position.y - mean.y) * (data[i].position.y - mean.y); |
---|
73 | m[5] += (data[i].position.y - mean.y) * (data[i].position.z - mean.z); |
---|
74 | m[8] += (data[i].position.z - mean.z) * (data[i].position.z - mean.z); |
---|
75 | } |
---|
76 | |
---|
77 | float invCount = 1.0f / (count - 1); |
---|
78 | m[0] *= invCount; |
---|
79 | m[1] *= invCount; |
---|
80 | m[2] *= invCount; |
---|
81 | m[4] *= invCount; |
---|
82 | m[5] *= invCount; |
---|
83 | m[8] *= invCount; |
---|
84 | m[3] = m[1]; |
---|
85 | m[6] = m[2]; |
---|
86 | m[7] = m[5]; |
---|
87 | } |
---|
88 | |
---|
89 | void |
---|
90 | PCASplit::computeDistortion(Point *data, int count, const Vector3f& mean, |
---|
91 | float& distortion, float& finalSize) |
---|
92 | { |
---|
93 | distortion = 0.0f; |
---|
94 | finalSize = 0.0f; |
---|
95 | |
---|
96 | float maxSize = 0.0f; |
---|
97 | float distance; |
---|
98 | for (int i = 0; i < count; ++i) { |
---|
99 | // sum |
---|
100 | distance = mean.distanceSquare(data[i].position.x, data[i].position.y, data[i].position.z); |
---|
101 | distortion += distance; |
---|
102 | |
---|
103 | if (data[i].size > maxSize) { |
---|
104 | maxSize = data[i].size; |
---|
105 | } |
---|
106 | |
---|
107 | /* |
---|
108 | finalSize += data[i].size * sqrt(distance); |
---|
109 | */ |
---|
110 | if (distance > finalSize) { |
---|
111 | finalSize = distance; |
---|
112 | } |
---|
113 | } |
---|
114 | // equation 2 |
---|
115 | //finalSize = 0.5f * sqrt (finalSize) / (float) (count - 1) + maxSize; |
---|
116 | finalSize = sqrt (finalSize) + maxSize; |
---|
117 | } |
---|
118 | |
---|
119 | void |
---|
120 | PCASplit::init() |
---|
121 | { |
---|
122 | _curClusterNode = NULL; |
---|
123 | _curClusterCount = 0; |
---|
124 | } |
---|
125 | |
---|
126 | Cluster * |
---|
127 | PCASplit::createClusterBlock(ClusterListNode *clusterList, int count, int level) |
---|
128 | { |
---|
129 | static int cc = 0; |
---|
130 | cc += count; |
---|
131 | Cluster *clusterBlock = new Cluster[count]; |
---|
132 | |
---|
133 | _clusterHeader->numOfClusters[level - 1] = count; |
---|
134 | _clusterHeader->startPointerCluster[level - 1] = clusterBlock; |
---|
135 | |
---|
136 | TRACE("Cluster created %d [in level %d]:total %d", count, level, cc); |
---|
137 | |
---|
138 | int i = 0; |
---|
139 | ClusterListNode *clusterNode = clusterList; |
---|
140 | while (clusterNode) { |
---|
141 | clusterBlock[i].centroid = clusterList->data->centroid_t; |
---|
142 | clusterBlock[i].level = level; |
---|
143 | clusterBlock[i].scale = clusterList->data->scale_t; |
---|
144 | |
---|
145 | clusterNode = clusterNode->next; |
---|
146 | ++i; |
---|
147 | } |
---|
148 | if (count != i) { |
---|
149 | ERROR("Problem walking clusterList: count: %d, i: %d", count, i); |
---|
150 | } |
---|
151 | return clusterBlock; |
---|
152 | } |
---|
153 | |
---|
/**
 * Build the full cluster hierarchy for \c count points.
 *
 * Creates the level-1 root cluster covering all points, then calls
 * analyze() once per level (2.._maxLevel), tightening the split
 * threshold by _distanceScale each level and ping-ponging between the
 * two node pools so the previous level's list remains readable while
 * the next level's is written.
 *
 * \param data   points to cluster (not copied; referenced by clusters)
 * \param count  number of entries in \c data
 * \return       the populated ClusterAccel (caller takes ownership)
 *
 * NOTE(review): root->centroid and root->scale are never assigned here
 * even though mean/scale are computed — confirm whether consumers of
 * the level-1 cluster rely on them.
 * NOTE(review): cluster_t is never freed after the loop — presumably a
 * deliberate leak given the pooled allocation style; verify.
 */
ClusterAccel *
PCASplit::doIt(Point *data, int count)
{
    init();

    _clusterHeader = new ClusterAccel(_maxLevel);

    // Level 1: a single root cluster that owns every input point.
    Cluster *root = new Cluster;
    Cluster_t *cluster_t = new Cluster_t();
    cluster_t->points_t = data;
    cluster_t->numOfPoints_t = count;
    root->level = 1;

    _clusterHeader->root = root;
    _clusterHeader->numOfClusters[0] = 1;
    _clusterHeader->startPointerCluster[0] = root;

    Vector3f mean;
    float distortion, scale;

    computeCentroid(cluster_t->points_t, cluster_t->numOfPoints_t, mean);
    computeDistortion(cluster_t->points_t, cluster_t->numOfPoints_t, mean,
                      distortion, scale);

    cluster_t->centroid_t = mean;
    cluster_t->scale_t = scale;

    float mindistance = _minDistance;
    int level = 2;

    // Seed the work list with the root cluster, using pool 2 so the
    // first analyze() pass can allocate its output nodes from pool 1.
    ClusterListNode *clustNodeList = &(_memClusterChunk2[0]);
    clustNodeList->data = cluster_t;
    clustNodeList->next = NULL;

    _curRoot = root;
    do {
        // Splits every cluster in clustNodeList; leaves the new level's
        // list in _curClusterNode and its block in _curRoot.
        analyze(clustNodeList, _curRoot, level, mindistance);

        mindistance *= _distanceScale;
        ++level;

        // swap memory buffer & init
        _curMemClusterChunk = (_curMemClusterChunk == _memClusterChunk1) ?
            _memClusterChunk2 : _memClusterChunk1;
        _memClusterChunkIndex = 0;

        clustNodeList = _curClusterNode;
    } while (level <= _maxLevel);

    return _clusterHeader;
}
---|
205 | |
---|
206 | void |
---|
207 | PCASplit::addLeafCluster(Cluster_t *cluster) |
---|
208 | { |
---|
209 | ClusterListNode *clusterNode = |
---|
210 | &_curMemClusterChunk[_memClusterChunkIndex++]; |
---|
211 | clusterNode->data = cluster; |
---|
212 | clusterNode->next = _curClusterNode; |
---|
213 | _curClusterNode = clusterNode; |
---|
214 | ++_curClusterCount; |
---|
215 | } |
---|
216 | |
---|
/**
 * Recursively split a point set along its principal axis until each
 * piece's radius estimate falls below \c limit.
 *
 * If the cluster is already small enough it is recorded as a leaf via
 * addLeafCluster().  Otherwise the covariance matrix is
 * eigen-decomposed (newmat), the points are partitioned in place by the
 * sign of their projection onto the dominant eigenvector, and both
 * halves are split recursively.
 *
 * \param data   points to split (reordered in place by the partition)
 * \param count  number of entries in \c data
 * \param limit  scale threshold below which a cluster becomes a leaf
 *
 * NOTE(review): a degenerate partition (all points on one side) hits
 * exit(1) below — confirm that aborting the process is acceptable here.
 */
void
PCASplit::split(Point *data, int count, float limit)
{
    Vector3f mean;
    float m[9];

    computeCentroid(data, count, mean);
    float scale, distortion;
    computeDistortion(data, count, mean, distortion, scale);

    // Leaf test uses the radius estimate, not the distortion sum.
    //if (distortion < limit)
    if (scale < limit) {
        Cluster_t *cluster_t = new Cluster_t();
        cluster_t->centroid_t = mean;
        cluster_t->points_t = data;
        cluster_t->numOfPoints_t = count;
        cluster_t->scale_t = scale;
        addLeafCluster(cluster_t);
        return;
    }

    computeCovariant(data, count, mean, m);

    // Begin newmat11 dependency

    // Load the symmetric covariance (lower triangle) into newmat's
    // 1-based SymmetricMatrix.
    SymmetricMatrix A(3);
    for (int i = 1; i <= 3; ++i) {
        for (int j = 1; j <= i; ++j) {
            A(i, j) = m[(i - 1) * 3 + j - 1];
        }
    }

    Matrix U;
    DiagonalMatrix D;
    eigenvalues(A, D ,U);
    // Eigenvalues are sorted ascending, so column 3 of U is the
    // eigenvector of the largest eigenvalue: the principal axis.
    Vector3f emax(U(1, 3), U(2, 3), U(3, 3));

    // End newmat11 dependency

    // Two-pointer partition: points with a non-negative projection on
    // emax move to the front, negative projections to the back.
    int left = 0, right = count - 1;

    Point p;
    for (;left < right;) {
        while (left < count && emax.dot(data[left].position - mean) >= 0.0f) {
            ++left;
        }
        while (right >= 0 && emax.dot(data[right].position - mean) < 0.0f) {
            --right;
        }
        if (left > right) {
            break;
        }

        // Swap the misplaced pair and advance both cursors.
        p = data[left];
        data[left] = data[right];
        data[right] = p;
        ++left, --right;
    }

    if (left == 0 || right == count - 1) {
        // Degenerate split: every point landed on one side of the
        // plane, so recursing would not terminate.
        TRACE("error");
        exit(1);
    } else {
        // Recurse on the two halves: [0, left) and [left, count).
        split(data, left, limit);
        split(data + left, count - left, limit);
    }
}
---|
284 | |
---|
/**
 * Split every cluster on \c clusterNode's list and wire the resulting
 * child clusters (and their representative points) into \c parent.
 *
 * For each input cluster, split() appends its leaves to _curClusterNode
 * and the running total is recorded in _indices, so _indices[k] is the
 * cumulative child count after the (k+1)-th parent — the offsets used
 * below to assign each parent its contiguous slice of children.
 *
 * \param clusterNode  list of clusters produced by the previous level
 * \param parent       Cluster array for the previous level, parallel to
 *                     the list order of \c clusterNode
 * \param level        1-based level being generated
 * \param limit        scale threshold passed through to split()
 *
 * Side effects: sets _curRoot to the new level's Cluster block; leaves
 * the new cluster list in _curClusterNode for the caller (doIt()).
 *
 * NOTE(review): in the max-level loop, `in` is incremented before
 * `_indices[in]` is read — when i reaches the last cumulative count,
 * this reads one past the filled portion of _indices; confirm whether
 * curClusterNodeCount always equals _indices[_indexCount - 1] so the
 * branch cannot fire on the final slice.
 */
void
PCASplit::analyze(ClusterListNode *clusterNode, Cluster *parent, int level,
                  float limit)
{
    if (level > _maxLevel) {
        return;
    }
    // Track the deepest level actually produced.
    if (level > _finalMaxLevel) {
        _finalMaxLevel = level;
    }

    init();

    ClusterListNode *clNode = clusterNode;

    // initialize the indexCount of indices
    _indexCount = 0;
    while (clNode) {
        if (clNode->data) {
            // Appends this cluster's leaves to _curClusterNode; record
            // the cumulative total so parents can slice the result.
            split(clNode->data->points_t, clNode->data->numOfPoints_t, limit);
            _indices[_indexCount++] = _curClusterCount;
        }
        clNode = clNode->next;
    }

    //Vector3f mean;
    //computeCentroid(cluster->points, cluster->numOfPoints, mean);

    // the process values of split are in _curClusterNode and _curClusterCount
    ClusterListNode *curClusterNode = _curClusterNode;
    unsigned int curClusterNodeCount = _curClusterCount;

    if (curClusterNodeCount) {
        // create and init centroid
        Cluster *retClusterBlock =
            createClusterBlock(curClusterNode, curClusterNodeCount, level);

        _curRoot = retClusterBlock;

        if (level == _maxLevel) {
            // Deepest level: children additionally keep direct
            // references to their raw point ranges, and get a random
            // debug color.
            ClusterListNode *node = curClusterNode;
            if (_indexCount > 0) {
                // for parent
                Point *points = new Point[curClusterNodeCount];

                parent[0].setChildren(retClusterBlock, _indices[0]);
                parent[0].setPoints(points, _indices[0]);

                for (unsigned int i = 0, in = 0; i < curClusterNodeCount; ++i) {
                    // Crossed into the next parent's slice: advance and
                    // hand it its [_indices[in-1], _indices[in]) range.
                    if (i >= _indices[in]) {
                        in++;
                        parent[in].setChildren(retClusterBlock + _indices[in - 1], _indices[in] - _indices[in - 1]);
                        parent[in].setPoints(points + _indices[in - 1], _indices[in] - _indices[in - 1]);
                    }

                    retClusterBlock[i].scale = node->data->scale_t;
                    retClusterBlock[i].centroid = node->data->centroid_t;
                    retClusterBlock[i].points = node->data->points_t;
                    retClusterBlock[i].numOfPoints = node->data->numOfPoints_t;
                    retClusterBlock[i].color.set(float(rand()) / RAND_MAX,
                                                 float(rand()) / RAND_MAX,
                                                 float(rand()) / RAND_MAX,
                                                 0.2);

                    // One representative point per child: centroid
                    // position, white, sized by the cluster's scale.
                    points[i].position = node->data->centroid_t;
                    points[i].color.set(1.0f, 1.0f, 1.0f, 0.2f);
                    points[i].size = node->data->scale_t;

                    node = node->next;
                }
            }
        } else {
            // Intermediate level: parents get child slices and
            // representative points, but children keep no raw points.
            Point *points = new Point[curClusterNodeCount];
            ClusterListNode *node = curClusterNode;

            if (_indexCount > 0) {
                parent[0].setPoints(points, _indices[0]);
                parent[0].setChildren(retClusterBlock, _indices[0]);
                for (int k = 1; k < _indexCount; ++k) {
                    parent[k].setPoints(points + _indices[k - 1],
                                        _indices[k] - _indices[k - 1]);
                    parent[k].setChildren(retClusterBlock + _indices[k - 1],
                                          _indices[k] - _indices[k - 1]);
                }

                // set points of sub-clusters
                for (unsigned int i = 0; i < curClusterNodeCount; ++i) {
                    points[i].position = node->data->centroid_t;
                    points[i].color.set(1.0f, 1.0f, 1.0f, 0.2f);
                    points[i].size = node->data->scale_t;
                    node = node->next;
                }
            }
        }
    }
}
---|