/*
This file is part of ``kdtree'', a library for working with kd-trees.
Copyright (C) 2007-2011 John Tsiombikas <nuclear@member.fsf.org>

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

1. Redistributions of source code must retain the above copyright notice, this
   list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
   this list of conditions and the following disclaimer in the documentation
   and/or other materials provided with the distribution.
3. The name of the author may not be used to endorse or promote products
   derived from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
OF SUCH DAMAGE.
*/
/* single nearest neighbor search written by Tamas Nepusz <tamas@cs.rhul.ac.uk> */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <math.h>
#include "kdtree.h"

#define USE_LIST_NODE_ALLOCATOR
#define NO_PTHREADS
#define I_WANT_THREAD_BUGS

#if defined(WIN32) || defined(__WIN32__)
#include <malloc.h>
#endif

#ifdef USE_LIST_NODE_ALLOCATOR

#ifndef NO_PTHREADS
#include <pthread.h>
#else

#ifndef I_WANT_THREAD_BUGS
#error "You are compiling with the fast list node allocator, with pthreads disabled! This WILL break if used from multiple threads."
#endif /* I want thread bugs */

#endif /* pthread support */
#endif /* use list node allocator */

struct kdhyperrect {
    int dim;
    double *min, *max;    /* minimum/maximum coords */
};

struct kdnode {
    double *pos;
    int dir;
    void *data;

    struct kdnode *left, *right;    /* negative/positive side */
};

struct res_node {
    struct kdnode *item;
    double dist_sq;
    struct res_node *next;
};

struct kdtree {
    int dim;
    struct kdnode *root;
    struct kdhyperrect *rect;
    void (*destr)(void*);
};

struct kdres {
    struct kdtree *tree;
    struct res_node *rlist, *riter;
    int size;
};

#define SQ(x) ((x) * (x))


static void clear_rec(struct kdnode *node, void (*destr)(void*));
static int insert_rec(struct kdnode **node, const double *pos, void *data, int dir, int dim);
static int rlist_insert(struct res_node *list, struct kdnode *item, double dist_sq);
static void clear_results(struct kdres *set);

static struct kdhyperrect* hyperrect_create(int dim, const double *min, const double *max);
static void hyperrect_free(struct kdhyperrect *rect);
static struct kdhyperrect* hyperrect_duplicate(const struct kdhyperrect *rect);
static void hyperrect_extend(struct kdhyperrect *rect, const double *pos);
static double hyperrect_dist_sq(struct kdhyperrect *rect, const double *pos);

#ifdef USE_LIST_NODE_ALLOCATOR
static struct res_node *alloc_resnode(void);
static void free_resnode(struct res_node*);
#else
#define alloc_resnode() malloc(sizeof(struct res_node))
#define free_resnode(n) free(n)
#endif



struct kdtree *kd_create(int k)
{
    struct kdtree *tree;

    if(!(tree = malloc(sizeof *tree))) {
        return 0;
    }

    tree->dim = k;
    tree->root = 0;
    tree->destr = 0;
    tree->rect = 0;

    return tree;
}

void kd_free(struct kdtree *tree)
{
    if(tree) {
        kd_clear(tree);
        free(tree);
    }
}

static void clear_rec(struct kdnode *node, void (*destr)(void*))
{
    if(!node) return;

    clear_rec(node->left, destr);
    clear_rec(node->right, destr);

    if(destr) {
        destr(node->data);
    }
    free(node->pos);
    free(node);
}

void kd_clear(struct kdtree *tree)
{
    clear_rec(tree->root, tree->destr);
    tree->root = 0;

    if (tree->rect) {
        hyperrect_free(tree->rect);
        tree->rect = 0;
    }
}

void kd_data_destructor(struct kdtree *tree, void (*destr)(void*))
{
    tree->destr = destr;
}
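
/* Usage note (illustrative only, not part of the library): if the data
 * pointers handed to the kd_insert* functions were allocated with malloc,
 * registering free() as the destructor makes kd_clear()/kd_free() release
 * them as well. The name "kt" below is just a placeholder:
 *
 *     struct kdtree *kt = kd_create(3);
 *     kd_data_destructor(kt, free);
 */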


static int insert_rec(struct kdnode **nptr, const double *pos, void *data, int dir, int dim)
{
    int new_dir;
    struct kdnode *node;

    if(!*nptr) {
        if(!(node = malloc(sizeof *node))) {
            return -1;
        }
        if(!(node->pos = malloc(dim * sizeof *node->pos))) {
            free(node);
            return -1;
        }
        memcpy(node->pos, pos, dim * sizeof *node->pos);
        node->data = data;
        node->dir = dir;
        node->left = node->right = 0;
        *nptr = node;
        return 0;
    }

    node = *nptr;
    new_dir = (node->dir + 1) % dim;
    if(pos[node->dir] < node->pos[node->dir]) {
        return insert_rec(&(*nptr)->left, pos, data, new_dir, dim);
    }
    return insert_rec(&(*nptr)->right, pos, data, new_dir, dim);
}

int kd_insert(struct kdtree *tree, const double *pos, void *data)
{
    if (insert_rec(&tree->root, pos, data, 0, tree->dim)) {
        return -1;
    }

    if (tree->rect == 0) {
        tree->rect = hyperrect_create(tree->dim, pos, pos);
    } else {
        hyperrect_extend(tree->rect, pos);
    }

    return 0;
}

int kd_insertf(struct kdtree *tree, const float *pos, void *data)
{
    static double sbuf[16];
    double *bptr, *buf = 0;
    int res, dim = tree->dim;

    if(dim > 16) {
#ifndef NO_ALLOCA
        if(dim <= 256)
            bptr = buf = alloca(dim * sizeof *bptr);
        else
#endif
        if(!(bptr = buf = malloc(dim * sizeof *bptr))) {
            return -1;
        }
    } else {
        bptr = buf = sbuf;
    }

    while(dim-- > 0) {
        *bptr++ = *pos++;
    }

    res = kd_insert(tree, buf, data);
#ifndef NO_ALLOCA
    if(tree->dim > 256)
#else
    if(tree->dim > 16)
#endif
        free(buf);
    return res;
}

int kd_insert3(struct kdtree *tree, double x, double y, double z, void *data)
{
    double buf[3];
    buf[0] = x;
    buf[1] = y;
    buf[2] = z;
    return kd_insert(tree, buf, data);
}

int kd_insert3f(struct kdtree *tree, float x, float y, float z, void *data)
{
    double buf[3];
    buf[0] = x;
    buf[1] = y;
    buf[2] = z;
    return kd_insert(tree, buf, data);
}

static int find_nearest(struct kdnode *node, const double *pos, double range, struct res_node *list, int ordered, int dim)
{
    double dist_sq, dx;
    int i, ret, added_res = 0;

    if(!node) return 0;

    dist_sq = 0;
    for(i=0; i<dim; i++) {
        dist_sq += SQ(node->pos[i] - pos[i]);
    }
    if(dist_sq <= SQ(range)) {
        if(rlist_insert(list, node, ordered ? dist_sq : -1.0) == -1) {
            return -1;
        }
        added_res = 1;
    }

    dx = pos[node->dir] - node->pos[node->dir];

    ret = find_nearest(dx <= 0.0 ? node->left : node->right, pos, range, list, ordered, dim);
    if(ret >= 0 && fabs(dx) < range) {
        added_res += ret;
        ret = find_nearest(dx <= 0.0 ? node->right : node->left, pos, range, list, ordered, dim);
    }
    if(ret == -1) {
        return -1;
    }
    added_res += ret;

    return added_res;
}

#if 0
static int find_nearest_n(struct kdnode *node, const double *pos, double range, int num, struct rheap *heap, int dim)
{
    double range_sq = SQ(range), dist_sq, dx;    /* squared search radius; shrinks as closer points are found */
    int i, ret, added_res = 0;

    if(!node) return 0;

    /* if the photon is close enough, add it to the result heap */
    dist_sq = 0;
    for(i=0; i<dim; i++) {
        dist_sq += SQ(node->pos[i] - pos[i]);
    }
    if(dist_sq <= range_sq) {
        if(heap->size >= num) {
            /* get furthest element */
            struct res_node *maxelem = rheap_get_max(heap);

            /* and check if the new one is closer than that */
            if(maxelem->dist_sq > dist_sq) {
                rheap_remove_max(heap);

                if(rheap_insert(heap, node, dist_sq) == -1) {
                    return -1;
                }
                added_res = 1;

                range_sq = dist_sq;
            }
        } else {
            if(rheap_insert(heap, node, dist_sq) == -1) {
                return -1;
            }
            added_res = 1;
        }
    }


    /* find signed distance from the splitting plane */
    dx = pos[node->dir] - node->pos[node->dir];

    ret = find_nearest_n(dx <= 0.0 ? node->left : node->right, pos, range, num, heap, dim);
    if(ret >= 0 && fabs(dx) < range) {
        added_res += ret;
        ret = find_nearest_n(dx <= 0.0 ? node->right : node->left, pos, range, num, heap, dim);
    }

}
#endif

static void kd_nearest_i(struct kdnode *node, const double *pos, struct kdnode **result, double *result_dist_sq, struct kdhyperrect* rect)
{
    int dir = node->dir;
    int i;
    double dummy, dist_sq;
    struct kdnode *nearer_subtree, *farther_subtree;
    double *nearer_hyperrect_coord, *farther_hyperrect_coord;

    /* Decide whether to go left or right in the tree */
    dummy = pos[dir] - node->pos[dir];
    if (dummy <= 0) {
        nearer_subtree = node->left;
        farther_subtree = node->right;
        nearer_hyperrect_coord = rect->max + dir;
        farther_hyperrect_coord = rect->min + dir;
    } else {
        nearer_subtree = node->right;
        farther_subtree = node->left;
        nearer_hyperrect_coord = rect->min + dir;
        farther_hyperrect_coord = rect->max + dir;
    }

    if (nearer_subtree) {
        /* Slice the hyperrect to get the hyperrect of the nearer subtree */
        dummy = *nearer_hyperrect_coord;
        *nearer_hyperrect_coord = node->pos[dir];
        /* Recurse down into nearer subtree */
        kd_nearest_i(nearer_subtree, pos, result, result_dist_sq, rect);
        /* Undo the slice */
        *nearer_hyperrect_coord = dummy;
    }

    /* Check the distance of the point at the current node, compare it
     * with our best so far */
    dist_sq = 0;
    for(i=0; i < rect->dim; i++) {
        dist_sq += SQ(node->pos[i] - pos[i]);
    }
    if (dist_sq < *result_dist_sq) {
        *result = node;
        *result_dist_sq = dist_sq;
    }

    if (farther_subtree) {
        /* Get the hyperrect of the farther subtree */
        dummy = *farther_hyperrect_coord;
        *farther_hyperrect_coord = node->pos[dir];
        /* Check if we have to recurse down by calculating the closest
         * point of the hyperrect and see if it's closer than our
         * minimum distance in result_dist_sq. */
        if (hyperrect_dist_sq(rect, pos) < *result_dist_sq) {
            /* Recurse down into farther subtree */
            kd_nearest_i(farther_subtree, pos, result, result_dist_sq, rect);
        }
        /* Undo the slice on the hyperrect */
        *farther_hyperrect_coord = dummy;
    }
}

struct kdres *kd_nearest(struct kdtree *kd, const double *pos)
{
    struct kdhyperrect *rect;
    struct kdnode *result;
    struct kdres *rset;
    double dist_sq;
    int i;

    if (!kd) return 0;
    if (!kd->rect) return 0;

    /* Allocate result set */
    if(!(rset = malloc(sizeof *rset))) {
        return 0;
    }
    if(!(rset->rlist = alloc_resnode())) {
        free(rset);
        return 0;
    }
    rset->rlist->next = 0;
    rset->tree = kd;

    /* Duplicate the bounding hyperrectangle, we will work on the copy */
    if (!(rect = hyperrect_duplicate(kd->rect))) {
        kd_res_free(rset);
        return 0;
    }

    /* Our first guesstimate is the root node */
    result = kd->root;
    dist_sq = 0;
    for (i = 0; i < kd->dim; i++)
        dist_sq += SQ(result->pos[i] - pos[i]);

    /* Search for the nearest neighbour recursively */
    kd_nearest_i(kd->root, pos, &result, &dist_sq, rect);

    /* Free the copy of the hyperrect */
    hyperrect_free(rect);

    /* Store the result */
    if (result) {
        if (rlist_insert(rset->rlist, result, -1.0) == -1) {
            kd_res_free(rset);
            return 0;
        }
        rset->size = 1;
        kd_res_rewind(rset);
        return rset;
    } else {
        kd_res_free(rset);
        return 0;
    }
}
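
/* Example (illustrative only; minimal error handling): querying the single
 * nearest neighbour of a 3-D point and reading back its position and data
 * pointer. "kt", "qx", "qy" and "qz" are placeholders for a populated tree
 * and the query coordinates:
 *
 *     double px, py, pz;
 *     struct kdres *res = kd_nearest3(kt, qx, qy, qz);
 *     if(res && !kd_res_end(res)) {
 *         void *data = kd_res_item_data(res);
 *         kd_res_item3(res, &px, &py, &pz);
 *     }
 *     if(res) kd_res_free(res);
 */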

struct kdres *kd_nearestf(struct kdtree *tree, const float *pos)
{
    static double sbuf[16];
    double *bptr, *buf = 0;
    int dim = tree->dim;
    struct kdres *res;

    if(dim > 16) {
#ifndef NO_ALLOCA
        if(dim <= 256)
            bptr = buf = alloca(dim * sizeof *bptr);
        else
#endif
        if(!(bptr = buf = malloc(dim * sizeof *bptr))) {
            return 0;
        }
    } else {
        bptr = buf = sbuf;
    }

    while(dim-- > 0) {
        *bptr++ = *pos++;
    }

    res = kd_nearest(tree, buf);
#ifndef NO_ALLOCA
    if(tree->dim > 256)
#else
    if(tree->dim > 16)
#endif
        free(buf);
    return res;
}

struct kdres *kd_nearest3(struct kdtree *tree, double x, double y, double z)
{
    double pos[3];
    pos[0] = x;
    pos[1] = y;
    pos[2] = z;
    return kd_nearest(tree, pos);
}

struct kdres *kd_nearest3f(struct kdtree *tree, float x, float y, float z)
{
    double pos[3];
    pos[0] = x;
    pos[1] = y;
    pos[2] = z;
    return kd_nearest(tree, pos);
}

/* ---- nearest N search ---- */
/*
static kdres *kd_nearest_n(struct kdtree *kd, const double *pos, int num)
{
    int ret;
    struct kdres *rset;

    if(!(rset = malloc(sizeof *rset))) {
        return 0;
    }
    if(!(rset->rlist = alloc_resnode())) {
        free(rset);
        return 0;
    }
    rset->rlist->next = 0;
    rset->tree = kd;

    if((ret = find_nearest_n(kd->root, pos, range, num, rset->rlist, kd->dim)) == -1) {
        kd_res_free(rset);
        return 0;
    }
    rset->size = ret;
    kd_res_rewind(rset);
    return rset;
}*/

struct kdres *kd_nearest_range(struct kdtree *kd, const double *pos, double range)
{
    int ret;
    struct kdres *rset;

    if(!(rset = malloc(sizeof *rset))) {
        return 0;
    }
    if(!(rset->rlist = alloc_resnode())) {
        free(rset);
        return 0;
    }
    rset->rlist->next = 0;
    rset->tree = kd;

    if((ret = find_nearest(kd->root, pos, range, rset->rlist, 0, kd->dim)) == -1) {
        kd_res_free(rset);
        return 0;
    }
    rset->size = ret;
    kd_res_rewind(rset);
    return rset;
}

struct kdres *kd_nearest_rangef(struct kdtree *kd, const float *pos, float range)
{
    static double sbuf[16];
    double *bptr, *buf = 0;
    int dim = kd->dim;
    struct kdres *res;

    if(dim > 16) {
#ifndef NO_ALLOCA
        if(dim <= 256)
            bptr = buf = alloca(dim * sizeof *bptr);
        else
#endif
        if(!(bptr = buf = malloc(dim * sizeof *bptr))) {
            return 0;
        }
    } else {
        bptr = buf = sbuf;
    }

    while(dim-- > 0) {
        *bptr++ = *pos++;
    }

    res = kd_nearest_range(kd, buf, range);
#ifndef NO_ALLOCA
    if(kd->dim > 256)
#else
    if(kd->dim > 16)
#endif
        free(buf);
    return res;
}

struct kdres *kd_nearest_range3(struct kdtree *tree, double x, double y, double z, double range)
{
    double buf[3];
    buf[0] = x;
    buf[1] = y;
    buf[2] = z;
    return kd_nearest_range(tree, buf, range);
}

struct kdres *kd_nearest_range3f(struct kdtree *tree, float x, float y, float z, float range)
{
    double buf[3];
    buf[0] = x;
    buf[1] = y;
    buf[2] = z;
    return kd_nearest_range(tree, buf, range);
}

void kd_res_free(struct kdres *rset)
{
    clear_results(rset);
    free_resnode(rset->rlist);
    free(rset);
}

int kd_res_size(struct kdres *set)
{
    return (set->size);
}

void kd_res_rewind(struct kdres *rset)
{
    rset->riter = rset->rlist->next;
}

int kd_res_end(struct kdres *rset)
{
    return rset->riter == 0;
}

int kd_res_next(struct kdres *rset)
{
    rset->riter = rset->riter->next;
    return rset->riter != 0;
}

void *kd_res_item(struct kdres *rset, double *pos)
{
    if(rset->riter) {
        if(pos) {
            memcpy(pos, rset->riter->item->pos, rset->tree->dim * sizeof *pos);
        }
        return rset->riter->item->data;
    }
    return 0;
}

void *kd_res_itemf(struct kdres *rset, float *pos)
{
    if(rset->riter) {
        if(pos) {
            int i;
            for(i=0; i<rset->tree->dim; i++) {
                pos[i] = rset->riter->item->pos[i];
            }
        }
        return rset->riter->item->data;
    }
    return 0;
}

void *kd_res_item3(struct kdres *rset, double *x, double *y, double *z)
{
    if(rset->riter) {
        if(x) *x = rset->riter->item->pos[0];
        if(y) *y = rset->riter->item->pos[1];
        if(z) *z = rset->riter->item->pos[2];
    }
    return 0;
}

void *kd_res_item3f(struct kdres *rset, float *x, float *y, float *z)
{
    if(rset->riter) {
        if(x) *x = rset->riter->item->pos[0];
        if(y) *y = rset->riter->item->pos[1];
        if(z) *z = rset->riter->item->pos[2];
    }
    return 0;
}

void *kd_res_item_data(struct kdres *set)
{
    return kd_res_item(set, 0);
}
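
/* Putting the result API together: a range-query sketch (illustrative only;
 * "kt" stands for a populated 3-D tree, it is not defined in this file):
 *
 *     struct kdres *set = kd_nearest_range3(kt, x, y, z, 10.0);
 *     if(set) {
 *         while(!kd_res_end(set)) {
 *             double pt[3];
 *             void *data = kd_res_item(set, pt);
 *             ... use pt and data ...
 *             kd_res_next(set);
 *         }
 *         kd_res_free(set);
 *     }
 */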

/* ---- hyperrectangle helpers ---- */
static struct kdhyperrect* hyperrect_create(int dim, const double *min, const double *max)
{
    size_t size = dim * sizeof(double);
    struct kdhyperrect* rect = 0;

    if (!(rect = malloc(sizeof(struct kdhyperrect)))) {
        return 0;
    }

    rect->dim = dim;
    if (!(rect->min = malloc(size))) {
        free(rect);
        return 0;
    }
    if (!(rect->max = malloc(size))) {
        free(rect->min);
        free(rect);
        return 0;
    }
    memcpy(rect->min, min, size);
    memcpy(rect->max, max, size);

    return rect;
}

static void hyperrect_free(struct kdhyperrect *rect)
{
    free(rect->min);
    free(rect->max);
    free(rect);
}

static struct kdhyperrect* hyperrect_duplicate(const struct kdhyperrect *rect)
{
    return hyperrect_create(rect->dim, rect->min, rect->max);
}

static void hyperrect_extend(struct kdhyperrect *rect, const double *pos)
{
    int i;

    for (i=0; i < rect->dim; i++) {
        if (pos[i] < rect->min[i]) {
            rect->min[i] = pos[i];
        }
        if (pos[i] > rect->max[i]) {
            rect->max[i] = pos[i];
        }
    }
}

static double hyperrect_dist_sq(struct kdhyperrect *rect, const double *pos)
{
    int i;
    double result = 0;

    for (i=0; i < rect->dim; i++) {
        if (pos[i] < rect->min[i]) {
            result += SQ(rect->min[i] - pos[i]);
        } else if (pos[i] > rect->max[i]) {
            result += SQ(rect->max[i] - pos[i]);
        }
    }

    return result;
}
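
/* Worked example: for the 2-D rectangle [0,1] x [0,1] and the query point
 * (2, 0.5), only the first coordinate lies outside the rectangle, so the
 * function returns SQ(1 - 2) = 1.0; a point inside the rectangle gives 0.
 * kd_nearest_i() compares this value against the best squared distance found
 * so far to decide whether the farther subtree can be pruned. */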

/* ---- static helpers ---- */

#ifdef USE_LIST_NODE_ALLOCATOR
/* special list node allocators. */
static struct res_node *free_nodes;

#ifndef NO_PTHREADS
static pthread_mutex_t alloc_mutex = PTHREAD_MUTEX_INITIALIZER;
#endif

static struct res_node *alloc_resnode(void)
{
    struct res_node *node;

#ifndef NO_PTHREADS
    pthread_mutex_lock(&alloc_mutex);
#endif

    if(!free_nodes) {
        node = malloc(sizeof *node);
    } else {
        node = free_nodes;
        free_nodes = free_nodes->next;
        node->next = 0;
    }

#ifndef NO_PTHREADS
    pthread_mutex_unlock(&alloc_mutex);
#endif

    return node;
}

static void free_resnode(struct res_node *node)
{
#ifndef NO_PTHREADS
    pthread_mutex_lock(&alloc_mutex);
#endif

    node->next = free_nodes;
    free_nodes = node;

#ifndef NO_PTHREADS
    pthread_mutex_unlock(&alloc_mutex);
#endif
}
#endif /* list node allocator or not */


/* inserts the item. if dist_sq is >= 0, then do an ordered insert */
/* TODO make the ordering code use heapsort */
static int rlist_insert(struct res_node *list, struct kdnode *item, double dist_sq)
{
    struct res_node *rnode;

    if(!(rnode = alloc_resnode())) {
        return -1;
    }
    rnode->item = item;
    rnode->dist_sq = dist_sq;

    if(dist_sq >= 0.0) {
        while(list->next && list->next->dist_sq < dist_sq) {
            list = list->next;
        }
    }
    rnode->next = list->next;
    list->next = rnode;
    return 0;
}
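
/* Illustration: in ordered mode (dist_sq >= 0) the entries behind the dummy
 * head stay sorted by ascending dist_sq, so inserting 4.0 into
 * head -> 1.0 -> 9.0 yields head -> 1.0 -> 4.0 -> 9.0. With dist_sq < 0
 * (the unordered mode used by the range search) the new node is simply
 * linked in right after the head. */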

static void clear_results(struct kdres *rset)
{
    struct res_node *tmp, *node = rset->rlist->next;

    while(node) {
        tmp = node;
        node = node->next;
        free_resnode(tmp);
    }

    rset->rlist->next = 0;
}
|