Bullet Collision Detection & Physics Library
btQuantizedBvh.cpp
1 /*
2 Bullet Continuous Collision Detection and Physics Library
3 Copyright (c) 2003-2006 Erwin Coumans http://continuousphysics.com/Bullet/
4 
5 This software is provided 'as-is', without any express or implied warranty.
6 In no event will the authors be held liable for any damages arising from the use of this software.
7 Permission is granted to anyone to use this software for any purpose,
8 including commercial applications, and to alter it and redistribute it freely,
9 subject to the following restrictions:
10 
11 1. The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required.
12 2. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software.
13 3. This notice may not be removed or altered from any source distribution.
14 */
15 
16 #include "btQuantizedBvh.h"
17 
18 #include "LinearMath/btAabbUtil2.h"
19 #include "LinearMath/btIDebugDraw.h"
20 #include "LinearMath/btSerializer.h"
21 
22 #define RAYAABB2
23 
24 btQuantizedBvh::btQuantizedBvh() : m_bulletVersion(BT_BULLET_VERSION),
25  m_useQuantization(false),
26  //m_traversalMode(TRAVERSAL_STACKLESS_CACHE_FRIENDLY)
27  m_traversalMode(TRAVERSAL_STACKLESS)
28  //m_traversalMode(TRAVERSAL_RECURSIVE)
29  ,
30  m_subtreeHeaderCount(0) //PCK: add this line
31 {
32  m_bvhAabbMin.setValue(-SIMD_INFINITY, -SIMD_INFINITY, -SIMD_INFINITY);
33  m_bvhAabbMax.setValue(SIMD_INFINITY, SIMD_INFINITY, SIMD_INFINITY);
34 }
35 
36 void btQuantizedBvh::buildInternal()
37 {
38  ///assumes that the caller filled in the m_quantizedLeafNodes
39  m_useQuantization = true;
40  int numLeafNodes = 0;
41 
42  if (m_useQuantization)
43  {
44  //now we have an array of leafnodes in m_leafNodes
45  numLeafNodes = m_quantizedLeafNodes.size();
46 
47  m_quantizedContiguousNodes.resize(2 * numLeafNodes);
48  }
49 
50  m_curNodeIndex = 0;
51 
52  buildTree(0, numLeafNodes);
53 
54  ///if the entire tree is smaller than the subtree size, we need to create a header info for the tree
55  if (m_useQuantization && !m_SubtreeHeaders.size())
56  {
57  btBvhSubtreeInfo& subtree = m_SubtreeHeaders.expand();
58  subtree.setAabbFromQuantizeNode(m_quantizedContiguousNodes[0]);
59  subtree.m_rootNodeIndex = 0;
60  subtree.m_subtreeSize = m_quantizedContiguousNodes[0].isLeafNode() ? 1 : m_quantizedContiguousNodes[0].getEscapeIndex();
61  }
62 
63  //PCK: update the copy of the size
64  m_subtreeHeaderCount = m_SubtreeHeaders.size();
65 
66  //PCK: clear m_quantizedLeafNodes and m_leafNodes, they are temporary
67  m_quantizedLeafNodes.clear();
68  m_leafNodes.clear();
69 }
70 
72 #ifdef DEBUG_PATCH_COLORS
73 btVector3 color[4] =
74  {
75  btVector3(1, 0, 0),
76  btVector3(0, 1, 0),
77  btVector3(0, 0, 1),
78  btVector3(0, 1, 1)};
79 #endif //DEBUG_PATCH_COLORS
80 
81 void btQuantizedBvh::setQuantizationValues(const btVector3& bvhAabbMin, const btVector3& bvhAabbMax, btScalar quantizationMargin)
82 {
83  //enlarge the AABB to avoid division by zero when initializing the quantization values
84  btVector3 clampValue(quantizationMargin, quantizationMargin, quantizationMargin);
85  m_bvhAabbMin = bvhAabbMin - clampValue;
86  m_bvhAabbMax = bvhAabbMax + clampValue;
87  btVector3 aabbSize = m_bvhAabbMax - m_bvhAabbMin;
88  m_bvhQuantization = btVector3(btScalar(65533.0), btScalar(65533.0), btScalar(65533.0)) / aabbSize;
89 
90  m_useQuantization = true;
91 
92  {
93  unsigned short vecIn[3];
94  btVector3 v;
95  {
96  quantize(vecIn, m_bvhAabbMin, false);
97  v = unQuantize(vecIn);
98  m_bvhAabbMin.setMin(v - clampValue);
99  }
100  aabbSize = m_bvhAabbMax - m_bvhAabbMin;
101  m_bvhQuantization = btVector3(btScalar(65533.0), btScalar(65533.0), btScalar(65533.0)) / aabbSize;
102  {
103  quantize(vecIn, m_bvhAabbMax, true);
104  v = unQuantize(vecIn);
105  m_bvhAabbMax.setMax(v + clampValue);
106  }
107  aabbSize = m_bvhAabbMax - m_bvhAabbMin;
108  m_bvhQuantization = btVector3(btScalar(65533.0), btScalar(65533.0), btScalar(65533.0)) / aabbSize;
109  }
110 }
111 
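The mapping configured above turns world-space coordinates into 16-bit grid coordinates: subtract m_bvhAabbMin, scale by m_bvhQuantization (65533 divided by the AABB extent), and clamp. Below is a minimal standalone sketch of that forward/inverse mapping for one axis; it is an illustration only (the helper names are made up) and it omits the conservative even/odd rounding that the real quantize()/quantizeWithClamp() apply to max/min endpoints.

#include "LinearMath/btScalar.h"

// Hypothetical helpers, not part of btQuantizedBvh: quantize one axis the same
// way setQuantizationValues() sets it up (q = (p - aabbMin) * 65533 / extent).
static unsigned short quantizeAxisSketch(btScalar p, btScalar aabbMin, btScalar quantization)
{
	btScalar v = (p - aabbMin) * quantization;   // scale into the [0, 65533] grid
	if (v < btScalar(0.0)) v = btScalar(0.0);    // clamp to stay inside the quantized AABB
	if (v > btScalar(65533.0)) v = btScalar(65533.0);
	return (unsigned short)v;
}

static btScalar unQuantizeAxisSketch(unsigned short q, btScalar aabbMin, btScalar quantization)
{
	return (btScalar)q / quantization + aabbMin;  // inverse mapping back to world space
}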
112 btQuantizedBvh::~btQuantizedBvh()
113 {
114 }
115 
116 #ifdef DEBUG_TREE_BUILDING
117 int gStackDepth = 0;
118 int gMaxStackDepth = 0;
119 #endif //DEBUG_TREE_BUILDING
120 
121 void btQuantizedBvh::buildTree(int startIndex, int endIndex)
122 {
123 #ifdef DEBUG_TREE_BUILDING
124  gStackDepth++;
125  if (gStackDepth > gMaxStackDepth)
126  gMaxStackDepth = gStackDepth;
127 #endif //DEBUG_TREE_BUILDING
128 
129  int splitAxis, splitIndex, i;
130  int numIndices = endIndex - startIndex;
131  int curIndex = m_curNodeIndex;
132 
133  btAssert(numIndices > 0);
134 
135  if (numIndices == 1)
136  {
137 #ifdef DEBUG_TREE_BUILDING
138  gStackDepth--;
139 #endif //DEBUG_TREE_BUILDING
140 
141  assignInternalNodeFromLeafNode(m_curNodeIndex, startIndex);
142 
143  m_curNodeIndex++;
144  return;
145  }
146  //calculate Best Splitting Axis and where to split it. Sort the incoming 'leafNodes' array within range 'startIndex/endIndex'.
147 
148  splitAxis = calcSplittingAxis(startIndex, endIndex);
149 
150  splitIndex = sortAndCalcSplittingIndex(startIndex, endIndex, splitAxis);
151 
152  int internalNodeIndex = m_curNodeIndex;
153 
154  //set the min aabb to 'inf' or a max value, and set the max aabb to a -inf/minimum value.
155  //the aabb will be expanded during buildTree/mergeInternalNodeAabb with actual node values
156  setInternalNodeAabbMin(m_curNodeIndex, m_bvhAabbMax); //can't use btVector3(SIMD_INFINITY,SIMD_INFINITY,SIMD_INFINITY)) because of quantization
157  setInternalNodeAabbMax(m_curNodeIndex, m_bvhAabbMin); //can't use btVector3(-SIMD_INFINITY,-SIMD_INFINITY,-SIMD_INFINITY)) because of quantization
158 
159  for (i = startIndex; i < endIndex; i++)
160  {
161  mergeInternalNodeAabb(m_curNodeIndex, getAabbMin(i), getAabbMax(i));
162  }
163 
164  m_curNodeIndex++;
165 
166  //internalNode->m_escapeIndex;
167 
168  int leftChildNodexIndex = m_curNodeIndex;
169 
170  //build left child tree
171  buildTree(startIndex, splitIndex);
172 
173  int rightChildNodexIndex = m_curNodeIndex;
174  //build right child tree
175  buildTree(splitIndex, endIndex);
176 
177 #ifdef DEBUG_TREE_BUILDING
178  gStackDepth--;
179 #endif //DEBUG_TREE_BUILDING
180 
181  int escapeIndex = m_curNodeIndex - curIndex;
182 
183  if (m_useQuantization)
184  {
185  //escapeIndex is the number of nodes of this subtree
186  const int sizeQuantizedNode = sizeof(btQuantizedBvhNode);
187  const int treeSizeInBytes = escapeIndex * sizeQuantizedNode;
188  if (treeSizeInBytes > MAX_SUBTREE_SIZE_IN_BYTES)
189  {
190  updateSubtreeHeaders(leftChildNodexIndex, rightChildNodexIndex);
191  }
192  }
193  else
194  {
195  }
196 
197  setInternalNodeEscapeIndex(internalNodeIndex, escapeIndex);
198 }
199 
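buildTree() lays the nodes out in depth-first order and stores, for every internal node, an escape index equal to the number of nodes in its subtree. A hedged sketch of how a traversal consumes that index is shown below; it mirrors walkStacklessQuantizedTree() further down, and everything except the btQuantizedBvhNode accessors is made up for illustration.

#include "btQuantizedBvh.h"

// Illustration only: skip-based traversal over a depth-first node array.
static void traverseSketch(const btQuantizedBvhNode* nodes, int nodeCount)
{
	int curIndex = 0;
	while (curIndex < nodeCount)
	{
		const btQuantizedBvhNode* node = &nodes[curIndex];
		bool overlaps = true;  // stand-in for the AABB overlap test against the query volume
		if (overlaps || node->isLeafNode())
			curIndex += 1;                       // descend: the next array element is the first child (or the next leaf)
		else
			curIndex += node->getEscapeIndex();  // reject: jump over the entire subtree in one step
	}
}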
200 void btQuantizedBvh::updateSubtreeHeaders(int leftChildNodexIndex, int rightChildNodexIndex)
201 {
202  btAssert(m_useQuantization);
203 
204  btQuantizedBvhNode& leftChildNode = m_quantizedContiguousNodes[leftChildNodexIndex];
205  int leftSubTreeSize = leftChildNode.isLeafNode() ? 1 : leftChildNode.getEscapeIndex();
206  int leftSubTreeSizeInBytes = leftSubTreeSize * static_cast<int>(sizeof(btQuantizedBvhNode));
207 
208  btQuantizedBvhNode& rightChildNode = m_quantizedContiguousNodes[rightChildNodexIndex];
209  int rightSubTreeSize = rightChildNode.isLeafNode() ? 1 : rightChildNode.getEscapeIndex();
210  int rightSubTreeSizeInBytes = rightSubTreeSize * static_cast<int>(sizeof(btQuantizedBvhNode));
211 
212  if (leftSubTreeSizeInBytes <= MAX_SUBTREE_SIZE_IN_BYTES)
213  {
214  btBvhSubtreeInfo& subtree = m_SubtreeHeaders.expand();
215  subtree.setAabbFromQuantizeNode(leftChildNode);
216  subtree.m_rootNodeIndex = leftChildNodexIndex;
217  subtree.m_subtreeSize = leftSubTreeSize;
218  }
219 
220  if (rightSubTreeSizeInBytes <= MAX_SUBTREE_SIZE_IN_BYTES)
221  {
222  btBvhSubtreeInfo& subtree = m_SubtreeHeaders.expand();
223  subtree.setAabbFromQuantizeNode(rightChildNode);
224  subtree.m_rootNodeIndex = rightChildNodexIndex;
225  subtree.m_subtreeSize = rightSubTreeSize;
226  }
227 
228  //PCK: update the copy of the size
229  m_subtreeHeaderCount = m_SubtreeHeaders.size();
230 }
231 
232 int btQuantizedBvh::sortAndCalcSplittingIndex(int startIndex, int endIndex, int splitAxis)
233 {
234  int i;
235  int splitIndex = startIndex;
236  int numIndices = endIndex - startIndex;
237  btScalar splitValue;
238 
239  btVector3 means(btScalar(0.), btScalar(0.), btScalar(0.));
240  for (i = startIndex; i < endIndex; i++)
241  {
242  btVector3 center = btScalar(0.5) * (getAabbMax(i) + getAabbMin(i));
243  means += center;
244  }
245  means *= (btScalar(1.) / (btScalar)numIndices);
246 
247  splitValue = means[splitAxis];
248 
249  //sort leafNodes so all values larger than splitValue come first, and smaller values start from 'splitIndex'.
250  for (i = startIndex; i < endIndex; i++)
251  {
252  btVector3 center = btScalar(0.5) * (getAabbMax(i) + getAabbMin(i));
253  if (center[splitAxis] > splitValue)
254  {
255  //swap
256  swapLeafNodes(i, splitIndex);
257  splitIndex++;
258  }
259  }
260 
261  //if the splitIndex causes unbalanced trees, fix this by using the center in between startIndex and endIndex
262  //otherwise the tree-building might fail due to stack-overflows in certain cases.
263  //unbalanced1 is unsafe: it can cause stack overflows
264  //bool unbalanced1 = ((splitIndex==startIndex) || (splitIndex == (endIndex-1)));
265 
266  //unbalanced2 should work too: always use center (perfect balanced trees)
267  //bool unbalanced2 = true;
268 
269  //this should be safe too:
270  int rangeBalancedIndices = numIndices / 3;
271  bool unbalanced = ((splitIndex <= (startIndex + rangeBalancedIndices)) || (splitIndex >= (endIndex - 1 - rangeBalancedIndices)));
272 
273  if (unbalanced)
274  {
275  splitIndex = startIndex + (numIndices >> 1);
276  }
277 
278  bool unbal = (splitIndex == startIndex) || (splitIndex == (endIndex));
279  (void)unbal;
280  btAssert(!unbal);
281 
282  return splitIndex;
283 }
284 
285 int btQuantizedBvh::calcSplittingAxis(int startIndex, int endIndex)
286 {
287  int i;
288 
289  btVector3 means(btScalar(0.), btScalar(0.), btScalar(0.));
290  btVector3 variance(btScalar(0.), btScalar(0.), btScalar(0.));
291  int numIndices = endIndex - startIndex;
292 
293  for (i = startIndex; i < endIndex; i++)
294  {
295  btVector3 center = btScalar(0.5) * (getAabbMax(i) + getAabbMin(i));
296  means += center;
297  }
298  means *= (btScalar(1.) / (btScalar)numIndices);
299 
300  for (i = startIndex; i < endIndex; i++)
301  {
302  btVector3 center = btScalar(0.5) * (getAabbMax(i) + getAabbMin(i));
303  btVector3 diff2 = center - means;
304  diff2 = diff2 * diff2;
305  variance += diff2;
306  }
307  variance *= (btScalar(1.) / ((btScalar)numIndices - 1));
308 
309  return variance.maxAxis();
310 }
311 
312 void btQuantizedBvh::reportAabbOverlappingNodex(btNodeOverlapCallback* nodeCallback, const btVector3& aabbMin, const btVector3& aabbMax) const
313 {
314  //either choose recursive traversal (walkTree) or stackless (walkStacklessTree)
315 
316  if (m_useQuantization)
317  {
318  ///quantize query AABB
319  unsigned short int quantizedQueryAabbMin[3];
320  unsigned short int quantizedQueryAabbMax[3];
321  quantizeWithClamp(quantizedQueryAabbMin, aabbMin, 0);
322  quantizeWithClamp(quantizedQueryAabbMax, aabbMax, 1);
323 
324  switch (m_traversalMode)
325  {
326  case TRAVERSAL_STACKLESS:
327  walkStacklessQuantizedTree(nodeCallback, quantizedQueryAabbMin, quantizedQueryAabbMax, 0, m_curNodeIndex);
328  break;
329  case TRAVERSAL_STACKLESS_CACHE_FRIENDLY:
330  walkStacklessQuantizedTreeCacheFriendly(nodeCallback, quantizedQueryAabbMin, quantizedQueryAabbMax);
331  break;
332  case TRAVERSAL_RECURSIVE:
333  {
334  const btQuantizedBvhNode* rootNode = &m_quantizedContiguousNodes[0];
335  walkRecursiveQuantizedTreeAgainstQueryAabb(rootNode, nodeCallback, quantizedQueryAabbMin, quantizedQueryAabbMax);
336  }
337  break;
338  default:
339  //unsupported
340  btAssert(0);
341  }
342  }
343  else
344  {
345  walkStacklessTree(nodeCallback, aabbMin, aabbMax);
346  }
347 }
348 
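A caller-side usage sketch for the query entry point above (hypothetical code, not from this file): implement btNodeOverlapCallback and collect the triangle indices of all leaves whose AABBs overlap a query box.

#include "btQuantizedBvh.h"
#include "LinearMath/btAlignedObjectArray.h"

struct CollectLeavesCallback : public btNodeOverlapCallback
{
	btAlignedObjectArray<int> m_triangleIndices;
	virtual void processNode(int subPart, int triangleIndex)
	{
		(void)subPart;  // single-part meshes can ignore this
		m_triangleIndices.push_back(triangleIndex);
	}
};

static void aabbQuerySketch(const btQuantizedBvh& bvh, const btVector3& aabbMin, const btVector3& aabbMax)
{
	CollectLeavesCallback callback;
	bvh.reportAabbOverlappingNodex(&callback, aabbMin, aabbMax);
	// callback.m_triangleIndices now holds the candidate triangles for narrow-phase testing
}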
349 int maxIterations = 0;
350 
351 void btQuantizedBvh::walkStacklessTree(btNodeOverlapCallback* nodeCallback, const btVector3& aabbMin, const btVector3& aabbMax) const
352 {
353  btAssert(!m_useQuantization);
354 
355  const btOptimizedBvhNode* rootNode = &m_contiguousNodes[0];
356  int escapeIndex, curIndex = 0;
357  int walkIterations = 0;
358  bool isLeafNode;
359  //PCK: unsigned instead of bool
360  unsigned aabbOverlap;
361 
362  while (curIndex < m_curNodeIndex)
363  {
364  //catch bugs in tree data
365  btAssert(walkIterations < m_curNodeIndex);
366 
367  walkIterations++;
368  aabbOverlap = TestAabbAgainstAabb2(aabbMin, aabbMax, rootNode->m_aabbMinOrg, rootNode->m_aabbMaxOrg);
369  isLeafNode = rootNode->m_escapeIndex == -1;
370 
371  //PCK: unsigned instead of bool
372  if (isLeafNode && (aabbOverlap != 0))
373  {
374  nodeCallback->processNode(rootNode->m_subPart, rootNode->m_triangleIndex);
375  }
376 
377  //PCK: unsigned instead of bool
378  if ((aabbOverlap != 0) || isLeafNode)
379  {
380  rootNode++;
381  curIndex++;
382  }
383  else
384  {
385  escapeIndex = rootNode->m_escapeIndex;
386  rootNode += escapeIndex;
387  curIndex += escapeIndex;
388  }
389  }
390  if (maxIterations < walkIterations)
391  maxIterations = walkIterations;
392 }
393 
394 /*
396 void btQuantizedBvh::walkTree(btOptimizedBvhNode* rootNode,btNodeOverlapCallback* nodeCallback,const btVector3& aabbMin,const btVector3& aabbMax) const
397 {
398  bool isLeafNode, aabbOverlap = TestAabbAgainstAabb2(aabbMin,aabbMax,rootNode->m_aabbMin,rootNode->m_aabbMax);
399  if (aabbOverlap)
400  {
401  isLeafNode = (!rootNode->m_leftChild && !rootNode->m_rightChild);
402  if (isLeafNode)
403  {
404  nodeCallback->processNode(rootNode);
405  } else
406  {
407  walkTree(rootNode->m_leftChild,nodeCallback,aabbMin,aabbMax);
408  walkTree(rootNode->m_rightChild,nodeCallback,aabbMin,aabbMax);
409  }
410  }
411 
412 }
413 */
414 
415 void btQuantizedBvh::walkRecursiveQuantizedTreeAgainstQueryAabb(const btQuantizedBvhNode* currentNode, btNodeOverlapCallback* nodeCallback, unsigned short int* quantizedQueryAabbMin, unsigned short int* quantizedQueryAabbMax) const
416 {
417  btAssert(m_useQuantization);
418 
419  bool isLeafNode;
420  //PCK: unsigned instead of bool
421  unsigned aabbOverlap;
422 
423  //PCK: unsigned instead of bool
424  aabbOverlap = testQuantizedAabbAgainstQuantizedAabb(quantizedQueryAabbMin, quantizedQueryAabbMax, currentNode->m_quantizedAabbMin, currentNode->m_quantizedAabbMax);
425  isLeafNode = currentNode->isLeafNode();
426 
427  //PCK: unsigned instead of bool
428  if (aabbOverlap != 0)
429  {
430  if (isLeafNode)
431  {
432  nodeCallback->processNode(currentNode->getPartId(), currentNode->getTriangleIndex());
433  }
434  else
435  {
436  //process left and right children
437  const btQuantizedBvhNode* leftChildNode = currentNode + 1;
438  walkRecursiveQuantizedTreeAgainstQueryAabb(leftChildNode, nodeCallback, quantizedQueryAabbMin, quantizedQueryAabbMax);
439 
440  const btQuantizedBvhNode* rightChildNode = leftChildNode->isLeafNode() ? leftChildNode + 1 : leftChildNode + leftChildNode->getEscapeIndex();
441  walkRecursiveQuantizedTreeAgainstQueryAabb(rightChildNode, nodeCallback, quantizedQueryAabbMin, quantizedQueryAabbMax);
442  }
443  }
444 }
445 
446 void btQuantizedBvh::walkStacklessTreeAgainstRay(btNodeOverlapCallback* nodeCallback, const btVector3& raySource, const btVector3& rayTarget, const btVector3& aabbMin, const btVector3& aabbMax, int startNodeIndex, int endNodeIndex) const
447 {
448  btAssert(!m_useQuantization);
449 
450  const btOptimizedBvhNode* rootNode = &m_contiguousNodes[0];
451  int escapeIndex, curIndex = 0;
452  int walkIterations = 0;
453  bool isLeafNode;
454  //PCK: unsigned instead of bool
455  unsigned aabbOverlap = 0;
456  unsigned rayBoxOverlap = 0;
457  btScalar lambda_max = 1.0;
458 
459  /* Quick pruning by quantized box */
460  btVector3 rayAabbMin = raySource;
461  btVector3 rayAabbMax = raySource;
462  rayAabbMin.setMin(rayTarget);
463  rayAabbMax.setMax(rayTarget);
464 
465  /* Add box cast extents to bounding box */
466  rayAabbMin += aabbMin;
467  rayAabbMax += aabbMax;
468 
469 #ifdef RAYAABB2
470  btVector3 rayDir = (rayTarget - raySource);
471  rayDir.normalize();
472  lambda_max = rayDir.dot(rayTarget - raySource);
473  ///what about division by zero? --> just set rayDirection[i] to 1.0
474  btVector3 rayDirectionInverse;
475  rayDirectionInverse[0] = rayDir[0] == btScalar(0.0) ? btScalar(BT_LARGE_FLOAT) : btScalar(1.0) / rayDir[0];
476  rayDirectionInverse[1] = rayDir[1] == btScalar(0.0) ? btScalar(BT_LARGE_FLOAT) : btScalar(1.0) / rayDir[1];
477  rayDirectionInverse[2] = rayDir[2] == btScalar(0.0) ? btScalar(BT_LARGE_FLOAT) : btScalar(1.0) / rayDir[2];
478  unsigned int sign[3] = {rayDirectionInverse[0] < 0.0, rayDirectionInverse[1] < 0.0, rayDirectionInverse[2] < 0.0};
479 #endif
480 
481  btVector3 bounds[2];
482 
483  while (curIndex < m_curNodeIndex)
484  {
485  btScalar param = 1.0;
486  //catch bugs in tree data
487  btAssert(walkIterations < m_curNodeIndex);
488 
489  walkIterations++;
490 
491  bounds[0] = rootNode->m_aabbMinOrg;
492  bounds[1] = rootNode->m_aabbMaxOrg;
493  /* Add box cast extents */
494  bounds[0] -= aabbMax;
495  bounds[1] -= aabbMin;
496 
497  aabbOverlap = TestAabbAgainstAabb2(rayAabbMin, rayAabbMax, rootNode->m_aabbMinOrg, rootNode->m_aabbMaxOrg);
498  //perhaps profile if it is worth doing the aabbOverlap test first
499 
500 #ifdef RAYAABB2
501  rayBoxOverlap = aabbOverlap ? btRayAabb2(raySource, rayDirectionInverse, sign, bounds, param, 0.0f, lambda_max) : false;
505 
506 #else
507  btVector3 normal;
508  rayBoxOverlap = btRayAabb(raySource, rayTarget, bounds[0], bounds[1], param, normal);
509 #endif
510 
511  isLeafNode = rootNode->m_escapeIndex == -1;
512 
513  //PCK: unsigned instead of bool
514  if (isLeafNode && (rayBoxOverlap != 0))
515  {
516  nodeCallback->processNode(rootNode->m_subPart, rootNode->m_triangleIndex);
517  }
518 
519  //PCK: unsigned instead of bool
520  if ((rayBoxOverlap != 0) || isLeafNode)
521  {
522  rootNode++;
523  curIndex++;
524  }
525  else
526  {
527  escapeIndex = rootNode->m_escapeIndex;
528  rootNode += escapeIndex;
529  curIndex += escapeIndex;
530  }
531  }
532  if (maxIterations < walkIterations)
533  maxIterations = walkIterations;
534 }
535 
536 void btQuantizedBvh::walkStacklessQuantizedTreeAgainstRay(btNodeOverlapCallback* nodeCallback, const btVector3& raySource, const btVector3& rayTarget, const btVector3& aabbMin, const btVector3& aabbMax, int startNodeIndex, int endNodeIndex) const
537 {
538  btAssert(m_useQuantization);
539 
540  int curIndex = startNodeIndex;
541  int walkIterations = 0;
542  int subTreeSize = endNodeIndex - startNodeIndex;
543  (void)subTreeSize;
544 
545  const btQuantizedBvhNode* rootNode = &m_quantizedContiguousNodes[startNodeIndex];
546  int escapeIndex;
547 
548  bool isLeafNode;
549  //PCK: unsigned instead of bool
550  unsigned boxBoxOverlap = 0;
551  unsigned rayBoxOverlap = 0;
552 
553  btScalar lambda_max = 1.0;
554 
555 #ifdef RAYAABB2
556  btVector3 rayDirection = (rayTarget - raySource);
557  rayDirection.normalize();
558  lambda_max = rayDirection.dot(rayTarget - raySource);
559  ///what about division by zero? --> just set rayDirection[i] to 1.0
560  rayDirection[0] = rayDirection[0] == btScalar(0.0) ? btScalar(BT_LARGE_FLOAT) : btScalar(1.0) / rayDirection[0];
561  rayDirection[1] = rayDirection[1] == btScalar(0.0) ? btScalar(BT_LARGE_FLOAT) : btScalar(1.0) / rayDirection[1];
562  rayDirection[2] = rayDirection[2] == btScalar(0.0) ? btScalar(BT_LARGE_FLOAT) : btScalar(1.0) / rayDirection[2];
563  unsigned int sign[3] = {rayDirection[0] < 0.0, rayDirection[1] < 0.0, rayDirection[2] < 0.0};
564 #endif
565 
566  /* Quick pruning by quantized box */
567  btVector3 rayAabbMin = raySource;
568  btVector3 rayAabbMax = raySource;
569  rayAabbMin.setMin(rayTarget);
570  rayAabbMax.setMax(rayTarget);
571 
572  /* Add box cast extents to bounding box */
573  rayAabbMin += aabbMin;
574  rayAabbMax += aabbMax;
575 
576  unsigned short int quantizedQueryAabbMin[3];
577  unsigned short int quantizedQueryAabbMax[3];
578  quantizeWithClamp(quantizedQueryAabbMin, rayAabbMin, 0);
579  quantizeWithClamp(quantizedQueryAabbMax, rayAabbMax, 1);
580 
581  while (curIndex < endNodeIndex)
582  {
583 //#define VISUALLY_ANALYZE_BVH 1
584 #ifdef VISUALLY_ANALYZE_BVH
585  //some code snippet to debugDraw aabb, to visually analyze bvh structure
586  static int drawPatch = 0;
587  //need some global access to a debugDrawer
588  extern btIDebugDraw* debugDrawerPtr;
589  if (curIndex == drawPatch)
590  {
591  btVector3 aabbMin, aabbMax;
592  aabbMin = unQuantize(rootNode->m_quantizedAabbMin);
593  aabbMax = unQuantize(rootNode->m_quantizedAabbMax);
594  btVector3 color(1, 0, 0);
595  debugDrawerPtr->drawAabb(aabbMin, aabbMax, color);
596  }
597 #endif //VISUALLY_ANALYZE_BVH
598 
599  //catch bugs in tree data
600  btAssert(walkIterations < subTreeSize);
601 
602  walkIterations++;
603  //PCK: unsigned instead of bool
604  // only interested if this is closer than any previous hit
605  btScalar param = 1.0;
606  rayBoxOverlap = 0;
607  boxBoxOverlap = testQuantizedAabbAgainstQuantizedAabb(quantizedQueryAabbMin, quantizedQueryAabbMax, rootNode->m_quantizedAabbMin, rootNode->m_quantizedAabbMax);
608  isLeafNode = rootNode->isLeafNode();
609  if (boxBoxOverlap)
610  {
611  btVector3 bounds[2];
612  bounds[0] = unQuantize(rootNode->m_quantizedAabbMin);
613  bounds[1] = unQuantize(rootNode->m_quantizedAabbMax);
614  /* Add box cast extents */
615  bounds[0] -= aabbMax;
616  bounds[1] -= aabbMin;
617  btVector3 normal;
618 #if 0
619  bool ra2 = btRayAabb2 (raySource, rayDirection, sign, bounds, param, 0.0, lambda_max);
620  bool ra = btRayAabb (raySource, rayTarget, bounds[0], bounds[1], param, normal);
621  if (ra2 != ra)
622  {
623  printf("functions don't match\n");
624  }
625 #endif
626 #ifdef RAYAABB2
627 
631  //BT_PROFILE("btRayAabb2");
632  rayBoxOverlap = btRayAabb2(raySource, rayDirection, sign, bounds, param, 0.0f, lambda_max);
633 
634 #else
635  rayBoxOverlap = true; //btRayAabb(raySource, rayTarget, bounds[0], bounds[1], param, normal);
636 #endif
637  }
638 
639  if (isLeafNode && rayBoxOverlap)
640  {
641  nodeCallback->processNode(rootNode->getPartId(), rootNode->getTriangleIndex());
642  }
643 
644  //PCK: unsigned instead of bool
645  if ((rayBoxOverlap != 0) || isLeafNode)
646  {
647  rootNode++;
648  curIndex++;
649  }
650  else
651  {
652  escapeIndex = rootNode->getEscapeIndex();
653  rootNode += escapeIndex;
654  curIndex += escapeIndex;
655  }
656  }
657  if (maxIterations < walkIterations)
658  maxIterations = walkIterations;
659 }
660 
661 void btQuantizedBvh::walkStacklessQuantizedTree(btNodeOverlapCallback* nodeCallback, unsigned short int* quantizedQueryAabbMin, unsigned short int* quantizedQueryAabbMax, int startNodeIndex, int endNodeIndex) const
662 {
663  btAssert(m_useQuantization);
664 
665  int curIndex = startNodeIndex;
666  int walkIterations = 0;
667  int subTreeSize = endNodeIndex - startNodeIndex;
668  (void)subTreeSize;
669 
670  const btQuantizedBvhNode* rootNode = &m_quantizedContiguousNodes[startNodeIndex];
671  int escapeIndex;
672 
673  bool isLeafNode;
674  //PCK: unsigned instead of bool
675  unsigned aabbOverlap;
676 
677  while (curIndex < endNodeIndex)
678  {
679 //#define VISUALLY_ANALYZE_BVH 1
680 #ifdef VISUALLY_ANALYZE_BVH
681  //some code snippet to debugDraw aabb, to visually analyze bvh structure
682  static int drawPatch = 0;
683  //need some global access to a debugDrawer
684  extern btIDebugDraw* debugDrawerPtr;
685  if (curIndex == drawPatch)
686  {
687  btVector3 aabbMin, aabbMax;
688  aabbMin = unQuantize(rootNode->m_quantizedAabbMin);
689  aabbMax = unQuantize(rootNode->m_quantizedAabbMax);
690  btVector3 color(1, 0, 0);
691  debugDrawerPtr->drawAabb(aabbMin, aabbMax, color);
692  }
693 #endif //VISUALLY_ANALYZE_BVH
694 
695  //catch bugs in tree data
696  btAssert(walkIterations < subTreeSize);
697 
698  walkIterations++;
699  //PCK: unsigned instead of bool
700  aabbOverlap = testQuantizedAabbAgainstQuantizedAabb(quantizedQueryAabbMin, quantizedQueryAabbMax, rootNode->m_quantizedAabbMin, rootNode->m_quantizedAabbMax);
701  isLeafNode = rootNode->isLeafNode();
702 
703  if (isLeafNode && aabbOverlap)
704  {
705  nodeCallback->processNode(rootNode->getPartId(), rootNode->getTriangleIndex());
706  }
707 
708  //PCK: unsigned instead of bool
709  if ((aabbOverlap != 0) || isLeafNode)
710  {
711  rootNode++;
712  curIndex++;
713  }
714  else
715  {
716  escapeIndex = rootNode->getEscapeIndex();
717  rootNode += escapeIndex;
718  curIndex += escapeIndex;
719  }
720  }
721  if (maxIterations < walkIterations)
722  maxIterations = walkIterations;
723 }
724 
725 //This traversal can be called from Playstation 3 SPU
726 void btQuantizedBvh::walkStacklessQuantizedTreeCacheFriendly(btNodeOverlapCallback* nodeCallback, unsigned short int* quantizedQueryAabbMin, unsigned short int* quantizedQueryAabbMax) const
727 {
728  btAssert(m_useQuantization);
729 
730  int i;
731 
732  for (i = 0; i < this->m_SubtreeHeaders.size(); i++)
733  {
734  const btBvhSubtreeInfo& subtree = m_SubtreeHeaders[i];
735 
736  //PCK: unsigned instead of bool
737  unsigned overlap = testQuantizedAabbAgainstQuantizedAabb(quantizedQueryAabbMin, quantizedQueryAabbMax, subtree.m_quantizedAabbMin, subtree.m_quantizedAabbMax);
738  if (overlap != 0)
739  {
740  walkStacklessQuantizedTree(nodeCallback, quantizedQueryAabbMin, quantizedQueryAabbMax,
741  subtree.m_rootNodeIndex,
742  subtree.m_rootNodeIndex + subtree.m_subtreeSize);
743  }
744  }
745 }
746 
747 void btQuantizedBvh::reportRayOverlappingNodex(btNodeOverlapCallback* nodeCallback, const btVector3& raySource, const btVector3& rayTarget) const
748 {
749  reportBoxCastOverlappingNodex(nodeCallback, raySource, rayTarget, btVector3(0, 0, 0), btVector3(0, 0, 0));
750 }
751 
752 void btQuantizedBvh::reportBoxCastOverlappingNodex(btNodeOverlapCallback* nodeCallback, const btVector3& raySource, const btVector3& rayTarget, const btVector3& aabbMin, const btVector3& aabbMax) const
753 {
754  //always use stackless
755 
756  if (m_useQuantization)
757  {
758  walkStacklessQuantizedTreeAgainstRay(nodeCallback, raySource, rayTarget, aabbMin, aabbMax, 0, m_curNodeIndex);
759  }
760  else
761  {
762  walkStacklessTreeAgainstRay(nodeCallback, raySource, rayTarget, aabbMin, aabbMax, 0, m_curNodeIndex);
763  }
764  /*
765  {
766  //recursive traversal
767  btVector3 qaabbMin = raySource;
768  btVector3 qaabbMax = raySource;
769  qaabbMin.setMin(rayTarget);
770  qaabbMax.setMax(rayTarget);
771  qaabbMin += aabbMin;
772  qaabbMax += aabbMax;
773  reportAabbOverlappingNodex(nodeCallback,qaabbMin,qaabbMax);
774  }
775  */
776 }
777 
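A caller-side sketch (hypothetical, not part of this file) showing both cast entry points: reportRayOverlappingNodex() is simply a box cast with zero extents, while reportBoxCastOverlappingNodex() sweeps an AABB given by aabbMin/aabbMax along the segment.

#include "btQuantizedBvh.h"

struct RayHitCandidates : public btNodeOverlapCallback
{
	virtual void processNode(int subPart, int triangleIndex)
	{
		// the exact ray/triangle intersection would be done here by the caller
		(void)subPart;
		(void)triangleIndex;
	}
};

static void rayQuerySketch(const btQuantizedBvh& bvh, const btVector3& from, const btVector3& to)
{
	RayHitCandidates callback;
	bvh.reportRayOverlappingNodex(&callback, from, to);      // zero-extent box cast
	bvh.reportBoxCastOverlappingNodex(&callback, from, to,   // swept-box variant
	                                  btVector3(-1, -1, -1), btVector3(1, 1, 1));
}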
778 void btQuantizedBvh::swapLeafNodes(int i, int splitIndex)
779 {
780  if (m_useQuantization)
781  {
782  btQuantizedBvhNode tmp = m_quantizedLeafNodes[i];
783  m_quantizedLeafNodes[i] = m_quantizedLeafNodes[splitIndex];
784  m_quantizedLeafNodes[splitIndex] = tmp;
785  }
786  else
787  {
788  btOptimizedBvhNode tmp = m_leafNodes[i];
789  m_leafNodes[i] = m_leafNodes[splitIndex];
790  m_leafNodes[splitIndex] = tmp;
791  }
792 }
793 
794 void btQuantizedBvh::assignInternalNodeFromLeafNode(int internalNode, int leafNodeIndex)
795 {
796  if (m_useQuantization)
797  {
798  m_quantizedContiguousNodes[internalNode] = m_quantizedLeafNodes[leafNodeIndex];
799  }
800  else
801  {
802  m_contiguousNodes[internalNode] = m_leafNodes[leafNodeIndex];
803  }
804 }
805 
806 //PCK: include
807 #include <new>
808 
809 #if 0
810 //PCK: consts
811 static const unsigned BVH_ALIGNMENT = 16;
812 static const unsigned BVH_ALIGNMENT_MASK = BVH_ALIGNMENT-1;
813 
814 static const unsigned BVH_ALIGNMENT_BLOCKS = 2;
815 #endif
816 
817 unsigned int btQuantizedBvh::getAlignmentSerializationPadding()
818 {
819  // I changed this to 0 since the extra padding is not needed or used.
820  return 0; //BVH_ALIGNMENT_BLOCKS * BVH_ALIGNMENT;
821 }
822 
823 unsigned btQuantizedBvh::calculateSerializeBufferSize() const
824 {
825  unsigned baseSize = sizeof(btQuantizedBvh) + getAlignmentSerializationPadding();
826  baseSize += sizeof(btBvhSubtreeInfo) * m_subtreeHeaderCount;
827  if (m_useQuantization)
828  {
829  return baseSize + m_curNodeIndex * sizeof(btQuantizedBvhNode);
830  }
831  return baseSize + m_curNodeIndex * sizeof(btOptimizedBvhNode);
832 }
833 
834 bool btQuantizedBvh::serialize(void* o_alignedDataBuffer, unsigned /*i_dataBufferSize */, bool i_swapEndian) const
835 {
836  btAssert(m_subtreeHeaderCount == m_SubtreeHeaders.size());
837  m_subtreeHeaderCount = m_SubtreeHeaders.size();
838 
839  /* if (i_dataBufferSize < calculateSerializeBufferSize() || o_alignedDataBuffer == NULL || (((unsigned)o_alignedDataBuffer & BVH_ALIGNMENT_MASK) != 0))
840  {
842  btAssert(0);
843  return false;
844  }
845 */
846 
847  btQuantizedBvh* targetBvh = (btQuantizedBvh*)o_alignedDataBuffer;
848 
849  // construct the class so the virtual function table, etc will be set up
850  // Also, m_leafNodes and m_quantizedLeafNodes will be initialized to default values by the constructor
851  new (targetBvh) btQuantizedBvh;
852 
853  if (i_swapEndian)
854  {
855  targetBvh->m_curNodeIndex = static_cast<int>(btSwapEndian(m_curNodeIndex));
856 
857  btSwapVector3Endian(m_bvhAabbMin, targetBvh->m_bvhAabbMin);
858  btSwapVector3Endian(m_bvhAabbMax, targetBvh->m_bvhAabbMax);
859  btSwapVector3Endian(m_bvhQuantization, targetBvh->m_bvhQuantization);
860 
861  targetBvh->m_traversalMode = (btTraversalMode)btSwapEndian(m_traversalMode);
862  targetBvh->m_subtreeHeaderCount = static_cast<int>(btSwapEndian(m_subtreeHeaderCount));
863  }
864  else
865  {
866  targetBvh->m_curNodeIndex = m_curNodeIndex;
867  targetBvh->m_bvhAabbMin = m_bvhAabbMin;
868  targetBvh->m_bvhAabbMax = m_bvhAabbMax;
869  targetBvh->m_bvhQuantization = m_bvhQuantization;
870  targetBvh->m_traversalMode = m_traversalMode;
871  targetBvh->m_subtreeHeaderCount = m_subtreeHeaderCount;
872  }
873 
874  targetBvh->m_useQuantization = m_useQuantization;
875 
876  unsigned char* nodeData = (unsigned char*)targetBvh;
877  nodeData += sizeof(btQuantizedBvh);
878 
879  unsigned sizeToAdd = 0; //(BVH_ALIGNMENT-((unsigned)nodeData & BVH_ALIGNMENT_MASK))&BVH_ALIGNMENT_MASK;
880  nodeData += sizeToAdd;
881 
882  int nodeCount = m_curNodeIndex;
883 
884  if (m_useQuantization)
885  {
886  targetBvh->m_quantizedContiguousNodes.initializeFromBuffer(nodeData, nodeCount, nodeCount);
887 
888  if (i_swapEndian)
889  {
890  for (int nodeIndex = 0; nodeIndex < nodeCount; nodeIndex++)
891  {
892  targetBvh->m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMin[0] = btSwapEndian(m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMin[0]);
893  targetBvh->m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMin[1] = btSwapEndian(m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMin[1]);
894  targetBvh->m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMin[2] = btSwapEndian(m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMin[2]);
895 
896  targetBvh->m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMax[0] = btSwapEndian(m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMax[0]);
897  targetBvh->m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMax[1] = btSwapEndian(m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMax[1]);
898  targetBvh->m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMax[2] = btSwapEndian(m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMax[2]);
899 
900  targetBvh->m_quantizedContiguousNodes[nodeIndex].m_escapeIndexOrTriangleIndex = static_cast<int>(btSwapEndian(m_quantizedContiguousNodes[nodeIndex].m_escapeIndexOrTriangleIndex));
901  }
902  }
903  else
904  {
905  for (int nodeIndex = 0; nodeIndex < nodeCount; nodeIndex++)
906  {
907  targetBvh->m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMin[0] = m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMin[0];
908  targetBvh->m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMin[1] = m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMin[1];
909  targetBvh->m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMin[2] = m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMin[2];
910 
911  targetBvh->m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMax[0] = m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMax[0];
912  targetBvh->m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMax[1] = m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMax[1];
913  targetBvh->m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMax[2] = m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMax[2];
914 
915  targetBvh->m_quantizedContiguousNodes[nodeIndex].m_escapeIndexOrTriangleIndex = m_quantizedContiguousNodes[nodeIndex].m_escapeIndexOrTriangleIndex;
916  }
917  }
918  nodeData += sizeof(btQuantizedBvhNode) * nodeCount;
919 
920  // this clears the pointer in the member variable it doesn't really do anything to the data
921  // it does call the destructor on the contained objects, but they are all classes with no destructor defined
922  // so the memory (which is not freed) is left alone
923  targetBvh->m_quantizedContiguousNodes.initializeFromBuffer(NULL, 0, 0);
924  }
925  else
926  {
927  targetBvh->m_contiguousNodes.initializeFromBuffer(nodeData, nodeCount, nodeCount);
928 
929  if (i_swapEndian)
930  {
931  for (int nodeIndex = 0; nodeIndex < nodeCount; nodeIndex++)
932  {
933  btSwapVector3Endian(m_contiguousNodes[nodeIndex].m_aabbMinOrg, targetBvh->m_contiguousNodes[nodeIndex].m_aabbMinOrg);
934  btSwapVector3Endian(m_contiguousNodes[nodeIndex].m_aabbMaxOrg, targetBvh->m_contiguousNodes[nodeIndex].m_aabbMaxOrg);
935 
936  targetBvh->m_contiguousNodes[nodeIndex].m_escapeIndex = static_cast<int>(btSwapEndian(m_contiguousNodes[nodeIndex].m_escapeIndex));
937  targetBvh->m_contiguousNodes[nodeIndex].m_subPart = static_cast<int>(btSwapEndian(m_contiguousNodes[nodeIndex].m_subPart));
938  targetBvh->m_contiguousNodes[nodeIndex].m_triangleIndex = static_cast<int>(btSwapEndian(m_contiguousNodes[nodeIndex].m_triangleIndex));
939  }
940  }
941  else
942  {
943  for (int nodeIndex = 0; nodeIndex < nodeCount; nodeIndex++)
944  {
945  targetBvh->m_contiguousNodes[nodeIndex].m_aabbMinOrg = m_contiguousNodes[nodeIndex].m_aabbMinOrg;
946  targetBvh->m_contiguousNodes[nodeIndex].m_aabbMaxOrg = m_contiguousNodes[nodeIndex].m_aabbMaxOrg;
947 
948  targetBvh->m_contiguousNodes[nodeIndex].m_escapeIndex = m_contiguousNodes[nodeIndex].m_escapeIndex;
949  targetBvh->m_contiguousNodes[nodeIndex].m_subPart = m_contiguousNodes[nodeIndex].m_subPart;
950  targetBvh->m_contiguousNodes[nodeIndex].m_triangleIndex = m_contiguousNodes[nodeIndex].m_triangleIndex;
951  }
952  }
953  nodeData += sizeof(btOptimizedBvhNode) * nodeCount;
954 
955  // this clears the pointer in the member variable it doesn't really do anything to the data
956  // it does call the destructor on the contained objects, but they are all classes with no destructor defined
957  // so the memory (which is not freed) is left alone
958  targetBvh->m_contiguousNodes.initializeFromBuffer(NULL, 0, 0);
959  }
960 
961  sizeToAdd = 0; //(BVH_ALIGNMENT-((unsigned)nodeData & BVH_ALIGNMENT_MASK))&BVH_ALIGNMENT_MASK;
962  nodeData += sizeToAdd;
963 
964  // Now serialize the subtree headers
965  targetBvh->m_SubtreeHeaders.initializeFromBuffer(nodeData, m_subtreeHeaderCount, m_subtreeHeaderCount);
966  if (i_swapEndian)
967  {
968  for (int i = 0; i < m_subtreeHeaderCount; i++)
969  {
970  targetBvh->m_SubtreeHeaders[i].m_quantizedAabbMin[0] = btSwapEndian(m_SubtreeHeaders[i].m_quantizedAabbMin[0]);
971  targetBvh->m_SubtreeHeaders[i].m_quantizedAabbMin[1] = btSwapEndian(m_SubtreeHeaders[i].m_quantizedAabbMin[1]);
972  targetBvh->m_SubtreeHeaders[i].m_quantizedAabbMin[2] = btSwapEndian(m_SubtreeHeaders[i].m_quantizedAabbMin[2]);
973 
974  targetBvh->m_SubtreeHeaders[i].m_quantizedAabbMax[0] = btSwapEndian(m_SubtreeHeaders[i].m_quantizedAabbMax[0]);
975  targetBvh->m_SubtreeHeaders[i].m_quantizedAabbMax[1] = btSwapEndian(m_SubtreeHeaders[i].m_quantizedAabbMax[1]);
976  targetBvh->m_SubtreeHeaders[i].m_quantizedAabbMax[2] = btSwapEndian(m_SubtreeHeaders[i].m_quantizedAabbMax[2]);
977 
978  targetBvh->m_SubtreeHeaders[i].m_rootNodeIndex = static_cast<int>(btSwapEndian(m_SubtreeHeaders[i].m_rootNodeIndex));
979  targetBvh->m_SubtreeHeaders[i].m_subtreeSize = static_cast<int>(btSwapEndian(m_SubtreeHeaders[i].m_subtreeSize));
980  }
981  }
982  else
983  {
984  for (int i = 0; i < m_subtreeHeaderCount; i++)
985  {
986  targetBvh->m_SubtreeHeaders[i].m_quantizedAabbMin[0] = (m_SubtreeHeaders[i].m_quantizedAabbMin[0]);
987  targetBvh->m_SubtreeHeaders[i].m_quantizedAabbMin[1] = (m_SubtreeHeaders[i].m_quantizedAabbMin[1]);
988  targetBvh->m_SubtreeHeaders[i].m_quantizedAabbMin[2] = (m_SubtreeHeaders[i].m_quantizedAabbMin[2]);
989 
990  targetBvh->m_SubtreeHeaders[i].m_quantizedAabbMax[0] = (m_SubtreeHeaders[i].m_quantizedAabbMax[0]);
991  targetBvh->m_SubtreeHeaders[i].m_quantizedAabbMax[1] = (m_SubtreeHeaders[i].m_quantizedAabbMax[1]);
992  targetBvh->m_SubtreeHeaders[i].m_quantizedAabbMax[2] = (m_SubtreeHeaders[i].m_quantizedAabbMax[2]);
993 
994  targetBvh->m_SubtreeHeaders[i].m_rootNodeIndex = (m_SubtreeHeaders[i].m_rootNodeIndex);
995  targetBvh->m_SubtreeHeaders[i].m_subtreeSize = (m_SubtreeHeaders[i].m_subtreeSize);
996 
997  // need to clear padding in destination buffer
998  targetBvh->m_SubtreeHeaders[i].m_padding[0] = 0;
999  targetBvh->m_SubtreeHeaders[i].m_padding[1] = 0;
1000  targetBvh->m_SubtreeHeaders[i].m_padding[2] = 0;
1001  }
1002  }
1003  nodeData += sizeof(btBvhSubtreeInfo) * m_subtreeHeaderCount;
1004 
1005  // this clears the pointer in the member variable it doesn't really do anything to the data
1006  // it does call the destructor on the contained objects, but they are all classes with no destructor defined
1007  // so the memory (which is not freed) is left alone
1008  targetBvh->m_SubtreeHeaders.initializeFromBuffer(NULL, 0, 0);
1009 
1010  // this wipes the virtual function table pointer at the start of the buffer for the class
1011  *((void**)o_alignedDataBuffer) = NULL;
1012 
1013  return true;
1014 }
1015 
1016 btQuantizedBvh* btQuantizedBvh::deSerializeInPlace(void* i_alignedDataBuffer, unsigned int i_dataBufferSize, bool i_swapEndian)
1017 {
1018  if (i_alignedDataBuffer == NULL) // || (((unsigned)i_alignedDataBuffer & BVH_ALIGNMENT_MASK) != 0))
1019  {
1020  return NULL;
1021  }
1022  btQuantizedBvh* bvh = (btQuantizedBvh*)i_alignedDataBuffer;
1023 
1024  if (i_swapEndian)
1025  {
1026  bvh->m_curNodeIndex = static_cast<int>(btSwapEndian(bvh->m_curNodeIndex));
1027 
1028  btUnSwapVector3Endian(bvh->m_bvhAabbMin);
1029  btUnSwapVector3Endian(bvh->m_bvhAabbMax);
1030  btUnSwapVector3Endian(bvh->m_bvhQuantization);
1031 
1032  bvh->m_traversalMode = (btTraversalMode)btSwapEndian(bvh->m_traversalMode);
1033  bvh->m_subtreeHeaderCount = static_cast<int>(btSwapEndian(bvh->m_subtreeHeaderCount));
1034  }
1035 
1036  unsigned int calculatedBufSize = bvh->calculateSerializeBufferSize();
1037  btAssert(calculatedBufSize <= i_dataBufferSize);
1038 
1039  if (calculatedBufSize > i_dataBufferSize)
1040  {
1041  return NULL;
1042  }
1043 
1044  unsigned char* nodeData = (unsigned char*)bvh;
1045  nodeData += sizeof(btQuantizedBvh);
1046 
1047  unsigned sizeToAdd = 0; //(BVH_ALIGNMENT-((unsigned)nodeData & BVH_ALIGNMENT_MASK))&BVH_ALIGNMENT_MASK;
1048  nodeData += sizeToAdd;
1049 
1050  int nodeCount = bvh->m_curNodeIndex;
1051 
1052  // Must call placement new to fill in virtual function table, etc, but we don't want to overwrite most data, so call a special version of the constructor
1053  // Also, m_leafNodes and m_quantizedLeafNodes will be initialized to default values by the constructor
1054  new (bvh) btQuantizedBvh(*bvh, false);
1055 
1056  if (bvh->m_useQuantization)
1057  {
1058  bvh->m_quantizedContiguousNodes.initializeFromBuffer(nodeData, nodeCount, nodeCount);
1059 
1060  if (i_swapEndian)
1061  {
1062  for (int nodeIndex = 0; nodeIndex < nodeCount; nodeIndex++)
1063  {
1064  bvh->m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMin[0] = btSwapEndian(bvh->m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMin[0]);
1065  bvh->m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMin[1] = btSwapEndian(bvh->m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMin[1]);
1066  bvh->m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMin[2] = btSwapEndian(bvh->m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMin[2]);
1067 
1068  bvh->m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMax[0] = btSwapEndian(bvh->m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMax[0]);
1069  bvh->m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMax[1] = btSwapEndian(bvh->m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMax[1]);
1070  bvh->m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMax[2] = btSwapEndian(bvh->m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMax[2]);
1071 
1072  bvh->m_quantizedContiguousNodes[nodeIndex].m_escapeIndexOrTriangleIndex = static_cast<int>(btSwapEndian(bvh->m_quantizedContiguousNodes[nodeIndex].m_escapeIndexOrTriangleIndex));
1073  }
1074  }
1075  nodeData += sizeof(btQuantizedBvhNode) * nodeCount;
1076  }
1077  else
1078  {
1079  bvh->m_contiguousNodes.initializeFromBuffer(nodeData, nodeCount, nodeCount);
1080 
1081  if (i_swapEndian)
1082  {
1083  for (int nodeIndex = 0; nodeIndex < nodeCount; nodeIndex++)
1084  {
1085  btUnSwapVector3Endian(bvh->m_contiguousNodes[nodeIndex].m_aabbMinOrg);
1086  btUnSwapVector3Endian(bvh->m_contiguousNodes[nodeIndex].m_aabbMaxOrg);
1087 
1088  bvh->m_contiguousNodes[nodeIndex].m_escapeIndex = static_cast<int>(btSwapEndian(bvh->m_contiguousNodes[nodeIndex].m_escapeIndex));
1089  bvh->m_contiguousNodes[nodeIndex].m_subPart = static_cast<int>(btSwapEndian(bvh->m_contiguousNodes[nodeIndex].m_subPart));
1090  bvh->m_contiguousNodes[nodeIndex].m_triangleIndex = static_cast<int>(btSwapEndian(bvh->m_contiguousNodes[nodeIndex].m_triangleIndex));
1091  }
1092  }
1093  nodeData += sizeof(btOptimizedBvhNode) * nodeCount;
1094  }
1095 
1096  sizeToAdd = 0; //(BVH_ALIGNMENT-((unsigned)nodeData & BVH_ALIGNMENT_MASK))&BVH_ALIGNMENT_MASK;
1097  nodeData += sizeToAdd;
1098 
1099  // Now serialize the subtree headers
1100  bvh->m_SubtreeHeaders.initializeFromBuffer(nodeData, bvh->m_subtreeHeaderCount, bvh->m_subtreeHeaderCount);
1101  if (i_swapEndian)
1102  {
1103  for (int i = 0; i < bvh->m_subtreeHeaderCount; i++)
1104  {
1105  bvh->m_SubtreeHeaders[i].m_quantizedAabbMin[0] = btSwapEndian(bvh->m_SubtreeHeaders[i].m_quantizedAabbMin[0]);
1106  bvh->m_SubtreeHeaders[i].m_quantizedAabbMin[1] = btSwapEndian(bvh->m_SubtreeHeaders[i].m_quantizedAabbMin[1]);
1107  bvh->m_SubtreeHeaders[i].m_quantizedAabbMin[2] = btSwapEndian(bvh->m_SubtreeHeaders[i].m_quantizedAabbMin[2]);
1108 
1109  bvh->m_SubtreeHeaders[i].m_quantizedAabbMax[0] = btSwapEndian(bvh->m_SubtreeHeaders[i].m_quantizedAabbMax[0]);
1110  bvh->m_SubtreeHeaders[i].m_quantizedAabbMax[1] = btSwapEndian(bvh->m_SubtreeHeaders[i].m_quantizedAabbMax[1]);
1111  bvh->m_SubtreeHeaders[i].m_quantizedAabbMax[2] = btSwapEndian(bvh->m_SubtreeHeaders[i].m_quantizedAabbMax[2]);
1112 
1113  bvh->m_SubtreeHeaders[i].m_rootNodeIndex = static_cast<int>(btSwapEndian(bvh->m_SubtreeHeaders[i].m_rootNodeIndex));
1114  bvh->m_SubtreeHeaders[i].m_subtreeSize = static_cast<int>(btSwapEndian(bvh->m_SubtreeHeaders[i].m_subtreeSize));
1115  }
1116  }
1117 
1118  return bvh;
1119 }
1120 
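A hedged round-trip sketch for the in-place serialization path above (caller code, not part of this file; it assumes the aligned allocator from LinearMath):

#include "btQuantizedBvh.h"
#include "LinearMath/btAlignedAllocator.h"

static void serializeRoundTripSketch(const btQuantizedBvh& bvh)
{
	unsigned bufferSize = bvh.calculateSerializeBufferSize();
	void* buffer = btAlignedAlloc(bufferSize, 16);        // serialize() expects a 16-byte aligned buffer
	if (bvh.serialize(buffer, bufferSize, false))         // false: keep native endianness
	{
		// re-create the BVH directly inside the buffer; the returned pointer aliases 'buffer'
		btQuantizedBvh* reloaded = btQuantizedBvh::deSerializeInPlace(buffer, bufferSize, false);
		(void)reloaded;  // keep 'buffer' alive for as long as 'reloaded' is in use
	}
	btAlignedFree(buffer);
}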
1121 // Constructor that prevents btVector3's default constructor from being called
1122 btQuantizedBvh::btQuantizedBvh(btQuantizedBvh& self, bool /* ownsMemory */) : m_bvhAabbMin(self.m_bvhAabbMin),
1123  m_bvhAabbMax(self.m_bvhAabbMax),
1124  m_bvhQuantization(self.m_bvhQuantization),
1125  m_bulletVersion(BT_BULLET_VERSION)
1126 {
1127 }
1128 
1129 void btQuantizedBvh::deSerializeFloat(struct btQuantizedBvhFloatData& quantizedBvhFloatData)
1130 {
1131  m_bvhAabbMax.deSerializeFloat(quantizedBvhFloatData.m_bvhAabbMax);
1132  m_bvhAabbMin.deSerializeFloat(quantizedBvhFloatData.m_bvhAabbMin);
1133  m_bvhQuantization.deSerializeFloat(quantizedBvhFloatData.m_bvhQuantization);
1134 
1135  m_curNodeIndex = quantizedBvhFloatData.m_curNodeIndex;
1136  m_useQuantization = quantizedBvhFloatData.m_useQuantization != 0;
1137 
1138  {
1139  int numElem = quantizedBvhFloatData.m_numContiguousLeafNodes;
1140  m_contiguousNodes.resize(numElem);
1141 
1142  if (numElem)
1143  {
1144  btOptimizedBvhNodeFloatData* memPtr = quantizedBvhFloatData.m_contiguousNodesPtr;
1145 
1146  for (int i = 0; i < numElem; i++, memPtr++)
1147  {
1148  m_contiguousNodes[i].m_aabbMaxOrg.deSerializeFloat(memPtr->m_aabbMaxOrg);
1149  m_contiguousNodes[i].m_aabbMinOrg.deSerializeFloat(memPtr->m_aabbMinOrg);
1150  m_contiguousNodes[i].m_escapeIndex = memPtr->m_escapeIndex;
1151  m_contiguousNodes[i].m_subPart = memPtr->m_subPart;
1152  m_contiguousNodes[i].m_triangleIndex = memPtr->m_triangleIndex;
1153  }
1154  }
1155  }
1156 
1157  {
1158  int numElem = quantizedBvhFloatData.m_numQuantizedContiguousNodes;
1159  m_quantizedContiguousNodes.resize(numElem);
1160 
1161  if (numElem)
1162  {
1163  btQuantizedBvhNodeData* memPtr = quantizedBvhFloatData.m_quantizedContiguousNodesPtr;
1164  for (int i = 0; i < numElem; i++, memPtr++)
1165  {
1166  m_quantizedContiguousNodes[i].m_escapeIndexOrTriangleIndex = memPtr->m_escapeIndexOrTriangleIndex;
1167  m_quantizedContiguousNodes[i].m_quantizedAabbMax[0] = memPtr->m_quantizedAabbMax[0];
1168  m_quantizedContiguousNodes[i].m_quantizedAabbMax[1] = memPtr->m_quantizedAabbMax[1];
1169  m_quantizedContiguousNodes[i].m_quantizedAabbMax[2] = memPtr->m_quantizedAabbMax[2];
1170  m_quantizedContiguousNodes[i].m_quantizedAabbMin[0] = memPtr->m_quantizedAabbMin[0];
1171  m_quantizedContiguousNodes[i].m_quantizedAabbMin[1] = memPtr->m_quantizedAabbMin[1];
1172  m_quantizedContiguousNodes[i].m_quantizedAabbMin[2] = memPtr->m_quantizedAabbMin[2];
1173  }
1174  }
1175  }
1176 
1177  m_traversalMode = btTraversalMode(quantizedBvhFloatData.m_traversalMode);
1178 
1179  {
1180  int numElem = quantizedBvhFloatData.m_numSubtreeHeaders;
1181  m_SubtreeHeaders.resize(numElem);
1182  if (numElem)
1183  {
1184  btBvhSubtreeInfoData* memPtr = quantizedBvhFloatData.m_subTreeInfoPtr;
1185  for (int i = 0; i < numElem; i++, memPtr++)
1186  {
1187  m_SubtreeHeaders[i].m_quantizedAabbMax[0] = memPtr->m_quantizedAabbMax[0];
1188  m_SubtreeHeaders[i].m_quantizedAabbMax[1] = memPtr->m_quantizedAabbMax[1];
1189  m_SubtreeHeaders[i].m_quantizedAabbMax[2] = memPtr->m_quantizedAabbMax[2];
1190  m_SubtreeHeaders[i].m_quantizedAabbMin[0] = memPtr->m_quantizedAabbMin[0];
1191  m_SubtreeHeaders[i].m_quantizedAabbMin[1] = memPtr->m_quantizedAabbMin[1];
1192  m_SubtreeHeaders[i].m_quantizedAabbMin[2] = memPtr->m_quantizedAabbMin[2];
1193  m_SubtreeHeaders[i].m_rootNodeIndex = memPtr->m_rootNodeIndex;
1194  m_SubtreeHeaders[i].m_subtreeSize = memPtr->m_subtreeSize;
1195  }
1196  }
1197  }
1198 }
1199 
1200 void btQuantizedBvh::deSerializeDouble(struct btQuantizedBvhDoubleData& quantizedBvhDoubleData)
1201 {
1202  m_bvhAabbMax.deSerializeDouble(quantizedBvhDoubleData.m_bvhAabbMax);
1203  m_bvhAabbMin.deSerializeDouble(quantizedBvhDoubleData.m_bvhAabbMin);
1204  m_bvhQuantization.deSerializeDouble(quantizedBvhDoubleData.m_bvhQuantization);
1205 
1206  m_curNodeIndex = quantizedBvhDoubleData.m_curNodeIndex;
1207  m_useQuantization = quantizedBvhDoubleData.m_useQuantization != 0;
1208 
1209  {
1210  int numElem = quantizedBvhDoubleData.m_numContiguousLeafNodes;
1211  m_contiguousNodes.resize(numElem);
1212 
1213  if (numElem)
1214  {
1215  btOptimizedBvhNodeDoubleData* memPtr = quantizedBvhDoubleData.m_contiguousNodesPtr;
1216 
1217  for (int i = 0; i < numElem; i++, memPtr++)
1218  {
1219  m_contiguousNodes[i].m_aabbMaxOrg.deSerializeDouble(memPtr->m_aabbMaxOrg);
1220  m_contiguousNodes[i].m_aabbMinOrg.deSerializeDouble(memPtr->m_aabbMinOrg);
1221  m_contiguousNodes[i].m_escapeIndex = memPtr->m_escapeIndex;
1222  m_contiguousNodes[i].m_subPart = memPtr->m_subPart;
1223  m_contiguousNodes[i].m_triangleIndex = memPtr->m_triangleIndex;
1224  }
1225  }
1226  }
1227 
1228  {
1229  int numElem = quantizedBvhDoubleData.m_numQuantizedContiguousNodes;
1230  m_quantizedContiguousNodes.resize(numElem);
1231 
1232  if (numElem)
1233  {
1234  btQuantizedBvhNodeData* memPtr = quantizedBvhDoubleData.m_quantizedContiguousNodesPtr;
1235  for (int i = 0; i < numElem; i++, memPtr++)
1236  {
1237  m_quantizedContiguousNodes[i].m_escapeIndexOrTriangleIndex = memPtr->m_escapeIndexOrTriangleIndex;
1238  m_quantizedContiguousNodes[i].m_quantizedAabbMax[0] = memPtr->m_quantizedAabbMax[0];
1239  m_quantizedContiguousNodes[i].m_quantizedAabbMax[1] = memPtr->m_quantizedAabbMax[1];
1240  m_quantizedContiguousNodes[i].m_quantizedAabbMax[2] = memPtr->m_quantizedAabbMax[2];
1241  m_quantizedContiguousNodes[i].m_quantizedAabbMin[0] = memPtr->m_quantizedAabbMin[0];
1242  m_quantizedContiguousNodes[i].m_quantizedAabbMin[1] = memPtr->m_quantizedAabbMin[1];
1243  m_quantizedContiguousNodes[i].m_quantizedAabbMin[2] = memPtr->m_quantizedAabbMin[2];
1244  }
1245  }
1246  }
1247 
1248  m_traversalMode = btTraversalMode(quantizedBvhDoubleData.m_traversalMode);
1249 
1250  {
1251  int numElem = quantizedBvhDoubleData.m_numSubtreeHeaders;
1252  m_SubtreeHeaders.resize(numElem);
1253  if (numElem)
1254  {
1255  btBvhSubtreeInfoData* memPtr = quantizedBvhDoubleData.m_subTreeInfoPtr;
1256  for (int i = 0; i < numElem; i++, memPtr++)
1257  {
1258  m_SubtreeHeaders[i].m_quantizedAabbMax[0] = memPtr->m_quantizedAabbMax[0];
1259  m_SubtreeHeaders[i].m_quantizedAabbMax[1] = memPtr->m_quantizedAabbMax[1];
1260  m_SubtreeHeaders[i].m_quantizedAabbMax[2] = memPtr->m_quantizedAabbMax[2];
1261  m_SubtreeHeaders[i].m_quantizedAabbMin[0] = memPtr->m_quantizedAabbMin[0];
1262  m_SubtreeHeaders[i].m_quantizedAabbMin[1] = memPtr->m_quantizedAabbMin[1];
1263  m_SubtreeHeaders[i].m_quantizedAabbMin[2] = memPtr->m_quantizedAabbMin[2];
1264  m_SubtreeHeaders[i].m_rootNodeIndex = memPtr->m_rootNodeIndex;
1265  m_SubtreeHeaders[i].m_subtreeSize = memPtr->m_subtreeSize;
1266  }
1267  }
1268  }
1269 }
1270 
1271 ///fills the dataBuffer and returns the struct name (and 0 on failure)
1272 const char* btQuantizedBvh::serialize(void* dataBuffer, btSerializer* serializer) const
1273 {
1274  btQuantizedBvhData* quantizedData = (btQuantizedBvhData*)dataBuffer;
1275 
1276  m_bvhAabbMax.serialize(quantizedData->m_bvhAabbMax);
1277  m_bvhAabbMin.serialize(quantizedData->m_bvhAabbMin);
1278  m_bvhQuantization.serialize(quantizedData->m_bvhQuantization);
1279 
1280  quantizedData->m_curNodeIndex = m_curNodeIndex;
1281  quantizedData->m_useQuantization = m_useQuantization;
1282 
1283  quantizedData->m_numContiguousLeafNodes = m_contiguousNodes.size();
1284  quantizedData->m_contiguousNodesPtr = (btOptimizedBvhNodeData*)(m_contiguousNodes.size() ? serializer->getUniquePointer((void*)&m_contiguousNodes[0]) : 0);
1285  if (quantizedData->m_contiguousNodesPtr)
1286  {
1287  int sz = sizeof(btOptimizedBvhNodeData);
1288  int numElem = m_contiguousNodes.size();
1289  btChunk* chunk = serializer->allocate(sz, numElem);
1290  btOptimizedBvhNodeData* memPtr = (btOptimizedBvhNodeData*)chunk->m_oldPtr;
1291  for (int i = 0; i < numElem; i++, memPtr++)
1292  {
1293  m_contiguousNodes[i].m_aabbMaxOrg.serialize(memPtr->m_aabbMaxOrg);
1294  m_contiguousNodes[i].m_aabbMinOrg.serialize(memPtr->m_aabbMinOrg);
1295  memPtr->m_escapeIndex = m_contiguousNodes[i].m_escapeIndex;
1296  memPtr->m_subPart = m_contiguousNodes[i].m_subPart;
1297  memPtr->m_triangleIndex = m_contiguousNodes[i].m_triangleIndex;
1298  // Fill padding with zeros to appease msan.
1299  memset(memPtr->m_pad, 0, sizeof(memPtr->m_pad));
1300  }
1301  serializer->finalizeChunk(chunk, "btOptimizedBvhNodeData", BT_ARRAY_CODE, (void*)&m_contiguousNodes[0]);
1302  }
1303 
1304  quantizedData->m_numQuantizedContiguousNodes = m_quantizedContiguousNodes.size();
1305  // printf("quantizedData->m_numQuantizedContiguousNodes=%d\n",quantizedData->m_numQuantizedContiguousNodes);
1306  quantizedData->m_quantizedContiguousNodesPtr = (btQuantizedBvhNodeData*)(m_quantizedContiguousNodes.size() ? serializer->getUniquePointer((void*)&m_quantizedContiguousNodes[0]) : 0);
1307  if (quantizedData->m_quantizedContiguousNodesPtr)
1308  {
1309  int sz = sizeof(btQuantizedBvhNodeData);
1310  int numElem = m_quantizedContiguousNodes.size();
1311  btChunk* chunk = serializer->allocate(sz, numElem);
1312  btQuantizedBvhNodeData* memPtr = (btQuantizedBvhNodeData*)chunk->m_oldPtr;
1313  for (int i = 0; i < numElem; i++, memPtr++)
1314  {
1315  memPtr->m_escapeIndexOrTriangleIndex = m_quantizedContiguousNodes[i].m_escapeIndexOrTriangleIndex;
1316  memPtr->m_quantizedAabbMax[0] = m_quantizedContiguousNodes[i].m_quantizedAabbMax[0];
1317  memPtr->m_quantizedAabbMax[1] = m_quantizedContiguousNodes[i].m_quantizedAabbMax[1];
1318  memPtr->m_quantizedAabbMax[2] = m_quantizedContiguousNodes[i].m_quantizedAabbMax[2];
1319  memPtr->m_quantizedAabbMin[0] = m_quantizedContiguousNodes[i].m_quantizedAabbMin[0];
1320  memPtr->m_quantizedAabbMin[1] = m_quantizedContiguousNodes[i].m_quantizedAabbMin[1];
1321  memPtr->m_quantizedAabbMin[2] = m_quantizedContiguousNodes[i].m_quantizedAabbMin[2];
1322  }
1323  serializer->finalizeChunk(chunk, "btQuantizedBvhNodeData", BT_ARRAY_CODE, (void*)&m_quantizedContiguousNodes[0]);
1324  }
1325 
1326  quantizedData->m_traversalMode = int(m_traversalMode);
1327  quantizedData->m_numSubtreeHeaders = m_SubtreeHeaders.size();
1328 
1329  quantizedData->m_subTreeInfoPtr = (btBvhSubtreeInfoData*)(m_SubtreeHeaders.size() ? serializer->getUniquePointer((void*)&m_SubtreeHeaders[0]) : 0);
1330  if (quantizedData->m_subTreeInfoPtr)
1331  {
1332  int sz = sizeof(btBvhSubtreeInfoData);
1333  int numElem = m_SubtreeHeaders.size();
1334  btChunk* chunk = serializer->allocate(sz, numElem);
1335  btBvhSubtreeInfoData* memPtr = (btBvhSubtreeInfoData*)chunk->m_oldPtr;
1336  for (int i = 0; i < numElem; i++, memPtr++)
1337  {
1338  memPtr->m_quantizedAabbMax[0] = m_SubtreeHeaders[i].m_quantizedAabbMax[0];
1339  memPtr->m_quantizedAabbMax[1] = m_SubtreeHeaders[i].m_quantizedAabbMax[1];
1340  memPtr->m_quantizedAabbMax[2] = m_SubtreeHeaders[i].m_quantizedAabbMax[2];
1341  memPtr->m_quantizedAabbMin[0] = m_SubtreeHeaders[i].m_quantizedAabbMin[0];
1342  memPtr->m_quantizedAabbMin[1] = m_SubtreeHeaders[i].m_quantizedAabbMin[1];
1343  memPtr->m_quantizedAabbMin[2] = m_SubtreeHeaders[i].m_quantizedAabbMin[2];
1344 
1345  memPtr->m_rootNodeIndex = m_SubtreeHeaders[i].m_rootNodeIndex;
1346  memPtr->m_subtreeSize = m_SubtreeHeaders[i].m_subtreeSize;
1347  }
1348  serializer->finalizeChunk(chunk, "btBvhSubtreeInfoData", BT_ARRAY_CODE, (void*)&m_SubtreeHeaders[0]);
1349  }
1350  return btQuantizedBvhDataName;
1351 }
btQuantizedBvh::reportRayOverlappingNodex
void reportRayOverlappingNodex(btNodeOverlapCallback *nodeCallback, const btVector3 &raySource, const btVector3 &rayTarget) const
Definition: btQuantizedBvh.cpp:747
btQuantizedBvh::TRAVERSAL_STACKLESS_CACHE_FRIENDLY
Definition: btQuantizedBvh.h:173
btOptimizedBvhNode::m_escapeIndex
int m_escapeIndex
Definition: btQuantizedBvh.h:104
btVector3::serialize
void serialize(struct btVector3Data &dataOut) const
Definition: btVector3.h:1317
btQuantizedBvhFloatData::m_useQuantization
int m_useQuantization
Definition: btQuantizedBvh.h:509
btOptimizedBvhNodeFloatData
Definition: btQuantizedBvh.h:475
btQuantizedBvh::TRAVERSAL_RECURSIVE
Definition: btQuantizedBvh.h:174
btQuantizedBvh::walkRecursiveQuantizedTreeAgainstQueryAabb
void walkRecursiveQuantizedTreeAgainstQueryAabb(const btQuantizedBvhNode *currentNode, btNodeOverlapCallback *nodeCallback, unsigned short int *quantizedQueryAabbMin, unsigned short int *quantizedQueryAabbMax) const
use the 16-byte stackless 'skipindex' node tree to do a recursive traversal
Definition: btQuantizedBvh.cpp:415
btQuantizedBvh::getAabbMin
btVector3 getAabbMin(int nodeIndex) const
Definition: btQuantizedBvh.h:224
btQuantizedBvh::unQuantize
btVector3 unQuantize(const unsigned short *vecIn) const
Definition: btQuantizedBvh.h:401
btQuantizedBvhDoubleData::m_bvhAabbMin
btVector3DoubleData m_bvhAabbMin
Definition: btQuantizedBvh.h:522
btQuantizedBvhDataName
#define btQuantizedBvhDataName
Definition: btQuantizedBvh.h:41
btOptimizedBvhNode::m_triangleIndex
int m_triangleIndex
Definition: btQuantizedBvh.h:109
btQuantizedBvh::m_traversalMode
btTraversalMode m_traversalMode
Definition: btQuantizedBvh.h:193
btQuantizedBvhFloatData::m_subTreeInfoPtr
btBvhSubtreeInfoData * m_subTreeInfoPtr
Definition: btQuantizedBvh.h:514
maxIterations
int maxIterations
Definition: btQuantizedBvh.cpp:349
btQuantizedBvhData
#define btQuantizedBvhData
Definition: btQuantizedBvh.h:39
btVector3::setValue
void setValue(const btScalar &_x, const btScalar &_y, const btScalar &_z)
Definition: btVector3.h:640
btScalar
float btScalar
The btScalar type abstracts floating point numbers, to easily switch between double and single floati...
Definition: btScalar.h:314
btQuantizedBvhDoubleData::m_bvhQuantization
btVector3DoubleData m_bvhQuantization
Definition: btQuantizedBvh.h:524
testQuantizedAabbAgainstQuantizedAabb
unsigned testQuantizedAabbAgainstQuantizedAabb(const unsigned short int *aabbMin1, const unsigned short int *aabbMax1, const unsigned short int *aabbMin2, const unsigned short int *aabbMax2)
Definition: btAabbUtil2.h:201
btQuantizedBvh::m_bvhQuantization
btVector3 m_bvhQuantization
Definition: btQuantizedBvh.h:180
btQuantizedBvhNodeData::m_escapeIndexOrTriangleIndex
int m_escapeIndexOrTriangleIndex
Definition: btQuantizedBvh.h:500
btQuantizedBvhFloatData::m_quantizedContiguousNodesPtr
btQuantizedBvhNodeData * m_quantizedContiguousNodesPtr
Definition: btQuantizedBvh.h:513
btQuantizedBvhDoubleData::m_useQuantization
int m_useQuantization
Definition: btQuantizedBvh.h:526
btQuantizedBvh::btTraversalMode
btTraversalMode
Definition: btQuantizedBvh.h:170
btQuantizedBvh::walkStacklessTreeAgainstRay
void walkStacklessTreeAgainstRay(btNodeOverlapCallback *nodeCallback, const btVector3 &raySource, const btVector3 &rayTarget, const btVector3 &aabbMin, const btVector3 &aabbMax, int startNodeIndex, int endNodeIndex) const
Definition: btQuantizedBvh.cpp:446
btQuantizedBvhNodeData
Definition: btQuantizedBvh.h:496
btSerializer::getUniquePointer
virtual void * getUniquePointer(void *oldPtr)=0
btChunk
Definition: btSerializer.h:47
btQuantizedBvhNode
btQuantizedBvhNode is a compressed aabb node, 16 bytes.
Definition: btQuantizedBvh.h:55
btQuantizedBvhNode::getPartId
int getPartId() const
Definition: btQuantizedBvh.h:84
btQuantizedBvh::serialize
virtual bool serialize(void *o_alignedDataBuffer, unsigned i_dataBufferSize, bool i_swapEndian) const
Data buffer MUST be 16 byte aligned.
Definition: btQuantizedBvh.cpp:834
btQuantizedBvhFloatData::m_traversalMode
int m_traversalMode
Definition: btQuantizedBvh.h:515
btQuantizedBvh::m_leafNodes
NodeArray m_leafNodes
Definition: btQuantizedBvh.h:188
btAlignedObjectArray::clear
void clear()
clear the array, deallocated memory. Generally it is better to use array.resize(0),...
Definition: btAlignedObjectArray.h:176
btQuantizedBvh
The btQuantizedBvh class stores an AABB tree that can be quickly traversed on CPU and Cell SPU.
Definition: btQuantizedBvh.h:166
btVector3::maxAxis
int maxAxis() const
Return the axis with the largest value Note return values are 0,1,2 for x, y, or z.
Definition: btVector3.h:477
btVector3::dot
btScalar dot(const btVector3 &v) const
Return the dot product.
Definition: btVector3.h:229
btOptimizedBvhNodeFloatData::m_subPart
int m_subPart
Definition: btQuantizedBvh.h:480
btQuantizedBvhFloatData::m_bvhAabbMin
btVector3FloatData m_bvhAabbMin
Definition: btQuantizedBvh.h:505
btQuantizedBvhDoubleData::m_quantizedContiguousNodesPtr
btQuantizedBvhNodeData * m_quantizedContiguousNodesPtr
Definition: btQuantizedBvh.h:530
btSwapVector3Endian
void btSwapVector3Endian(const btVector3 &sourceVec, btVector3 &destVec)
btSwapVector3Endian swaps vector endianness, useful for network and cross-platform serialization
Definition: btVector3.h:1231
btQuantizedBvh::setInternalNodeAabbMax
void setInternalNodeAabbMax(int nodeIndex, const btVector3 &aabbMax)
Definition: btQuantizedBvh.h:212
btOptimizedBvhNodeDoubleData
Definition: btQuantizedBvh.h:485
btQuantizedBvhFloatData::m_bvhAabbMax
btVector3FloatData m_bvhAabbMax
Definition: btQuantizedBvh.h:506
btQuantizedBvh::quantizeWithClamp
void quantizeWithClamp(unsigned short *out, const btVector3 &point2, int isMax) const
Definition: btQuantizedBvh.h:390
btOptimizedBvhNodeFloatData::m_escapeIndex
int m_escapeIndex
Definition: btQuantizedBvh.h:479
btQuantizedBvh::m_curNodeIndex
int m_curNodeIndex
Definition: btQuantizedBvh.h:184
btQuantizedBvh::walkStacklessQuantizedTree
void walkStacklessQuantizedTree(btNodeOverlapCallback *nodeCallback, unsigned short int *quantizedQueryAabbMin, unsigned short int *quantizedQueryAabbMax, int startNodeIndex, int endNodeIndex) const
Definition: btQuantizedBvh.cpp:661
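The "stackless" walkers rely on the escape-index encoding: nodes are stored in depth-first order, and a rejected internal node tells the loop how many array entries to skip. A condensed sketch of that loop (not the exact Bullet code, which also maintains iteration counts and asserts):

// Condensed sketch of an escape-index driven, stackless traversal over a
// depth-first node array such as m_quantizedContiguousNodes.
static void walkSketch(const btQuantizedBvhNode* nodes, int numNodes,
                       const unsigned short* quantizedQueryAabbMin,
                       const unsigned short* quantizedQueryAabbMax,
                       btNodeOverlapCallback* nodeCallback)
{
    int curIndex = 0;
    while (curIndex < numNodes)
    {
        const btQuantizedBvhNode& node = nodes[curIndex];
        unsigned overlaps = testQuantizedAabbAgainstQuantizedAabb(
            quantizedQueryAabbMin, quantizedQueryAabbMax,
            node.m_quantizedAabbMin, node.m_quantizedAabbMax);

        if (overlaps && node.isLeafNode())
            nodeCallback->processNode(node.getPartId(), node.getTriangleIndex());

        if (overlaps || node.isLeafNode())
            curIndex++;                          // descend into / step past this node
        else
            curIndex += node.getEscapeIndex();   // skip the entire rejected subtree
    }
}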
btQuantizedBvhDoubleData::m_subTreeInfoPtr
btBvhSubtreeInfoData * m_subTreeInfoPtr
Definition: btQuantizedBvh.h:534
btQuantizedBvhFloatData::m_bvhQuantization
btVector3FloatData m_bvhQuantization
Definition: btQuantizedBvh.h:507
btQuantizedBvh::m_contiguousNodes
NodeArray m_contiguousNodes
Definition: btQuantizedBvh.h:189
btQuantizedBvhNode::getEscapeIndex
int getEscapeIndex() const
Definition: btQuantizedBvh.h:71
btBvhSubtreeInfo::m_subtreeSize
int m_subtreeSize
Definition: btQuantizedBvh.h:128
btAssert
#define btAssert(x)
Definition: btScalar.h:153
btOptimizedBvhNode::m_aabbMaxOrg
btVector3 m_aabbMaxOrg
Definition: btQuantizedBvh.h:101
btOptimizedBvhNodeDoubleData::m_escapeIndex
int m_escapeIndex
Definition: btQuantizedBvh.h:489
BT_LARGE_FLOAT
#define BT_LARGE_FLOAT
Definition: btScalar.h:316
btQuantizedBvh::mergeInternalNodeAabb
void mergeInternalNodeAabb(int nodeIndex, const btVector3 &newAabbMin, const btVector3 &newAabbMax)
Definition: btQuantizedBvh.h:255
btQuantizedBvhNode::isLeafNode
bool isLeafNode() const
Definition: btQuantizedBvh.h:66
btIDebugDraw
The btIDebugDraw interface class allows hooking up a debug renderer to visually debug simulations.
Definition: btIDebugDraw.h:26
btBvhSubtreeInfoData::m_quantizedAabbMin
unsigned short m_quantizedAabbMin[3]
Definition: btQuantizedBvh.h:471
btBvhSubtreeInfo
btBvhSubtreeInfo provides info to gather a subtree of limited size
Definition: btQuantizedBvh.h:116
btBvhSubtreeInfoData::m_rootNodeIndex
int m_rootNodeIndex
Definition: btQuantizedBvh.h:469
btQuantizedBvhNode::m_quantizedAabbMax
unsigned short int m_quantizedAabbMax[3]
Definition: btQuantizedBvh.h:62
TestAabbAgainstAabb2
bool TestAabbAgainstAabb2(const btVector3 &aabbMin1, const btVector3 &aabbMax1, const btVector3 &aabbMin2, const btVector3 &aabbMax2)
conservative test for overlap between two aabbs
Definition: btAabbUtil2.h:43
btAlignedObjectArray::initializeFromBuffer
void initializeFromBuffer(void *buffer, int size, int capacity)
Definition: btAlignedObjectArray.h:487
bounds
static btDbvtVolume bounds(btDbvtNode **leaves, int count)
Definition: btDbvt.cpp:299
btAlignedObjectArray::resize
void resize(int newsize, const T &fillData=T())
Definition: btAlignedObjectArray.h:203
btQuantizedBvh::walkStacklessQuantizedTreeAgainstRay
void walkStacklessQuantizedTreeAgainstRay(btNodeOverlapCallback *nodeCallback, const btVector3 &raySource, const btVector3 &rayTarget, const btVector3 &aabbMin, const btVector3 &aabbMax, int startNodeIndex, int endNodeIndex) const
Definition: btQuantizedBvh.cpp:536
btVector3::setMax
void setMax(const btVector3 &other)
Set each element to the max of the current values and the values of another btVector3.
Definition: btVector3.h:609
btQuantizedBvhFloatData::m_curNodeIndex
int m_curNodeIndex
Definition: btQuantizedBvh.h:508
btOptimizedBvhNodeFloatData::m_aabbMinOrg
btVector3FloatData m_aabbMinOrg
Definition: btQuantizedBvh.h:477
btQuantizedBvh::updateSubtreeHeaders
void updateSubtreeHeaders(int leftChildNodexIndex, int rightChildNodexIndex)
Definition: btQuantizedBvh.cpp:200
btQuantizedBvh::m_useQuantization
bool m_useQuantization
Definition: btQuantizedBvh.h:186
btQuantizedBvh::reportBoxCastOverlappingNodex
void reportBoxCastOverlappingNodex(btNodeOverlapCallback *nodeCallback, const btVector3 &raySource, const btVector3 &rayTarget, const btVector3 &aabbMin, const btVector3 &aabbMax) const
Definition: btQuantizedBvh.cpp:752
btQuantizedBvhDoubleData::m_bvhAabbMax
btVector3DoubleData m_bvhAabbMax
Definition: btQuantizedBvh.h:523
btNodeOverlapCallback
Definition: btQuantizedBvh.h:147
btQuantizedBvh::deSerializeDouble
virtual void deSerializeDouble(struct btQuantizedBvhDoubleData &quantizedBvhDoubleData)
Definition: btQuantizedBvh.cpp:1200
btOptimizedBvhNodeFloatData::m_aabbMaxOrg
btVector3FloatData m_aabbMaxOrg
Definition: btQuantizedBvh.h:478
btOptimizedBvhNodeDoubleData::m_triangleIndex
int m_triangleIndex
Definition: btQuantizedBvh.h:491
btBvhSubtreeInfo::m_quantizedAabbMin
unsigned short int m_quantizedAabbMin[3]
Definition: btQuantizedBvh.h:123
btOptimizedBvhNodeFloatData::m_triangleIndex
int m_triangleIndex
Definition: btQuantizedBvh.h:481
btSerializer.h
btQuantizedBvh::m_bvhAabbMax
btVector3 m_bvhAabbMax
Definition: btQuantizedBvh.h:179
btSerializer::finalizeChunk
virtual void finalizeChunk(btChunk *chunk, const char *structType, int chunkCode, void *oldPtr)=0
btSwapEndian
unsigned btSwapEndian(unsigned val)
Definition: btScalar.h:651
btQuantizedBvhDoubleData::m_contiguousNodesPtr
btOptimizedBvhNodeDoubleData * m_contiguousNodesPtr
Definition: btQuantizedBvh.h:529
btRayAabb2
bool btRayAabb2(const btVector3 &rayFrom, const btVector3 &rayInvDirection, const unsigned int raySign[3], const btVector3 bounds[2], btScalar &tmin, btScalar lambda_min, btScalar lambda_max)
Definition: btAabbUtil2.h:82
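btRayAabb2 is the slab-based test selected by the RAYAABB2 define at the top of this file; the caller precomputes the inverse ray direction and per-axis sign bits once per ray. A hedged usage sketch (raySource, rayTarget, nodeAabbMin and nodeAabbMax are assumed inputs):

// Hedged usage sketch for btRayAabb2. The ray walkers in this file guard the
// division by zero with BT_LARGE_FLOAT in the same way.
btVector3 rayDir = (rayTarget - raySource).normalized();
btVector3 rayInvDir(
    rayDir[0] == btScalar(0.0) ? btScalar(BT_LARGE_FLOAT) : btScalar(1.0) / rayDir[0],
    rayDir[1] == btScalar(0.0) ? btScalar(BT_LARGE_FLOAT) : btScalar(1.0) / rayDir[1],
    rayDir[2] == btScalar(0.0) ? btScalar(BT_LARGE_FLOAT) : btScalar(1.0) / rayDir[2]);
unsigned int raySign[3] = { rayInvDir[0] < btScalar(0.0),
                            rayInvDir[1] < btScalar(0.0),
                            rayInvDir[2] < btScalar(0.0) };
btScalar lambdaMax = rayDir.dot(rayTarget - raySource);   // ray length acts as the far clip

btVector3 bounds[2] = { nodeAabbMin, nodeAabbMax };
btScalar tmin = btScalar(1.0);
bool hit = btRayAabb2(raySource, rayInvDir, raySign, bounds, tmin, btScalar(0.0), lambdaMax);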
btQuantizedBvh::m_quantizedLeafNodes
QuantizedNodeArray m_quantizedLeafNodes
Definition: btQuantizedBvh.h:190
btQuantizedBvh::deSerializeInPlace
static btQuantizedBvh * deSerializeInPlace(void *i_alignedDataBuffer, unsigned int i_dataBufferSize, bool i_swapEndian)
deSerializeInPlace loads and initializes a BVH from a buffer in memory 'in place'
Definition: btQuantizedBvh.cpp:1016
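A hypothetical loading counterpart to the serialize() sketch above: copy the serialized image into a 16-byte aligned buffer and let deSerializeInPlace rebuild the tree inside it. The buffer must outlive the returned BVH, since the node arrays point into it.

#include <cstring>  // memcpy

// Hypothetical helper; 'fileData'/'fileSize' are the bytes produced by
// serialize(), e.g. read back from disk by the application.
static btQuantizedBvh* loadBvhImage(const void* fileData, unsigned fileSize)
{
    void* alignedBuffer = btAlignedAlloc(fileSize, 16);  // same 16-byte alignment rule as serialize()
    memcpy(alignedBuffer, fileData, fileSize);
    return btQuantizedBvh::deSerializeInPlace(alignedBuffer, fileSize, false /*i_swapEndian*/);
}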
btQuantizedBvhNodeData::m_quantizedAabbMin
unsigned short m_quantizedAabbMin[3]
Definition: btQuantizedBvh.h:498
SIMD_INFINITY
#define SIMD_INFINITY
Definition: btScalar.h:544
btVector3
btVector3 can be used to represent 3D points and vectors.
Definition: btVector3.h:80
btQuantizedBvhDoubleData::m_numSubtreeHeaders
int m_numSubtreeHeaders
Definition: btQuantizedBvh.h:533
btQuantizedBvhDoubleData::m_numContiguousLeafNodes
int m_numContiguousLeafNodes
Definition: btQuantizedBvh.h:527
btQuantizedBvh.h
btOptimizedBvhNode::m_subPart
int m_subPart
Definition: btQuantizedBvh.h:108
btOptimizedBvhNodeDoubleData::m_subPart
int m_subPart
Definition: btQuantizedBvh.h:490
btChunk::m_oldPtr
void * m_oldPtr
Definition: btSerializer.h:52
btAabbUtil2.h
btQuantizedBvh::~btQuantizedBvh
virtual ~btQuantizedBvh()
Definition: btQuantizedBvh.cpp:112
btQuantizedBvh::setQuantizationValues
void setQuantizationValues(const btVector3 &bvhAabbMin, const btVector3 &bvhAabbMax, btScalar quantizationMargin=btScalar(1.0))
Expert/internal use only.
Definition: btQuantizedBvh.cpp:81
btQuantizedBvh::deSerializeFloat
virtual void deSerializeFloat(struct btQuantizedBvhFloatData &quantizedBvhFloatData)
Definition: btQuantizedBvh.cpp:1129
btQuantizedBvhFloatData::m_contiguousNodesPtr
btOptimizedBvhNodeFloatData * m_contiguousNodesPtr
Definition: btQuantizedBvh.h:512
btQuantizedBvh::walkStacklessQuantizedTreeCacheFriendly
void walkStacklessQuantizedTreeCacheFriendly(btNodeOverlapCallback *nodeCallback, unsigned short int *quantizedQueryAabbMin, unsigned short int *quantizedQueryAabbMax) const
tree traversal designed for small-memory processors like PS3 SPU
Definition: btQuantizedBvh.cpp:726
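The cache-friendly variant adds one outer loop over the btBvhSubtreeInfo headers: whole subtrees are culled by their quantized AABBs, and only the survivors are walked with the ordinary stackless loop, keeping each walk within a MAX_SUBTREE_SIZE_IN_BYTES sized slice of the node array. Roughly (written as if inside btQuantizedBvh, since the members involved are protected; a sketch, not the verbatim code):

for (int i = 0; i < m_SubtreeHeaders.size(); i++)
{
    const btBvhSubtreeInfo& subtree = m_SubtreeHeaders[i];
    if (testQuantizedAabbAgainstQuantizedAabb(quantizedQueryAabbMin, quantizedQueryAabbMax,
                                              subtree.m_quantizedAabbMin, subtree.m_quantizedAabbMax))
    {
        // each header addresses a contiguous, size-bounded range of nodes
        walkStacklessQuantizedTree(nodeCallback, quantizedQueryAabbMin, quantizedQueryAabbMax,
                                   subtree.m_rootNodeIndex,
                                   subtree.m_rootNodeIndex + subtree.m_subtreeSize);
    }
}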
btNodeOverlapCallback::processNode
virtual void processNode(int subPart, int triangleIndex)=0
btOptimizedBvhNodeData
#define btOptimizedBvhNodeData
Definition: btQuantizedBvh.h:40
btQuantizedBvh::m_SubtreeHeaders
BvhSubtreeInfoArray m_SubtreeHeaders
Definition: btQuantizedBvh.h:194
btQuantizedBvh::setInternalNodeEscapeIndex
void setInternalNodeEscapeIndex(int nodeIndex, int escapeIndex)
Definition: btQuantizedBvh.h:243
btQuantizedBvh::walkStacklessTree
void walkStacklessTree(btNodeOverlapCallback *nodeCallback, const btVector3 &aabbMin, const btVector3 &aabbMax) const
Definition: btQuantizedBvh.cpp:351
btBvhSubtreeInfoData::m_quantizedAabbMax
unsigned short m_quantizedAabbMax[3]
Definition: btQuantizedBvh.h:472
btUnSwapVector3Endian
void btUnSwapVector3Endian(btVector3 &vector)
btUnSwapVector3Endian swaps vector endianness, useful for network and cross-platform serialization
Definition: btVector3.h:1240
btSerializer
Definition: btSerializer.h:65
btVector3::setMin
void setMin(const btVector3 &other)
Set each element to the min of the current values and the values of another btVector3.
Definition: btVector3.h:626
btQuantizedBvh::getAlignmentSerializationPadding
static unsigned int getAlignmentSerializationPadding()
Definition: btQuantizedBvh.cpp:817
btAlignedObjectArray::expand
T & expand(const T &fillValue=T())
Definition: btAlignedObjectArray.h:242
btQuantizedBvhDoubleData::m_traversalMode
int m_traversalMode
Definition: btQuantizedBvh.h:532
btQuantizedBvh::setInternalNodeAabbMin
void setInternalNodeAabbMin(int nodeIndex, const btVector3 &aabbMin)
Two versions exist: one for quantized nodes and one for normal (unquantized) nodes.
Definition: btQuantizedBvh.h:201
BT_ARRAY_CODE
#define BT_ARRAY_CODE
Definition: btSerializer.h:118
btIDebugDraw.h
btQuantizedBvhFloatData::m_numQuantizedContiguousNodes
int m_numQuantizedContiguousNodes
Definition: btQuantizedBvh.h:511
btQuantizedBvh::btQuantizedBvh
btQuantizedBvh()
Definition: btQuantizedBvh.cpp:24
btQuantizedBvhFloatData
Definition: btQuantizedBvh.h:503
btQuantizedBvh::TRAVERSAL_STACKLESS
Definition: btQuantizedBvh.h:172
btOptimizedBvhNode::m_aabbMinOrg
btVector3 m_aabbMinOrg
Definition: btQuantizedBvh.h:100
btBvhSubtreeInfoData::m_subtreeSize
int m_subtreeSize
Definition: btQuantizedBvh.h:470
btVector3::deSerializeDouble
void deSerializeDouble(const struct btVector3DoubleData &dataIn)
Definition: btVector3.h:1311
btQuantizedBvh::calcSplittingAxis
int calcSplittingAxis(int startIndex, int endIndex)
Definition: btQuantizedBvh.cpp:285
btQuantizedBvh::quantize
void quantize(unsigned short *out, const btVector3 &point, int isMax) const
Definition: btQuantizedBvh.h:326
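Conceptually, quantize() maps a point that lies inside the BVH's quantization AABB onto a 16-bit integer lattice using the precomputed m_bvhQuantization scale; isMax selects the rounding direction so quantized boxes always enclose the original box. A simplified stand-alone sketch (the real inline code in btQuantizedBvh.h uses additional bit tricks, and points outside the AABB should go through quantizeWithClamp instead):

#include <cmath>  // std::floor / std::ceil

// Simplified sketch of the quantization mapping; names other than btVector3
// and btScalar are illustrative parameters, not the member variables.
static void quantizeSketch(unsigned short* out, const btVector3& point,
                           const btVector3& bvhAabbMin, const btVector3& bvhQuantization,
                           bool isMax)
{
    btVector3 v = (point - bvhAabbMin) * bvhQuantization;  // component-wise scale into lattice coordinates
    for (int i = 0; i < 3; i++)
    {
        // Round outward: maxima up, minima down, so the quantized AABB stays conservative.
        btScalar q = isMax ? btScalar(std::ceil(v[i])) : btScalar(std::floor(v[i]));
        out[i] = (unsigned short)q;
    }
}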
btQuantizedBvh::getAabbMax
btVector3 getAabbMax(int nodeIndex) const
Definition: btQuantizedBvh.h:233
btOptimizedBvhNode
btOptimizedBvhNode contains both internal and leaf node information.
Definition: btQuantizedBvh.h:94
btBvhSubtreeInfo::m_rootNodeIndex
int m_rootNodeIndex
Definition: btQuantizedBvh.h:126
btQuantizedBvhNodeData::m_quantizedAabbMax
unsigned short m_quantizedAabbMax[3]
Definition: btQuantizedBvh.h:499
btOptimizedBvhNodeDoubleData::m_aabbMinOrg
btVector3DoubleData m_aabbMinOrg
Definition: btQuantizedBvh.h:487
btBvhSubtreeInfo::setAabbFromQuantizeNode
void setAabbFromQuantizeNode(const btQuantizedBvhNode &quantizedNode)
Definition: btQuantizedBvh.h:136
btQuantizedBvh::buildInternal
void buildInternal()
buildInternal is expert use only: it assumes that setQuantizationValues and the LeafNodeArray are already initialized.
Definition: btQuantizedBvh.cpp:36
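Putting the expert-use entries together, the expected order when building a quantized BVH manually is: set the quantization frame, append quantized leaf nodes to the leaf array, then call buildInternal() to emit the contiguous tree. A hedged sketch, assuming application-side arrays triangleAabbMin/triangleAabbMax and a scene AABB (all such names are illustrative):

// Hedged sketch of the expert-use build path described above.
btQuantizedBvh* bvh = new btQuantizedBvh();
bvh->setQuantizationValues(sceneAabbMin, sceneAabbMax);

QuantizedNodeArray& leafNodes = bvh->getLeafNodeArray();  // accessor declared alongside buildInternal()
for (int i = 0; i < numTriangles; i++)
{
    btQuantizedBvhNode node;
    bvh->quantizeWithClamp(node.m_quantizedAabbMin, triangleAabbMin[i], 0);
    bvh->quantizeWithClamp(node.m_quantizedAabbMax, triangleAabbMax[i], 1);
    node.m_escapeIndexOrTriangleIndex = i;  // leaf: non-negative triangle index (part id 0 assumed)
    leafNodes.push_back(node);
}
bvh->buildInternal();  // sorts the leaves and writes the contiguous internal nodes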
btBvhSubtreeInfoData
Definition: btQuantizedBvh.h:467
btOptimizedBvhNodeDoubleData::m_aabbMaxOrg
btVector3DoubleData m_aabbMaxOrg
Definition: btQuantizedBvh.h:488
btVector3::deSerializeFloat
void deSerializeFloat(const struct btVector3FloatData &dataIn)
Definition: btVector3.h:1298
btQuantizedBvh::swapLeafNodes
void swapLeafNodes(int firstIndex, int secondIndex)
Definition: btQuantizedBvh.cpp:778
btQuantizedBvh::reportAabbOverlappingNodex
void reportAabbOverlappingNodex(btNodeOverlapCallback *nodeCallback, const btVector3 &aabbMin, const btVector3 &aabbMax) const
Expert/internal use only.
Definition: btQuantizedBvh.cpp:312
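Query results are delivered through the btNodeOverlapCallback interface listed on this page; a minimal hypothetical caller collects candidate triangles like this (bvh, queryAabbMin and queryAabbMax are assumed to exist):

// Minimal hypothetical callback: gathers triangle indices whose leaf AABBs
// overlap the query box, for later narrow-phase testing.
struct CollectingCallback : public btNodeOverlapCallback
{
    btAlignedObjectArray<int> m_triangleIndices;

    virtual void processNode(int subPart, int triangleIndex)
    {
        (void)subPart;  // single-part meshes typically report part 0 here
        m_triangleIndices.push_back(triangleIndex);
    }
};

CollectingCallback callback;
bvh->reportAabbOverlappingNodex(&callback, queryAabbMin, queryAabbMax);
// callback.m_triangleIndices now holds the overlap candidates.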
BT_BULLET_VERSION
#define BT_BULLET_VERSION
Definition: btScalar.h:28
btQuantizedBvhNode::getTriangleIndex
int getTriangleIndex() const
Definition: btQuantizedBvh.h:76
btQuantizedBvh::sortAndCalcSplittingIndex
int sortAndCalcSplittingIndex(int startIndex, int endIndex, int splitAxis)
Definition: btQuantizedBvh.cpp:232
btQuantizedBvhDoubleData
Definition: btQuantizedBvh.h:520
btQuantizedBvhFloatData::m_numContiguousLeafNodes
int m_numContiguousLeafNodes
Definition: btQuantizedBvh.h:510
btQuantizedBvh::buildTree
void buildTree(int startIndex, int endIndex)
Definition: btQuantizedBvh.cpp:121
btBvhSubtreeInfo::m_quantizedAabbMax
unsigned short int m_quantizedAabbMax[3]
Definition: btQuantizedBvh.h:124
btQuantizedBvh::m_subtreeHeaderCount
int m_subtreeHeaderCount
Definition: btQuantizedBvh.h:197
btVector3::normalize
btVector3 & normalize()
Normalize this vector so that x^2 + y^2 + z^2 = 1.
Definition: btVector3.h:303
btQuantizedBvh::assignInternalNodeFromLeafNode
void assignInternalNodeFromLeafNode(int internalNode, int leafNodeIndex)
Definition: btQuantizedBvh.cpp:794
btQuantizedBvh::m_quantizedContiguousNodes
QuantizedNodeArray m_quantizedContiguousNodes
Definition: btQuantizedBvh.h:191
btQuantizedBvhDoubleData::m_numQuantizedContiguousNodes
int m_numQuantizedContiguousNodes
Definition: btQuantizedBvh.h:528
MAX_SUBTREE_SIZE_IN_BYTES
#define MAX_SUBTREE_SIZE_IN_BYTES
Definition: btQuantizedBvh.h:47
btQuantizedBvh::m_bvhAabbMin
btVector3 m_bvhAabbMin
Definition: btQuantizedBvh.h:178
btSerializer::allocate
virtual btChunk * allocate(size_t size, int numElements)=0
btQuantizedBvh::calculateSerializeBufferSize
unsigned calculateSerializeBufferSize() const
Definition: btQuantizedBvh.cpp:823
btIDebugDraw::drawAabb
virtual void drawAabb(const btVector3 &from, const btVector3 &to, const btVector3 &color)
Definition: btIDebugDraw.h:135
btAlignedObjectArray::size
int size() const
return the number of elements in the array
Definition: btAlignedObjectArray.h:142
btRayAabb
bool btRayAabb(const btVector3 &rayFrom, const btVector3 &rayTo, const btVector3 &aabbMin, const btVector3 &aabbMax, btScalar &param, btVector3 &normal)
Definition: btAabbUtil2.h:117
btQuantizedBvhDoubleData::m_curNodeIndex
int m_curNodeIndex
Definition: btQuantizedBvh.h:525
btQuantizedBvhFloatData::m_numSubtreeHeaders
int m_numSubtreeHeaders
Definition: btQuantizedBvh.h:516
btQuantizedBvhNode::m_quantizedAabbMin
unsigned short int m_quantizedAabbMin[3]
Definition: btQuantizedBvh.h:61