fix tests

Adrian Mariano 2021-11-11 09:09:54 -05:00
parent 69e8491659
commit ec87be11ec
2 changed files with 38 additions and 37 deletions


@@ -37,8 +37,8 @@ test_vnf_faces();
module test_vnf_from_polygons() {
verts = [[-1,-1,-1],[1,-1,-1],[0,1,-1],[0,0,1]];
faces = [[0,1,2],[0,1,3,2],[2,3,0]];
assert(vnf_merge(cleanup=true,
[vnf_from_polygons([for (face=faces) select(verts,face)])]) == [verts,faces]);
assert(vnf_merge_points(
vnf_from_polygons([for (face=faces) select(verts,face)])) == [verts,faces]);
}
test_vnf_from_polygons();
@ -58,12 +58,12 @@ module test_vnf_area(){
test_vnf_area();
module test_vnf_merge() {
module test_vnf_join() {
vnf1 = vnf_from_polygons([[[-1,-1,-1],[1,-1,-1],[0,1,-1]]]);
vnf2 = vnf_from_polygons([[[1,1,1],[-1,1,1],[0,1,-1]]]);
assert(vnf_merge([vnf1,vnf2]) == [[[-1,-1,-1],[1,-1,-1],[0,1,-1],[1,1,1],[-1,1,1],[0,1,-1]],[[0,1,2],[3,4,5]]]);
assert(vnf_join([vnf1,vnf2]) == [[[-1,-1,-1],[1,-1,-1],[0,1,-1],[1,1,1],[-1,1,1],[0,1,-1]],[[0,1,2],[3,4,5]]]);
}
test_vnf_merge();
test_vnf_join();
module test_vnf_triangulate() {


@@ -317,38 +317,6 @@ function vnf_join(vnfs) =
// Function: vnf_merge_points()
// Usage:
// new_vnf = vnf_merge_points(vnf, [eps]);
// Description:
// Given a VNF, consolidates all duplicate vertices within the tolerance `eps`, relabeling the faces as necessary,
// and eliminating any face left with fewer than 3 distinct vertices. Unreferenced vertices of the input VNF are not dropped.
// To remove such vertices, use {{vnf_drop_unused_points()}}.
// Arguments:
// vnf = a VNF to consolidate
// eps = the tolerance in finding duplicates. Default: EPSILON
function vnf_merge_points(vnf,eps=EPSILON) =
let(
verts = vnf[0],
dedup = vector_search(verts,eps,verts), // collect vertex duplicates
map = [for(i=idx(verts)) min(dedup[i]) ], // map each vertex to the lowest index among its duplicates
offset = cumsum([for(i=idx(verts)) map[i]==i ? 0 : 1 ]), // cumulative count of removed duplicates, used to reindex
map2 = list(idx(verts))-offset, // map old vertex indices to new indices
nverts = [for(i=idx(verts)) if(map[i]==i) verts[i] ], // this doesn't eliminate unreferenced vertices
nfaces =
[ for(face=vnf[1])
let(
nface = [ for(vi=face) map2[map[vi]] ], // face rewritten with the new vertex indices
dface = [for (i=idx(nface)) // drop repeated consecutive vertices
if( nface[i]!=nface[(i+1)%len(nface)])
nface[i] ]
)
if(len(dface) >= 3) dface
]
)
[nverts, nfaces];
// Function: vnf_from_polygons()
// Usage:
// vnf = vnf_from_polygons(polygons);
@@ -606,6 +574,39 @@ function vnf_quantize(vnf,q=pow(2,-12)) =
[[for (pt = vnf[0]) quant(pt,q)], vnf[1]];
// Function: vnf_merge_points()
// Usage:
// new_vnf = vnf_merge_points(vnf, [eps]);
// Description:
// Given a VNF, consolidates all duplicate vertices within the tolerance `eps`, relabeling the faces as necessary,
// and eliminating any face left with fewer than 3 distinct vertices. Unreferenced vertices of the input VNF are not dropped.
// To remove such vertices, use {{vnf_drop_unused_points()}}.
// Arguments:
// vnf = a VNF to consolidate
// eps = the tolerance in finding duplicates. Default: EPSILON
function vnf_merge_points(vnf,eps=EPSILON) =
let(
verts = vnf[0],
dedup = vector_search(verts,eps,verts), // collect vertex duplicates
map = [for(i=idx(verts)) min(dedup[i]) ], // map each vertex to the lowest index among its duplicates
offset = cumsum([for(i=idx(verts)) map[i]==i ? 0 : 1 ]), // cumulative count of removed duplicates, used to reindex
map2 = list(idx(verts))-offset, // map old vertex indices to new indices
nverts = [for(i=idx(verts)) if(map[i]==i) verts[i] ], // this doesn't eliminate unreferenced vertices
nfaces =
[ for(face=vnf[1])
let(
nface = [ for(vi=face) map2[map[vi]] ], // face rewritten with the new vertex indices
dface = [for (i=idx(nface)) // drop repeated consecutive vertices
if( nface[i]!=nface[(i+1)%len(nface)])
nface[i] ]
)
if(len(dface) >= 3) dface
]
)
[nverts, nfaces];
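A minimal usage sketch, assuming the standard `include <BOSL2/std.scad>` entry point: joining two triangles that share an edge leaves the shared corners duplicated in the joined VNF, and vnf_merge_points() consolidates them into single vertices.

include <BOSL2/std.scad>  // assumed entry point that provides the vnf functions
tri1 = vnf_from_polygons([[[0,0,0],[1,0,0],[0,1,0]]]);
tri2 = vnf_from_polygons([[[1,0,0],[1,1,0],[0,1,0]]]);
raw = vnf_join([tri1, tri2]);       // 6 vertices (the shared corners appear twice), 2 faces
merged = vnf_merge_points(raw);     // 4 vertices, 2 faces
echo(len(raw[0]), len(merged[0]));  // expect 6 and 4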
// Function: vnf_drop_unused_points()
// Usage:
// clean_vnf=vnf_drop_unused_points(vnf);