Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions src/Triangle.Examples/Examples/Example5.cs
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,10 @@ public static bool Run(bool print = false)
VariableArea = true
};

// The aCute refinement algorithm might fail when used with variable
// constraints, so we use Ruppert's refinement algorithm here.
quality.UseLegacyRefinement = true;

//quality.UserTest = (t, area) => t.Label == 1 && area > 0.01;

var mesh = poly.Triangulate(options, quality);
Expand Down
1 change: 1 addition & 0 deletions src/Triangle.Tests/Tools/AdjacencyMatrixTest.cs
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ public void TestAdjacencyMatrix()
// Highest vertex id after renumbering is 4, since there
// is no duplicate vertex.
Assert.AreEqual(4, matrix.RowIndices.Max());
Assert.AreEqual(5, matrix.ColumnCount);
}

[Test]
Expand Down
49 changes: 49 additions & 0 deletions src/Triangle.Tests/Tools/CuthillMcKeeTest.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
using NUnit.Framework;
using TriangleNet.Meshing;
using TriangleNet.Tools;

namespace TriangleNet.Tests.Tools
{
    /// <summary>
    /// Tests for the <c>CuthillMcKee</c> renumbering algorithm.
    /// </summary>
    public class CuthillMcKeeTest
    {
        /// <summary>
        /// Builds the adjacency matrix of a small structured mesh, renumbers the
        /// mesh vertices with (reverse) Cuthill-McKee and verifies the permuted
        /// adjacency structure and bandwidth.
        /// </summary>
        [Test]
        public void TestCuthillMcKee()
        {
            // Structured mesh of a 4.0 x 3.0 rectangle with 2x1 cells,
            // i.e. (2+1)*(1+1) = 6 vertices with known connectivity.
            var mesh = (Mesh)GenericMesher.StructuredMesh(4.0, 3.0, 2, 1);

            var matrix = new AdjacencyMatrix(mesh);

            // NUnit convention is AreEqual(expected, actual): expected literal first,
            // computed value second, so failure messages report the right direction.
            CollectionAssert.AreEqual(new int[] { 0, 4, 7, 11, 17, 21, 24 }, matrix.ColumnPointers);
            CollectionAssert.AreEqual(new int[] {
                0, 1, 2, 3,
                0, 1, 3,
                0, 2, 3, 4,
                0, 1, 2, 3, 4, 5,
                2, 3, 4, 5,
                3, 4, 5 }, matrix.RowIndices);

            Assert.AreEqual(7, matrix.Bandwidth());

            // Compute the permutation and apply it to the mesh vertex ids.
            var p = new CuthillMcKee().Renumber(matrix);

            foreach (var node in mesh.Vertices)
            {
                node.ID = p[node.ID];
            }

            // Rebuild the adjacency matrix without renumbering (the ids were
            // just permuted above and must not be overwritten).
            var pmatrix = new AdjacencyMatrix(mesh, false);

            CollectionAssert.AreEqual(new int[] { 0, 3, 7, 11, 15, 21, 24 }, pmatrix.ColumnPointers);
            CollectionAssert.AreEqual(new int[] {
                0, 2, 4,
                1, 2, 3, 4,
                0, 1, 2, 4,
                1, 3, 4, 5,
                0, 1, 2, 3, 4, 5,
                3, 4, 5 }, pmatrix.RowIndices);

            // For structured meshes we cannot expect an improved bandwidth.
            Assert.AreEqual(9, pmatrix.Bandwidth());
        }
    }
}
2 changes: 1 addition & 1 deletion src/Triangle/Meshing/QualityOptions.cs
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@ public class QualityOptions
/// </summary>
/// <remarks>
/// If this flag is set to true, the original Triangle refinement algorithm will be
/// used (Ruppert's algorithm). Otherwise the aCute algorithm used.
/// used (Ruppert's algorithm). Otherwise the aCute algorithm is used.
/// </remarks>
public bool UseLegacyRefinement { get; set; }
}
Expand Down
42 changes: 26 additions & 16 deletions src/Triangle/Tools/AdjacencyMatrix.cs
Original file line number Diff line number Diff line change
Expand Up @@ -45,21 +45,36 @@ public class AdjacencyMatrix
/// </summary>
/// <param name="mesh">The mesh.</param>
/// <remarks>
/// NOTE: as a side effect, computing the adjacency matrix will affect the
/// node numbering of the mesh.
/// As a side effect, this constructor will affect the node numbering of the
/// mesh to ensure that all regular vertices are numbered in a linear way (undead
/// vertices will be skipped and have negative ids). If you want to avoid the
/// renumbering, use the <see cref="AdjacencyMatrix(Mesh, bool)"/> constructor.
/// </remarks>
public AdjacencyMatrix(Mesh mesh)
: this(mesh, true)
{
}

/// <summary>
/// Initializes a new instance of the <see cref="AdjacencyMatrix" /> class.
/// </summary>
/// <param name="mesh">The mesh.</param>
/// <param name="renumber">Determines whether nodes should automatically be renumbered.</param>
public AdjacencyMatrix(Mesh mesh, bool renumber)
{
int n = mesh.vertices.Count;

// Undead vertices should not be considered in the adjacency matrix.
ColumnCount = n - mesh.undeads;

// Renumber nodes, excluding undeads.
int i = 0;
foreach (var vertex in mesh.vertices.Values)
if (renumber)
{
vertex.id = vertex.type == VertexType.UndeadVertex ? -i : i++;
// Renumber nodes, excluding undeads.
int i = 0;
foreach (var vertex in mesh.vertices.Values)
{
vertex.id = vertex.type == VertexType.UndeadVertex ? -i : i++;
}
}

// Set up the adj_row adjacency pointer array.
Expand Down Expand Up @@ -104,19 +119,14 @@ public AdjacencyMatrix(int[] pcol, int[] irow)
/// <returns>Bandwidth of the adjacency matrix.</returns>
public int Bandwidth()
{
int band_hi;
int band_lo;
int col;
int i, j;

band_lo = 0;
band_hi = 0;
int band_lo = 0;
int band_hi = 0;

for (i = 0; i < ColumnCount; i++)
for (int i = 0; i < ColumnCount; i++)
{
for (j = pcol[i]; j < pcol[i + 1]; j++)
for (int j = pcol[i]; j < pcol[i + 1]; j++)
{
col = irow[j];
int col = irow[j];
band_lo = Math.Max(band_lo, i - col);
band_hi = Math.Max(band_hi, col - i);
}
Expand Down
103 changes: 47 additions & 56 deletions src/Triangle/Tools/CuthillMcKee.cs
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,23 @@ namespace TriangleNet.Tools
/// Applies the Cuthill and McKee renumbering algorithm to reduce the bandwidth of
/// the adjacency matrix associated with the mesh.
/// </summary>
/// <remarks>
/// References:
///
/// Alan George, Joseph Liu,
/// Computer Solution of Large Sparse Positive Definite Systems,
/// Prentice Hall, 1981.
///
/// Norman Gibbs, William Poole, Paul Stockmeyer,
/// An Algorithm for Reducing the Bandwidth and Profile of a Sparse Matrix,
/// SIAM Journal on Numerical Analysis,
/// Volume 13, pages 236-250, 1976.
///
/// Norman Gibbs,
/// Algorithm 509: A Hybrid Profile Reduction Algorithm,
/// ACM Transactions on Mathematical Software,
/// Volume 2, pages 378-387, 1976.
/// </remarks>
public class CuthillMcKee
{
// The adjacency matrix of the mesh.
Expand All @@ -37,8 +54,6 @@ public int[] Renumber(AdjacencyMatrix matrix)
{
this.matrix = matrix;

int bandwidth1 = matrix.Bandwidth();

var pcol = matrix.ColumnPointers;

// Adjust column pointers (1-based indexing).
Expand All @@ -51,17 +66,18 @@ public int[] Renumber(AdjacencyMatrix matrix)

int[] perm_inv = PermInverse(perm);

int bandwidth2 = PermBandwidth(perm, perm_inv);
// Adjust column pointers (0-based indexing).
Shift(pcol, false);

if (Log.Verbose)
{
int bandwidth1 = matrix.Bandwidth();
int bandwidth2 = PermBandwidth(perm, perm_inv);

Log.Instance.Info(string.Format("Reverse Cuthill-McKee (Bandwidth: {0} > {1})",
bandwidth1, bandwidth2));
}

// Adjust column pointers (0-based indexing).
Shift(pcol, false);

return perm_inv;
}

Expand Down Expand Up @@ -139,18 +155,18 @@ int[] GenerateRcm()
/// <param name="offset">Internal array offset.</param>
/// <param name="iccsze">Output, int ICCSZE, the size of the connected component that has been numbered.</param>
/// <remarks>
/// The connected component is specified by a node ROOT and a mask.
/// The numbering starts at the root node.
/// The connected component is specified by a node ROOT and a mask.
/// The numbering starts at the root node.
///
/// An outline of the algorithm is as follows:
/// An outline of the algorithm is as follows:
///
/// X(1) = ROOT.
/// X(1) = ROOT.
///
/// for ( I = 1 to N-1)
/// Find all unlabeled neighbors of X(I),
/// assign them the next available labels, in order of increasing degree.
/// for ( I = 1 to N-1)
/// Find all unlabeled neighbors of X(I),
/// assign them the next available labels, in order of increasing degree.
///
/// When done, reverse the ordering.
/// When done, reverse the ordering.
/// </remarks>
void Rcm(int root, int[] mask, int[] perm, int offset, ref int iccsze)
{
Expand Down Expand Up @@ -292,21 +308,6 @@ void Rcm(int root, int[] mask, int[] perm, int offset, ref int iccsze)
/// returned as a list of nodes LS, and pointers to the beginning
/// of the list of nodes that are at a distance of 0, 1, 2, ...,
/// NODE_NUM-1 from the pseudo-peripheral node.
///
/// Reference:
/// Alan George, Joseph Liu,
/// Computer Solution of Large Sparse Positive Definite Systems,
/// Prentice Hall, 1981.
///
/// Norman Gibbs, William Poole, Paul Stockmeyer,
/// An Algorithm for Reducing the Bandwidth and Profile of a Sparse Matrix,
/// SIAM Journal on Numerical Analysis,
/// Volume 13, pages 236-250, 1976.
///
/// Norman Gibbs,
/// Algorithm 509: A Hybrid Profile Reduction Algorithm,
/// ACM Transactions on Mathematical Software,
/// Volume 2, pages 378-387, 1976.
/// </remarks>
void FindRoot(ref int root, int[] mask, ref int level_num, int[] level_row,
int[] level, int offset)
Expand Down Expand Up @@ -415,11 +416,6 @@ void FindRoot(ref int root, int[] mask, ref int level_num, int[] level_row,
/// assigned level 2 and masked. The process continues until there
/// are no unmasked nodes adjacent to any node in the current level.
/// The number of levels may vary between 2 and NODE_NUM.
///
/// Reference:
/// Alan George, Joseph Liu,
/// Computer Solution of Large Sparse Positive Definite Systems,
/// Prentice Hall, 1981.
/// </remarks>
void GetLevelSet(ref int root, int[] mask, ref int level_num, int[] level_row,
int[] level, int offset)
Expand Down Expand Up @@ -501,13 +497,8 @@ void GetLevelSet(ref int root, int[] mask, ref int level_num, int[] level_row,
/// connected component, starting with ROOT, and proceeding by levels.</param>
/// <param name="offset">Internal array offset.</param>
/// <remarks>
/// The connected component is specified by MASK and ROOT.
/// Nodes for which MASK is zero are ignored.
///
/// Reference:
/// Alan George, Joseph Liu,
/// Computer Solution of Large Sparse Positive Definite Systems,
/// Prentice Hall, 1981.
/// The connected component is specified by MASK and ROOT.
/// Nodes for which MASK is zero are ignored.
/// </remarks>
void Degree(int root, int[] mask, int[] deg, ref int iccsze, int[] ls, int offset)
{
Expand Down Expand Up @@ -547,13 +538,13 @@ void Degree(int root, int[] mask, int[] deg, ref int iccsze, int[] ls, int offse
{
nbr = irow[j - 1];

if (mask[nbr] != 0) // EDIT: [nbr - 1]
if (mask[nbr] != 0)
{
ideg = ideg + 1;

if (0 <= pcol[nbr]) // EDIT: [nbr - 1]
if (0 <= pcol[nbr])
{
pcol[nbr] = -pcol[nbr]; // EDIT: [nbr - 1]
pcol[nbr] = -pcol[nbr];
iccsze = iccsze + 1;
ls[offset + iccsze - 1] = nbr;
}
Expand Down Expand Up @@ -595,18 +586,20 @@ int PermBandwidth(int[] perm, int[] perm_inv)
int[] pcol = matrix.ColumnPointers;
int[] irow = matrix.RowIndices;

int col, i, j;
int col, end;

int band_lo = 0;
int band_hi = 0;

int n = matrix.ColumnCount;

for (i = 0; i < n; i++)
for (int i = 0; i < n; i++)
{
for (j = pcol[perm[i]]; j < pcol[perm[i] + 1]; j++)
end = pcol[perm[i] + 1];

for (int j = pcol[perm[i]]; j < end; j++)
{
col = perm_inv[irow[j - 1]];
col = perm_inv[irow[j]];
band_lo = Math.Max(band_lo, i - col);
band_hi = Math.Max(band_hi, col - i);
}
Expand Down Expand Up @@ -650,17 +643,15 @@ int[] PermInverse(int[] perm)
/// </example>
void ReverseVector(int[] a, int offset, int size)
{
int i;
int j;
int j, middle = offset + size / 2,
end = offset + size - 1;

for (i = 0; i < size / 2; i++)
for (int i = offset; i < middle; i++)
{
j = a[offset + i];
a[offset + i] = a[offset + size - 1 - i];
a[offset + size - 1 - i] = j;
j = a[i];
a[i] = a[end - i + offset];
a[end - i + offset] = j;
}

return;
}

void Shift(int[] a, bool up)
Expand Down