Impose an artificial limit on the number of pages that a node can contain
Also, use IReadOnlyList instead of IReadOnlyCollection.
InusualZ committed Jan 1, 2021
1 parent 82806b7 commit 1c9b86b
Showing 1 changed file with 11 additions and 10 deletions.
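For context on the limit itself: the merger collects page references for the output page tree, and with this change a new intermediate node is started once the current one holds ARTIFICIAL_NODE_LIMIT (100) pages, in addition to the existing resource-collision trigger (see the last hunk below). A minimal sketch of that batching rule, with invented names (PageBatcher, Flush, NodeLimit) rather than the actual PdfPig types:

using System.Collections.Generic;

// Hypothetical illustration only: PageBatcher and its members are invented for this note
// and are not part of PdfPig.
internal sealed class PageBatcher
{
    // Plays the role of the ARTIFICIAL_NODE_LIMIT constant added by the commit.
    private const int NodeLimit = 100;

    private readonly List<int> currentKids = new List<int>();
    private readonly List<IReadOnlyList<int>> finishedNodes = new List<IReadOnlyList<int>>();

    public void Add(int pageReference)
    {
        // Start a new intermediate node once the current one holds NodeLimit pages,
        // so no single page-tree node accumulates an unbounded /Kids array.
        if (currentKids.Count >= NodeLimit)
        {
            Flush();
        }

        currentKids.Add(pageReference);
    }

    // Close the current intermediate node and begin a fresh one.
    public void Flush()
    {
        if (currentKids.Count == 0)
        {
            return;
        }

        finishedNodes.Add(currentKids.ToArray());
        currentKids.Clear();
    }

    public IReadOnlyList<IReadOnlyList<int>> Nodes => finishedNodes;
}

In the diff below, the equivalent flush is CreateTree(), which the foreach over pageIndices in the last hunk calls whenever either condition is hit.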
21 changes: 11 additions & 10 deletions src/UglyToad.PdfPig/Writer/PdfMerger.cs
@@ -31,7 +31,7 @@ public static class PdfMerger
/// <summary>
/// Merge two PDF documents together with the pages from <paramref name="file1"/> followed by <paramref name="file2"/>.
/// </summary>
- public static byte[] Merge(string file1, string file2, IReadOnlyCollection<int> file1Selection = null, IReadOnlyCollection<int> file2Selection = null)
+ public static byte[] Merge(string file1, string file2, IReadOnlyList<int> file1Selection = null, IReadOnlyList<int> file2Selection = null)
{
if (file1 == null)
{
@@ -75,7 +75,7 @@ public static byte[] Merge(params string[] filePaths)
/// <summary>
/// Merge the set of PDF documents.
/// </summary>
- public static byte[] Merge(IReadOnlyList<byte[]> files, IReadOnlyList<IReadOnlyCollection<int>> pagesBundle = null)
+ public static byte[] Merge(IReadOnlyList<byte[]> files, IReadOnlyList<IReadOnlyList<int>> pagesBundle = null)
{
if (files == null)
{
@@ -90,7 +90,7 @@ public static byte[] Merge(IReadOnlyList<byte[]> files, IReadOnlyList<IReadOnlyC
{
var file = files[fileIndex];

- IReadOnlyCollection<int> pages = null;
+ IReadOnlyList<int> pages = null;
if (pagesBundle != null && fileIndex < pagesBundle.Count)
{
pages = pagesBundle[fileIndex];
@@ -160,6 +160,8 @@ private class DocumentMerger
{
private const decimal DefaultVersion = 1.2m;

+ private const int ARTIFICIAL_NODE_LIMIT = 100;
+
private readonly PdfStreamWriter context = new PdfStreamWriter();
private readonly List<IndirectReferenceToken> pagesTokenReferences = new List<IndirectReferenceToken>();
private readonly IndirectReferenceToken rootPagesReference;
@@ -172,7 +174,7 @@ public DocumentMerger()
rootPagesReference = context.ReserveNumberToken();
}

- public void AppendDocument(Catalog catalog, decimal version, IPdfTokenScanner tokenScanner, IReadOnlyCollection<int> pages)
+ public void AppendDocument(Catalog catalog, decimal version, IPdfTokenScanner tokenScanner, IReadOnlyList<int> pages)
{
IEnumerable<int> pageIndices;
if (pages == null)
@@ -209,15 +211,15 @@ bool DoesAEntryCollide(PageTreeNode node)
var dictionary = node.NodeDictionary;
if (dictionary.TryGet(NameToken.Resources, tokenScanner, out DictionaryToken resourcesDictionary))
{
- var nonCollidingResources = resourcesDictionary.Data.Keys.Except(resources.Keys).ToList();
- if (nonCollidingResources.Count != resourcesDictionary.Data.Count)
+ var nonCollidingResources = resourcesDictionary.Data.Keys.Except(resources.Keys);
+ if (nonCollidingResources.Count() != resourcesDictionary.Data.Count)
{
// This means that at least one of the resources collided
return true;
}
}

- /* TODO: How to handle them?
+ /* TODO: How to handle?
* `Rotate`
* `CropBox`
* `MediaBox`
@@ -245,13 +247,12 @@ void CopyEntries(PageTreeNode node)
}
}

- /* TODO: How to handle them?
+ /* TODO: How to handle?
* `Rotate`
* `CropBox`
* `MediaBox`
*/

- // No colliding entry was found, in this node
// Keep walking up into the tree
node = node.Parent;
}
@@ -286,7 +287,7 @@ void CreateTree()
foreach (var pageIndex in pageIndices)
{
var pageNode = catalog.GetPageNode(pageIndex);
- if (DoesAEntryCollide(pageNode))
+ if (pagesReferences.Count >= ARTIFICIAL_NODE_LIMIT || DoesAEntryCollide(pageNode))
{
CreateTree();

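One note on the IReadOnlyCollection to IReadOnlyList part of the commit: IReadOnlyCollection<T> exposes only Count and enumeration, while IReadOnlyList<T> also exposes the indexer T this[int index]. Whether indexed access is the motivation here is an assumption on my part, but the difference is easy to illustrate:

using System;
using System.Collections.Generic;

internal static class ReadOnlyListDemo
{
    public static void Main()
    {
        IReadOnlyCollection<int> asCollection = new List<int> { 10, 20, 30 };
        IReadOnlyList<int> asList = new List<int> { 10, 20, 30 };

        // int bad = asCollection[1];  // does not compile: IReadOnlyCollection<T> has no indexer
        int second = asList[1];        // fine: IReadOnlyList<T> adds T this[int index]

        Console.WriteLine($"{asCollection.Count} items, second is {second}"); // "3 items, second is 20"
    }
}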
