Skip to content
This repository was archived by the owner on Mar 17, 2024. It is now read-only.
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 4 additions & 2 deletions Client.Tests/Client.Tests.csproj
Original file line number Diff line number Diff line change
Expand Up @@ -55,9 +55,9 @@
</Choose>
<ItemGroup>
<Compile Include="Core\Compression\JpgCompression.Tests.cs" />
<Compile Include="Core\Compression\SafeQuickLZ.Tests.cs" />
<Compile Include="Core\Encryption\AES.Tests.cs" />
<Compile Include="Core\Encryption\SHA256.Tests.cs" />
<Compile Include="Core\Information\GeoIP.Tests.cs" />
<Compile Include="Properties\AssemblyInfo.cs" />
</ItemGroup>
<ItemGroup>
Expand All @@ -66,7 +66,9 @@
<Name>Client</Name>
</ProjectReference>
</ItemGroup>
<ItemGroup />
<ItemGroup>
<Folder Include="Core\Information\" />
</ItemGroup>
<Choose>
<When Condition="'$(VisualStudioVersion)' == '10.0' And '$(IsCodedUITest)' == 'True'">
<ItemGroup>
Expand Down
131 changes: 131 additions & 0 deletions Client.Tests/Core/Compression/SafeQuickLZ.Tests.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,131 @@
using System;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using xClient.Core.Compression;

namespace xClient.Tests.Core.Compression
{
    /// <summary>
    /// Round-trip tests for <see cref="SafeQuickLZ"/> compression/decompression
    /// using pseudo-randomly generated data.
    /// </summary>
    [TestClass]
    public class SafeQuickLZTests
    {
        // Fixed seed so any failure is reproducible run-to-run.
        private const int RandomSeed = 1337;
        private const int SmallDataSize = 100;
        private const int BigDataSize = 100000;

        /// <summary>
        /// Compresses and decompresses a random buffer of <paramref name="dataSize"/>
        /// bytes at the given QuickLZ <paramref name="level"/> and asserts that the
        /// data survives the round-trip unchanged.
        /// </summary>
        /// <param name="dataSize">Number of random bytes to generate.</param>
        /// <param name="level">QuickLZ compression level (1 or 3).</param>
        private static void AssertRoundTrip(int dataSize, int level)
        {
            SafeQuickLZ safeQuickLZ = new SafeQuickLZ();
            byte[] original = new byte[dataSize];

            // Fill the buffer with deterministic pseudo-random data.
            new Random(RandomSeed).NextBytes(original);

            // Store the compressed data.
            byte[] compressed = safeQuickLZ.Compress(original, 0, original.Length, level);

            // Compare array CONTENTS, not references: Assert.AreNotEqual on two
            // distinct byte[] instances always passes (reference inequality) and
            // would make this assertion vacuous.
            CollectionAssert.AreNotEqual(original, compressed, "Original data is equal to the compressed data!");

            // Store the decompressed data.
            byte[] decompressed = safeQuickLZ.Decompress(compressed, 0, compressed.Length);

            // The compressed data should not equal the decompressed data.
            CollectionAssert.AreNotEqual(compressed, decompressed, "Compressed data is equal to the decompressed data!");
            // The original data must equal the decompressed data; must be able to make a round-trip.
            CollectionAssert.AreEqual(original, decompressed, "Original data does not match the decompressed data!");
        }

        // Tests using pseudo-randomly generated data.
        #region Random Data

        /*
         * Purpose: To validate a small amount of data after compression/decompression
         * using SafeQuickLZ with level 1 compression.
         */
        [TestMethod]
        [TestCategory("Compression")]
        public void SmallDataTestLevel1()
        {
            AssertRoundTrip(SmallDataSize, 1);
        }

        /*
         * Purpose: To validate a small amount of data after compression/decompression
         * using SafeQuickLZ with level 3 compression.
         */
        [TestMethod]
        [TestCategory("Compression")]
        public void SmallDataTestLevel3()
        {
            AssertRoundTrip(SmallDataSize, 3);
        }

        /*
         * Purpose: To validate a large amount of data after compression/decompression
         * using SafeQuickLZ with level 1 compression.
         */
        [TestMethod]
        [TestCategory("Compression")]
        public void BigDataTestLevel1()
        {
            AssertRoundTrip(BigDataSize, 1);
        }

        /*
         * Purpose: To validate a large amount of data after compression/decompression
         * using SafeQuickLZ with level 3 compression.
         */
        [TestMethod]
        [TestCategory("Compression")]
        public void BigDataTestLevel3()
        {
            AssertRoundTrip(BigDataSize, 3);
        }

        #endregion
    }
}
21 changes: 0 additions & 21 deletions Client.Tests/Core/Information/GeoIP.Tests.cs

This file was deleted.

131 changes: 131 additions & 0 deletions Server.Tests/Core/Compression/SafeQuickLZ.Tests.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,131 @@
using System;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using xServer.Core.Compression;

namespace xServer.Tests.Core.Compression
{
    /// <summary>
    /// Round-trip tests for <see cref="SafeQuickLZ"/> compression/decompression
    /// using pseudo-randomly generated data.
    /// </summary>
    [TestClass]
    public class SafeQuickLZTests
    {
        // Fixed seed so any failure is reproducible run-to-run.
        private const int RandomSeed = 1337;
        private const int SmallDataSize = 100;
        private const int BigDataSize = 100000;

        /// <summary>
        /// Compresses and decompresses a random buffer of <paramref name="dataSize"/>
        /// bytes at the given QuickLZ <paramref name="level"/> and asserts that the
        /// data survives the round-trip unchanged.
        /// </summary>
        /// <param name="dataSize">Number of random bytes to generate.</param>
        /// <param name="level">QuickLZ compression level (1 or 3).</param>
        private static void AssertRoundTrip(int dataSize, int level)
        {
            SafeQuickLZ safeQuickLZ = new SafeQuickLZ();
            byte[] original = new byte[dataSize];

            // Fill the buffer with deterministic pseudo-random data.
            new Random(RandomSeed).NextBytes(original);

            // Store the compressed data.
            byte[] compressed = safeQuickLZ.Compress(original, 0, original.Length, level);

            // Compare array CONTENTS, not references: Assert.AreNotEqual on two
            // distinct byte[] instances always passes (reference inequality) and
            // would make this assertion vacuous.
            CollectionAssert.AreNotEqual(original, compressed, "Original data is equal to the compressed data!");

            // Store the decompressed data.
            byte[] decompressed = safeQuickLZ.Decompress(compressed, 0, compressed.Length);

            // The compressed data should not equal the decompressed data.
            CollectionAssert.AreNotEqual(compressed, decompressed, "Compressed data is equal to the decompressed data!");
            // The original data must equal the decompressed data; must be able to make a round-trip.
            CollectionAssert.AreEqual(original, decompressed, "Original data does not match the decompressed data!");
        }

        // Tests using pseudo-randomly generated data.
        #region Random Data

        /*
         * Purpose: To validate a small amount of data after compression/decompression
         * using SafeQuickLZ with level 1 compression.
         */
        [TestMethod]
        [TestCategory("Compression")]
        public void SmallDataTestLevel1()
        {
            AssertRoundTrip(SmallDataSize, 1);
        }

        /*
         * Purpose: To validate a small amount of data after compression/decompression
         * using SafeQuickLZ with level 3 compression.
         */
        [TestMethod]
        [TestCategory("Compression")]
        public void SmallDataTestLevel3()
        {
            AssertRoundTrip(SmallDataSize, 3);
        }

        /*
         * Purpose: To validate a large amount of data after compression/decompression
         * using SafeQuickLZ with level 1 compression.
         */
        [TestMethod]
        [TestCategory("Compression")]
        public void BigDataTestLevel1()
        {
            AssertRoundTrip(BigDataSize, 1);
        }

        /*
         * Purpose: To validate a large amount of data after compression/decompression
         * using SafeQuickLZ with level 3 compression.
         */
        [TestMethod]
        [TestCategory("Compression")]
        public void BigDataTestLevel3()
        {
            AssertRoundTrip(BigDataSize, 3);
        }

        #endregion
    }
}
1 change: 1 addition & 0 deletions Server.Tests/Server.Tests.csproj
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,7 @@
</Otherwise>
</Choose>
<ItemGroup>
<Compile Include="Core\Compression\SafeQuickLZ.Tests.cs" />
<Compile Include="Core\Encryption\AES.Tests.cs" />
<Compile Include="Properties\AssemblyInfo.cs" />
</ItemGroup>
Expand Down