diff --git a/Client.Tests/Client.Tests.csproj b/Client.Tests/Client.Tests.csproj
index 2ff7d184..99dd4b1d 100644
--- a/Client.Tests/Client.Tests.csproj
+++ b/Client.Tests/Client.Tests.csproj
@@ -55,9 +55,9 @@
+
-
@@ -66,7 +66,9 @@
Client
-
+
+
+
diff --git a/Client.Tests/Core/Compression/SafeQuickLZ.Tests.cs b/Client.Tests/Core/Compression/SafeQuickLZ.Tests.cs
new file mode 100644
index 00000000..324f22a7
--- /dev/null
+++ b/Client.Tests/Core/Compression/SafeQuickLZ.Tests.cs
@@ -0,0 +1,73 @@
+using System;
+using Microsoft.VisualStudio.TestTools.UnitTesting;
+using xClient.Core.Compression;
+
+namespace xClient.Tests.Core.Compression
+{
+    [TestClass]
+    public class SafeQuickLZTests
+    {
+        // Fixed seed so test failures are reproducible from run to run.
+        private const int RandomSeed = 1337;
+
+        /// <summary>
+        /// Compresses and decompresses a buffer of pseudo-random data with
+        /// SafeQuickLZ and verifies the round-trip restores the original bytes.
+        /// </summary>
+        /// <param name="dataSize">Size in bytes of the test buffer.</param>
+        /// <param name="level">QuickLZ compression level (1 or 3).</param>
+        private static void AssertRoundTrip(int dataSize, int level)
+        {
+            SafeQuickLZ safeQuickLZ = new SafeQuickLZ();
+            byte[] original = new byte[dataSize];
+
+            // Seeded so every run exercises the same byte sequence.
+            new Random(RandomSeed).NextBytes(original);
+
+            byte[] compressed = safeQuickLZ.Compress(original, 0, original.Length, level);
+
+            // Compare contents, not references: Assert.AreNotEqual on two
+            // distinct arrays always passes regardless of their contents.
+            CollectionAssert.AreNotEqual(original, compressed, "Original data is equal to the compressed data!");
+
+            byte[] decompressed = safeQuickLZ.Decompress(compressed, 0, compressed.Length);
+
+            CollectionAssert.AreNotEqual(compressed, decompressed, "Compressed data is equal to the decompressed data!");
+            // The round-trip must reproduce the original data exactly.
+            CollectionAssert.AreEqual(original, decompressed, "Original data does not match the decompressed data!");
+        }
+
+        // Tests using pseudo-randomly generated data.
+        #region Random Data
+
+        [TestMethod]
+        [TestCategory("Compression")]
+        public void SmallDataTestLevel1()
+        {
+            AssertRoundTrip(100, 1);
+        }
+
+        [TestMethod]
+        [TestCategory("Compression")]
+        public void SmallDataTestLevel3()
+        {
+            AssertRoundTrip(100, 3);
+        }
+
+        [TestMethod]
+        [TestCategory("Compression")]
+        public void BigDataTestLevel1()
+        {
+            AssertRoundTrip(100000, 1);
+        }
+
+        [TestMethod]
+        [TestCategory("Compression")]
+        public void BigDataTestLevel3()
+        {
+            AssertRoundTrip(100000, 3);
+        }
+
+        #endregion
+    }
+}
diff --git a/Client.Tests/Core/Information/GeoIP.Tests.cs b/Client.Tests/Core/Information/GeoIP.Tests.cs
deleted file mode 100644
index d210c31d..00000000
--- a/Client.Tests/Core/Information/GeoIP.Tests.cs
+++ /dev/null
@@ -1,21 +0,0 @@
-using Microsoft.VisualStudio.TestTools.UnitTesting;
-using xClient.Core.Information;
-
-
-namespace xClient.Tests.Core.Information
-{
- [TestClass]
- public class GeoIPTests
- {
- [TestMethod]
- public void GetGeoIPTest()
- {
- var ipInformation = new GeoIP();
- Assert.IsNotNull(ipInformation.City);
- Assert.IsNotNull(ipInformation.Country);
- Assert.IsNotNull(ipInformation.CountryCode);
- Assert.IsNotNull(ipInformation.Region);
- Assert.IsNotNull(ipInformation.WanIp);
- }
- }
-}
\ No newline at end of file
diff --git a/Server.Tests/Core/Compression/SafeQuickLZ.Tests.cs b/Server.Tests/Core/Compression/SafeQuickLZ.Tests.cs
new file mode 100644
index 00000000..5317ac43
--- /dev/null
+++ b/Server.Tests/Core/Compression/SafeQuickLZ.Tests.cs
@@ -0,0 +1,73 @@
+using System;
+using Microsoft.VisualStudio.TestTools.UnitTesting;
+using xServer.Core.Compression;
+
+namespace xServer.Tests.Core.Compression
+{
+    [TestClass]
+    public class SafeQuickLZTests
+    {
+        // Fixed seed so test failures are reproducible from run to run.
+        private const int RandomSeed = 1337;
+
+        /// <summary>
+        /// Compresses and decompresses a buffer of pseudo-random data with
+        /// SafeQuickLZ and verifies the round-trip restores the original bytes.
+        /// </summary>
+        /// <param name="dataSize">Size in bytes of the test buffer.</param>
+        /// <param name="level">QuickLZ compression level (1 or 3).</param>
+        private static void AssertRoundTrip(int dataSize, int level)
+        {
+            SafeQuickLZ safeQuickLZ = new SafeQuickLZ();
+            byte[] original = new byte[dataSize];
+
+            // Seeded so every run exercises the same byte sequence.
+            new Random(RandomSeed).NextBytes(original);
+
+            byte[] compressed = safeQuickLZ.Compress(original, 0, original.Length, level);
+
+            // Compare contents, not references: Assert.AreNotEqual on two
+            // distinct arrays always passes regardless of their contents.
+            CollectionAssert.AreNotEqual(original, compressed, "Original data is equal to the compressed data!");
+
+            byte[] decompressed = safeQuickLZ.Decompress(compressed, 0, compressed.Length);
+
+            CollectionAssert.AreNotEqual(compressed, decompressed, "Compressed data is equal to the decompressed data!");
+            // The round-trip must reproduce the original data exactly.
+            CollectionAssert.AreEqual(original, decompressed, "Original data does not match the decompressed data!");
+        }
+
+        // Tests using pseudo-randomly generated data.
+        #region Random Data
+
+        [TestMethod]
+        [TestCategory("Compression")]
+        public void SmallDataTestLevel1()
+        {
+            AssertRoundTrip(100, 1);
+        }
+
+        [TestMethod]
+        [TestCategory("Compression")]
+        public void SmallDataTestLevel3()
+        {
+            AssertRoundTrip(100, 3);
+        }
+
+        [TestMethod]
+        [TestCategory("Compression")]
+        public void BigDataTestLevel1()
+        {
+            AssertRoundTrip(100000, 1);
+        }
+
+        [TestMethod]
+        [TestCategory("Compression")]
+        public void BigDataTestLevel3()
+        {
+            AssertRoundTrip(100000, 3);
+        }
+
+        #endregion
+    }
+}
diff --git a/Server.Tests/Server.Tests.csproj b/Server.Tests/Server.Tests.csproj
index 3038eca0..bc43822e 100644
--- a/Server.Tests/Server.Tests.csproj
+++ b/Server.Tests/Server.Tests.csproj
@@ -53,6 +53,7 @@
+