Skip to content
Merged

Fixes #169

Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ Starting from version 4.20.0:
- You can download .NET 10.0 at: <https://dotnet.microsoft.com/en-us/download/dotnet/10.0>
- Minimum supported Windows version is Windows 10.

Versions between version 4.11.0 and 4.19.4:
Versions between 4.11.0 and 4.19.4:

- Requires .NET Desktop Runtime 8.0.x (or SDK) installed to run the application.
- You can download .NET 8.0 at: <https://dotnet.microsoft.com/en-us/download/dotnet/8.0>
Expand Down
7 changes: 5 additions & 2 deletions Ultima/Helpers/MythicDecompress.cs
Original file line number Diff line number Diff line change
Expand Up @@ -129,11 +129,14 @@ public static byte[] InternalDecompress(Span<byte> input)

return output;
}
catch (Exception ex)
catch (InvalidDataException)
{
Console.WriteLine($"Error during decompression: {ex.Message}");
throw;
}
catch (Exception ex)
{
throw new InvalidDataException("Mythic decompression failed: " + ex.Message, ex);
}
}

//
Expand Down
187 changes: 150 additions & 37 deletions Ultima/StringList.cs
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,13 @@ public sealed class StringList
public List<StringEntry> Entries { get; private set; }
public string Language { get; }

/// <summary>
/// Non-null when the file was loaded but parsing did not consume the full file cleanly
/// (e.g. a malformed entry). Contains a human-readable description of where parsing failed
/// and how many entries were salvaged. Caller should surface this to the user.
/// </summary>
public string LoadWarning { get; private set; }

private Dictionary<int, string> _stringTable;
private Dictionary<int, StringEntry> _entryTable;
private static byte[] _buffer = new byte[1024];
Expand Down Expand Up @@ -56,60 +63,166 @@ private void LoadEntry(string path)
byte[] buffer = new byte[fileStream.Length];
_ = fileStream.Read(buffer, 0, buffer.Length);

if (!TryParse(buffer, _decompress))
ParseResult primary = TryParse(buffer, _decompress);
if (primary.Success)
{
bool fallback = !_decompress;
if (!TryParse(buffer, fallback))
{
throw new InvalidDataException($"Failed to parse cliloc file '{path}' in either compressed or uncompressed format.");
}
_decompress = fallback;
Apply(primary);
return;
}

ParseResult fallback = TryParse(buffer, !_decompress);
if (fallback.Success)
{
_decompress = !_decompress;
Apply(fallback);
return;
}

// Both attempts failed. Prefer whichever extracted more entries — that's the format
// the file was actually in, just with a corrupt section somewhere.
bool keepPrimary = primary.EntriesParsed >= fallback.EntriesParsed;
ParseResult best = keepPrimary ? primary : fallback;
if (!keepPrimary)
{
_decompress = !_decompress;
}

if (best.EntriesParsed > 0)
{
Apply(best);
LoadWarning =
$"Cliloc '{path}' parsed partially as {FormatLabel(_decompress)}: " +
$"{best.EntriesParsed} entries recovered before parsing failed. {best.ErrorMessage}";
return;
}

throw new InvalidDataException(
$"Failed to parse cliloc file '{path}' in either compressed or uncompressed format." +
$"{Environment.NewLine} As {FormatLabel(_decompress)}: {primary.ErrorMessage}" +
$"{Environment.NewLine} As {FormatLabel(!_decompress)}: {fallback.ErrorMessage}");
}

private bool TryParse(byte[] buffer, bool decompress)
/// <summary>
/// Commits a parse attempt's output to the instance's public state: the entry list,
/// the number→text and number→entry lookup tables, and the two file-header values.
/// Called only with a successful result, or with the best partial result when both
/// compressed and uncompressed parses failed (salvage path in LoadEntry).
/// </summary>
private void Apply(ParseResult result)
{
Entries = result.Entries;
_stringTable = result.StringTable;
_entryTable = result.EntryTable;
_header1 = result.Header1;
_header2 = result.Header2;
}

private static string FormatLabel(bool decompress) => decompress ? "compressed" : "uncompressed";

/// <summary>
/// Outcome of a single TryParse attempt over a cliloc buffer. When Success is false,
/// ErrorMessage describes where parsing stopped and EntriesParsed reports how many
/// entries were recovered before the failure, so the caller can pick the better of
/// the compressed/uncompressed attempts and surface a partial-load warning.
/// </summary>
private struct ParseResult
{
public bool Success; // true only when the entire file was consumed cleanly
public int EntriesParsed; // number of entries recovered (may be > 0 even on failure)
public List<StringEntry> Entries;
public Dictionary<int, string> StringTable; // cliloc number -> text
public Dictionary<int, StringEntry> EntryTable; // cliloc number -> full entry
public int Header1; // first 4 header bytes of the cliloc file
public short Header2; // next 2 header bytes
public string ErrorMessage; // null on success; human-readable failure description otherwise
}

private static ParseResult TryParse(byte[] buffer, bool decompress)
{
var result = new ParseResult
{
Entries = new List<StringEntry>(),
StringTable = new Dictionary<int, string>(),
EntryTable = new Dictionary<int, StringEntry>(),
};

byte[] clilocData;
try
{
byte[] clilocData = decompress ? MythicDecompress.Decompress(buffer) : buffer;
clilocData = decompress ? MythicDecompress.Decompress(buffer) : buffer;
}
catch (Exception ex)
{
result.ErrorMessage = $"decompression failed: {ex.Message}";
return result;
}

var entries = new List<StringEntry>();
var stringTable = new Dictionary<int, string>();
var entryTable = new Dictionary<int, StringEntry>();
// Header is 4 + 2 bytes.
if (clilocData.Length < 6)
{
result.ErrorMessage = $"file is {clilocData.Length} bytes, smaller than the 6-byte header.";
return result;
}

using var reader = new BinaryReader(new MemoryStream(clilocData));
_header1 = reader.ReadInt32();
_header2 = reader.ReadInt16();
using var stream = new MemoryStream(clilocData);
using var reader = new BinaryReader(stream);
result.Header1 = reader.ReadInt32();
result.Header2 = reader.ReadInt16();

while (reader.BaseStream.Length != reader.BaseStream.Position)
int lastNumber = -1;
while (stream.Position < stream.Length)
{
long entryStart = stream.Position;
long remaining = stream.Length - entryStart;

// Each entry header is 4 (number) + 1 (flag) + 2 (length) = 7 bytes.
if (remaining < 7)
{
int number = reader.ReadInt32();
byte flag = reader.ReadByte();
int length = reader.ReadInt16();
result.ErrorMessage =
$"unexpected {remaining} trailing byte(s) at offset 0x{entryStart:X} after entry #{lastNumber}; " +
$"need 7 bytes for the next entry header.";
return result;
}

if (length > _buffer.Length)
{
_buffer = new byte[(length + 1023) & ~1023];
}
int number = reader.ReadInt32();
byte flag = reader.ReadByte();
// Writer emits ushort; reading as signed Int16 truncates strings ≥32768 bytes to a negative length.
int length = reader.ReadUInt16();

reader.Read(_buffer, 0, length);
string text = Encoding.UTF8.GetString(_buffer, 0, length);
long bodyRemaining = stream.Length - stream.Position;
if (length > bodyRemaining)
{
result.ErrorMessage =
$"entry #{number} at offset 0x{entryStart:X} declares length {length}, " +
$"but only {bodyRemaining} byte(s) remain in the file " +
$"(previous entry was #{lastNumber}, parsed {result.EntriesParsed} so far).";
return result;
}

var se = new StringEntry(number, text, flag);
entries.Add(se);
stringTable[number] = text;
entryTable[number] = se;
if (length > _buffer.Length)
{
_buffer = new byte[(length + 1023) & ~1023];
}

Entries = entries;
_stringTable = stringTable;
_entryTable = entryTable;
return true;
}
catch
{
return false;
int read = reader.Read(_buffer, 0, length);
if (read != length)
{
result.ErrorMessage =
$"entry #{number} at offset 0x{entryStart:X} expected {length} body byte(s) " +
$"but only {read} were available.";
return result;
}

string text;
try
{
text = Encoding.UTF8.GetString(_buffer, 0, length);
}
catch (Exception ex)
{
result.ErrorMessage =
$"entry #{number} at offset 0x{entryStart:X} has {length} body bytes that are not valid UTF-8: {ex.Message}";
return result;
}

var se = new StringEntry(number, text, flag);
result.Entries.Add(se);
result.StringTable[number] = text;
result.EntryTable[number] = se;
result.EntriesParsed++;
lastNumber = number;
}

result.Success = true;
return result;
}

/// <summary>
Expand Down
6 changes: 6 additions & 0 deletions UoFiddler.Controls/UserControls/ClilocControl.cs
Original file line number Diff line number Diff line change
Expand Up @@ -69,6 +69,12 @@ private int Lang
_cliloc = new StringList("custom2", false);
break;
}

if (!string.IsNullOrEmpty(_cliloc?.LoadWarning))
{
MessageBox.Show(this, _cliloc.LoadWarning, "Cliloc parsed with warnings",
MessageBoxButtons.OK, MessageBoxIcon.Warning);
}
}
}

Expand Down
69 changes: 64 additions & 5 deletions UoFiddler.Plugin.UopPacker/Classes/LegacyMulFileConverter.cs
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ private static BinaryWriter OpenOutput(string path)
//
// MUL -> UOP
//
public static void ToUop(string inFile, string inFileIdx, string outFile, FileType type, int typeIndex, CompressionFlag compressionFlag = CompressionFlag.None, string housingBinFile = "")
public static void ToUop(string inFile, string inFileIdx, string outFile, FileType type, int typeIndex, CompressionFlag compressionFlag = CompressionFlag.None, string housingBinFile = "", IProgress<int> progress = null)
{
// Same for all UOP files
const long firstTable = 0x200;
Expand Down Expand Up @@ -163,6 +163,10 @@ public static void ToUop(string inFile, string inFileIdx, string outFile, FileTy

string[] hashFormat = GetHashFormat(type, typeIndex, out int _);

int totalEntries = idxEntries.Count;
int lastReportedPct = -1;
progress?.Report(0);

for (int i = 0; i < tableCount; ++i)
{
long thisTable = writer.BaseStream.Position;
Expand Down Expand Up @@ -308,6 +312,16 @@ public static void ToUop(string inFile, string inFileIdx, string outFile, FileTy
tableEntries[tableIdx].Hash = HashAdler32(data);
writer.Write(data);
}

if (totalEntries > 0)
{
int pct = (j + 1) * 100 / totalEntries;
if (pct != lastReportedPct)
{
lastReportedPct = pct;
progress?.Report(pct);
}
}
}

long nextTable = writer.BaseStream.Position;
Expand Down Expand Up @@ -354,7 +368,7 @@ public static void ToUop(string inFile, string inFileIdx, string outFile, FileTy
//
// UOP -> MUL
//
public void FromUop(string inFile, string outFile, string outFileIdx, FileType type, int typeIndex, string housingBinFile = "")
public void FromUop(string inFile, string outFile, string outFileIdx, FileType type, int typeIndex, string housingBinFile = "", IProgress<int> progress = null)
{
Dictionary<ulong, int> chunkIds = new Dictionary<ulong, int>();
Dictionary<ulong, int> chunkIds2 = new Dictionary<ulong, int>();
Expand Down Expand Up @@ -391,6 +405,11 @@ public void FromUop(string inFile, string outFile, string outFileIdx, FileType t
reader.ReadInt32(); // format timestamp? 0xFD23EC43

long nextTable = reader.ReadInt64();
reader.ReadInt32(); // table size (unused)
int totalFileCount = reader.ReadInt32();
int processedCount = 0;
int lastReportedPct = -1;
progress?.Report(0);

do
{
Expand All @@ -414,7 +433,8 @@ public void FromUop(string inFile, string outFile, string outFileIdx, FileType t
offsets[i].DecompressedSize = reader.ReadInt32(); // decompressed size
offsets[i].Identifier = reader.ReadUInt64(); // filename hash (HashLittle2)
offsets[i].Hash = reader.ReadUInt32(); // data hash (Adler32)
offsets[i].Compressed = reader.ReadInt16() != 0; // compression method (0 = none, 1 = zlib)
offsets[i].CompressionFlag = reader.ReadInt16(); // compression method (0 = none, 1 = zlib, 3 = mythic)
offsets[i].Compressed = offsets[i].CompressionFlag != 0;
}

// Copy chunks
Expand Down Expand Up @@ -452,6 +472,17 @@ public void FromUop(string inFile, string outFile, string outFileIdx, FileType t
writerBin.Write(binDataToWrite, 0, binDataToWrite.Length);
}

if (totalFileCount > 0)
{
++processedCount;
int pct = processedCount * 100 / totalFileCount;
if (pct != lastReportedPct)
{
lastReportedPct = pct;
progress?.Report(pct);
}
}

continue;
}

Expand Down Expand Up @@ -480,6 +511,11 @@ public void FromUop(string inFile, string outFile, string outFileIdx, FileType t
chunkData = decompressed;
}

if (offsets[i].CompressionFlag == (short)CompressionFlag.Mythic)
{
chunkData = MythicDecompress.Decompress(chunkData);
}

if (type == FileType.MapLegacyMul)
{
// Write this chunk on the right position (no IDX file to point to it)
Expand Down Expand Up @@ -541,6 +577,17 @@ public void FromUop(string inFile, string outFile, string outFileIdx, FileType t
mulWriter.Write(chunkData, dataOffset, chunkData.Length - dataOffset);
}
}

if (totalFileCount > 0)
{
++processedCount;
int pct = processedCount * 100 / totalFileCount;
if (pct != lastReportedPct)
{
lastReportedPct = pct;
progress?.Report(pct);
}
}
}

// Move to next table
Expand All @@ -551,10 +598,22 @@ public void FromUop(string inFile, string outFile, string outFileIdx, FileType t
}
while (nextTable != 0);

// Fix index
// Fix index. Only pad up to the highest used entry — `used.Length` is the hash-lookup
// upper bound (often 0x7FFFF), which would otherwise produce a multi-megabyte idx file
// padded with sentinel rows beyond any real entry.
if (idxWriter != null)
{
for (int i = 0; i < used.Length; ++i)
int padCount = 0;
for (int i = used.Length - 1; i >= 0; --i)
{
if (used[i])
{
padCount = i + 1;
break;
}
}

for (int i = 0; i < padCount; ++i)
{
if (used[i])
{
Expand Down
Loading