diff --git a/BMGEditor.sln b/BMGEditor.sln
index 7d8febb..013d5bb 100644
--- a/BMGEditor.sln
+++ b/BMGEditor.sln
@@ -8,13 +8,19 @@ EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
+ Debug|x64 = Debug|x64
Release|Any CPU = Release|Any CPU
+ Release|x64 = Release|x64
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{E7F50E37-2F2C-4918-80E0-E23D484875B8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{E7F50E37-2F2C-4918-80E0-E23D484875B8}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {E7F50E37-2F2C-4918-80E0-E23D484875B8}.Debug|x64.ActiveCfg = Debug|x64
+ {E7F50E37-2F2C-4918-80E0-E23D484875B8}.Debug|x64.Build.0 = Debug|x64
{E7F50E37-2F2C-4918-80E0-E23D484875B8}.Release|Any CPU.ActiveCfg = Release|Any CPU
{E7F50E37-2F2C-4918-80E0-E23D484875B8}.Release|Any CPU.Build.0 = Release|Any CPU
+ {E7F50E37-2F2C-4918-80E0-E23D484875B8}.Release|x64.ActiveCfg = Release|x64
+ {E7F50E37-2F2C-4918-80E0-E23D484875B8}.Release|x64.Build.0 = Release|x64
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
diff --git a/BMGEditor/BMGEditor.csproj b/BMGEditor/BMGEditor.csproj
index b57c89e..78ecb4d 100644
--- a/BMGEditor/BMGEditor.csproj
+++ b/BMGEditor/BMGEditor.csproj
@@ -5,7 +5,8 @@
    <TargetFramework>net6.0-windows</TargetFramework>
    <Nullable>enable</Nullable>
    <UseWindowsForms>true</UseWindowsForms>
-    <ImplicitUsings>enable</ImplicitUsings>
+    <ImplicitUsings>disable</ImplicitUsings>
+    <Platforms>AnyCPU;x64</Platforms>
\ No newline at end of file
diff --git a/BMGEditor/Bcsv.cs b/BMGEditor/Bcsv.cs
new file mode 100644
index 0000000..4395b43
--- /dev/null
+++ b/BMGEditor/Bcsv.cs
@@ -0,0 +1,362 @@
+using System;
+using System.Collections;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+
+namespace BMGEditor
+{
public class Bcsv
{
    // A BCSV (binary CSV) table: a 0x10-byte header, an array of 12-byte
    // field descriptors, fixed-size entry records, then a string table.
    // Field names are stored only as 32-bit hashes (see FieldNameToHash).

    // Parses the whole table from the given file.
    public Bcsv(FileBase file)
    {
        m_File = file;
        m_File.BigEndian = true;
        // NOTE(review): "shift-jis" requires CodePagesEncodingProvider to be
        // registered on .NET 6 — confirm the application does this at startup.
        m_File.Encoding = Encoding.GetEncoding("shift-jis");

        Fields = new Dictionary<uint, Field>();
        Entries = new List<Entry>();

        m_File.Stream.Position = 0;
        uint entrycount = m_File.Reader.ReadUInt32();
        uint fieldcount = m_File.Reader.ReadUInt32();
        uint dataoffset = m_File.Reader.ReadUInt32();
        uint entrydatasize = m_File.Reader.ReadUInt32();

        // string data begins immediately after the last entry record
        uint stringtableoffset = (uint)(dataoffset + (entrycount * entrydatasize));

        for (uint i = 0; i < fieldcount; i++)
        {
            Field field = new Field();
            m_File.Stream.Position = 0x10 + (0xC * i);

            field.NameHash = m_File.Reader.ReadUInt32();
            field.Mask = m_File.Reader.ReadUInt32();
            field.EntryOffset = m_File.Reader.ReadUInt16();
            field.ShiftAmount = m_File.Reader.ReadByte();
            field.Type = m_File.Reader.ReadByte();

            field.Name = Bcsv.HashToFieldName(field.NameHash);
            Fields.Add(field.NameHash, field);
        }

        for (uint i = 0; i < entrycount; i++)
        {
            Entry entry = new Entry();

            foreach (Field field in Fields.Values)
            {
                m_File.Stream.Position = dataoffset + (i * entrydatasize) + field.EntryOffset;

                object val = null;
                switch (field.Type)
                {
                    case 0: // 32-bit value, masked and shifted
                    case 3:
                        val = (uint)((m_File.Reader.ReadUInt32() & field.Mask) >> field.ShiftAmount);
                        break;

                    case 4: // 16-bit value, masked and shifted
                        val = (ushort)((m_File.Reader.ReadUInt16() & field.Mask) >> field.ShiftAmount);
                        break;

                    case 5: // 8-bit value, masked and shifted
                        val = (byte)((m_File.Reader.ReadByte() & field.Mask) >> field.ShiftAmount);
                        break;

                    case 2: // 32-bit float
                        val = m_File.Reader.ReadSingle();
                        break;

                    case 6: // offset into the string table
                        uint str_offset = m_File.Reader.ReadUInt32();
                        m_File.Stream.Position = stringtableoffset + str_offset;
                        val = m_File.ReadString();
                        break;

                    default:
                        throw new NotImplementedException("Bcsv: unsupported data type " + field.Type.ToString());
                }

                entry.Add(field.NameHash, val);
            }

            Entries.Add(entry);
        }
    }

    // Serializes the table back into the underlying file.
    public void Flush()
    {
        // byte width of each field type (index 1 is unused/invalid)
        int[] datasizes = { 4, -1, 4, 4, 2, 1, 4 };
        uint entrysize = 0;

        // an entry record must reach to the end of its furthest field
        foreach (Field field in Fields.Values)
        {
            ushort fieldend = (ushort)(field.EntryOffset + datasizes[field.Type]);
            if (fieldend > entrysize) entrysize = fieldend;
        }

        uint dataoffset = (uint)(0x10 + (0xC * Fields.Count));
        uint stringtableoffset = (uint)(dataoffset + (Entries.Count * entrysize));
        uint curstring = 0;

        m_File.Stream.SetLength(stringtableoffset);

        m_File.Stream.Position = 0;
        m_File.Writer.Write((uint)Entries.Count);
        m_File.Writer.Write((uint)Fields.Count);
        m_File.Writer.Write(dataoffset);
        m_File.Writer.Write(entrysize);

        foreach (Field field in Fields.Values)
        {
            m_File.Writer.Write(field.NameHash);
            m_File.Writer.Write(field.Mask);
            m_File.Writer.Write(field.EntryOffset);
            m_File.Writer.Write(field.ShiftAmount);
            m_File.Writer.Write(field.Type);
        }

        int i = 0;
        // remembers emitted strings so identical values share one table slot
        Dictionary<string, uint> stringoffsets = new Dictionary<string, uint>();

        foreach (Entry entry in Entries)
        {
            foreach (Field field in Fields.Values)
            {
                uint valoffset = (uint)(dataoffset + (i * entrysize) + field.EntryOffset);
                m_File.Stream.Position = valoffset;

                switch (field.Type)
                {
                    case 0:
                    case 3:
                        {
                            // read-modify-write so bits outside Mask (other
                            // packed fields sharing this offset) are preserved
                            uint val = m_File.Reader.ReadUInt32();
                            val &= ~field.Mask;
                            val |= (((uint)entry[field.NameHash] << field.ShiftAmount) & field.Mask);

                            m_File.Stream.Position = valoffset;
                            m_File.Writer.Write(val);
                        }
                        break;

                    case 4:
                        {
                            ushort val = m_File.Reader.ReadUInt16();
                            val &= (ushort)(~field.Mask);
                            val |= (ushort)(((ushort)entry[field.NameHash] << field.ShiftAmount) & field.Mask);

                            m_File.Stream.Position = valoffset;
                            m_File.Writer.Write(val);
                        }
                        break;

                    case 5:
                        {
                            byte val = m_File.Reader.ReadByte();
                            val &= (byte)(~field.Mask);
                            val |= (byte)(((byte)entry[field.NameHash] << field.ShiftAmount) & field.Mask);

                            m_File.Stream.Position = valoffset;
                            m_File.Writer.Write(val);
                        }
                        break;

                    case 2:
                        m_File.Writer.Write((float)entry[field.NameHash]);
                        break;

                    case 6:
                        {
                            string val = (string)entry[field.NameHash];
                            if (stringoffsets.ContainsKey(val))
                                m_File.Writer.Write(stringoffsets[val]);
                            else
                            {
                                stringoffsets.Add(val, curstring);
                                m_File.Writer.Write(curstring);
                                m_File.Stream.Position = stringtableoffset + curstring;
                                curstring += (uint)m_File.WriteString(val);
                            }
                        }
                        break;
                }
            }

            i++;
        }

        m_File.Flush();
    }

    public void Close()
    {
        m_File.Close();
    }


    // Adds a field descriptor (and a default value to every existing entry).
    // offset == -1 means "append after the current last field".
    public Field AddField(string name, int offset, byte type, uint mask, int shift, object defaultval)
    {
        int[] datasizes = { 4, -1, 4, 4, 2, 1, 4 };

        AddHash(name); // register the name so HashToFieldName can resolve it

        if (type == 2 || type == 6)
        {
            // floats and strings are never bit-packed
            mask = 0xFFFFFFFF;
            shift = 0;
        }

        if (offset == -1)
        {
            // place the new field after the furthest existing one; start at 0
            // so an empty field list doesn't produce (ushort)(-1) == 0xFFFF
            offset = 0;
            foreach (Field field in Fields.Values)
            {
                ushort fieldend = (ushort)(field.EntryOffset + datasizes[field.Type]);
                if (fieldend > offset) offset = fieldend;
            }
        }

        Field newfield = new Field();
        newfield.Name = name;
        newfield.NameHash = Bcsv.FieldNameToHash(name);
        newfield.Mask = mask;
        newfield.ShiftAmount = (byte)shift;
        newfield.Type = type;
        newfield.EntryOffset = (ushort)offset;
        Fields.Add(newfield.NameHash, newfield);

        foreach (Entry entry in Entries)
        {
            entry.Add(name, defaultval);
        }

        return newfield;
    }

    // Removes a field descriptor and its value from every entry.
    public void RemoveField(string name)
    {
        uint hash = Bcsv.FieldNameToHash(name);
        Fields.Remove(hash);

        foreach (Entry entry in Entries)
        {
            entry.Remove(hash);
        }
    }


    // One column descriptor of the table.
    public class Field
    {
        public uint NameHash;
        public uint Mask;          // bit mask applied to the raw value
        public ushort EntryOffset; // byte offset inside an entry record
        public byte ShiftAmount;   // right-shift applied after masking
        public byte Type;          // 0/3=u32, 4=u16, 5=u8, 2=float, 6=string

        public string Name;        // resolved for debugging; may be "[hash]"
    }

    // One row of the table, keyed by field-name hash.
    public class Entry : Dictionary<uint, object>
    {
        public Entry()
            : base()
        { }

        // Convenience indexer addressing values by field name.
        public object this[string key]
        {
            get
            {
                return this[Bcsv.FieldNameToHash(key)];
            }
            set
            {
                this[Bcsv.FieldNameToHash(key)] = value;
            }
        }

        public void Add(string key, object val)
        {
            this.Add(Bcsv.FieldNameToHash(key), val);
        }

        public bool ContainsKey(string key)
        {
            return this.ContainsKey(Bcsv.FieldNameToHash(key));
        }

        public override string ToString()
        {
            string str = "BcsvEntry:";

            foreach (KeyValuePair<uint, object> field in this)
            {
                str += " [" + field.Key.ToString("X8");
                if (Bcsv.m_HashTable.ContainsKey(field.Key))
                    str += " (" + Bcsv.HashToFieldName(field.Key) + ")";
                str += "]=[" + field.Value.ToString() + "]";
            }

            return str;
        }
    }


    private FileBase m_File;

    public Dictionary<uint, Field> Fields;
    public List<Entry> Entries;


    // Field name hash support functions
    // the hash->string table is meant for debugging purposes and
    // shouldn't be used by proper code

    // h = h * 31 + ch over the characters of the name.
    public static uint FieldNameToHash(string field)
    {
        uint ret = 0;
        foreach (char ch in field)
        {
            ret *= 0x1F;
            ret += ch;
        }
        return ret;
    }

    // Returns the known name for a hash, or "[XXXXXXXX]" when unknown.
    public static string HashToFieldName(uint hash)
    {
        if (!m_HashTable.ContainsKey(hash))
            return string.Format("[{0:X8}]", hash);

        return m_HashTable[hash];
    }

    public static void AddHash(string field)
    {
        uint hash = FieldNameToHash(field);
        if (!m_HashTable.ContainsKey(hash))
            m_HashTable.Add(hash, field);
    }

    public static void PopulateHashtable()
    {
        m_HashTable = new Dictionary<uint, string>();

        // TODO(review): this was presumably meant to be fed from a field-name
        // list resource; the original "new string[1]" left a null element
        // that crashed on Trim(). Kept empty until a real source is wired up.
        string[] lines = Array.Empty<string>();
        foreach (string _line in lines)
        {
            string line = _line.Trim();

            if (line.Length == 0) continue;
            if (line[0] == '#') continue;

            AddHash(line);
        }
    }

    // Initialized inline so lookups work even if PopulateHashtable()
    // was never called.
    public static Dictionary<uint, string> m_HashTable = new Dictionary<uint, string>();
}
+}
diff --git a/BMGEditor/BigEndian.cs b/BMGEditor/BigEndian.cs
new file mode 100644
index 0000000..ca1569f
--- /dev/null
+++ b/BMGEditor/BigEndian.cs
@@ -0,0 +1,113 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.IO;
+
+namespace BMGEditor
+{
// BinaryReader that decodes multi-byte primitives stored big-endian.
public class BinaryReaderBE : BinaryReader
{
    public BinaryReaderBE(Stream s)
        : base(s)
    { }

    public BinaryReaderBE(Stream s, Encoding e)
        : base(s, e)
    { }


    // Signed reads reuse the unsigned byte-swapping overloads.
    public override short ReadInt16() => unchecked((short)ReadUInt16());

    public override int ReadInt32() => unchecked((int)ReadUInt32());


    public override ushort ReadUInt16()
    {
        ushort raw = base.ReadUInt16();
        return (ushort)((raw << 8) | (raw >> 8));
    }

    public override uint ReadUInt32()
    {
        uint raw = base.ReadUInt32();
        return (raw << 24)
             | ((raw & 0x0000FF00u) << 8)
             | ((raw & 0x00FF0000u) >> 8)
             | (raw >> 24);
    }


    public override float ReadSingle()
    {
        // swap the raw bytes, then reinterpret
        byte[] raw = base.ReadBytes(4);
        if (BitConverter.IsLittleEndian) Array.Reverse(raw);
        return BitConverter.ToSingle(raw, 0);
    }

    public override double ReadDouble()
    {
        byte[] raw = base.ReadBytes(8);
        if (BitConverter.IsLittleEndian) Array.Reverse(raw);
        return BitConverter.ToDouble(raw, 0);
    }
}
+
+
// BinaryWriter that encodes multi-byte primitives as big-endian.
public class BinaryWriterBE : BinaryWriter
{
    public BinaryWriterBE(Stream s)
        : base(s)
    { }

    public BinaryWriterBE(Stream s, Encoding e)
        : base(s, e)
    { }


    // Signed writes reuse the unsigned byte-swapping overloads.
    public override void Write(short value) => Write(unchecked((ushort)value));

    public override void Write(int value) => Write(unchecked((uint)value));


    public override void Write(ushort value)
    {
        base.Write((ushort)((value << 8) | (value >> 8)));
    }

    public override void Write(uint value)
    {
        uint swapped = (value << 24)
                     | ((value & 0x0000FF00u) << 8)
                     | ((value & 0x00FF0000u) >> 8)
                     | (value >> 24);
        base.Write(swapped);
    }


    public override void Write(float value)
    {
        // reinterpret, then emit the bytes most-significant first
        byte[] raw = BitConverter.GetBytes(value);
        if (BitConverter.IsLittleEndian) Array.Reverse(raw);
        base.Write(raw);
    }

    public override void Write(double value)
    {
        byte[] raw = BitConverter.GetBytes(value);
        if (BitConverter.IsLittleEndian) Array.Reverse(raw);
        base.Write(raw);
    }
}
+}
diff --git a/BMGEditor/Compression.cs b/BMGEditor/Compression.cs
new file mode 100644
index 0000000..0cd245a
--- /dev/null
+++ b/BMGEditor/Compression.cs
@@ -0,0 +1,181 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+
+namespace BMGEditor
+{
// Yaz0 LZ-style codec used by first-party GameCube/Wii archive files.
public static class Yaz0
{
    // TODO: put compression in use?
    // note: compression is slow when dealing with large files (eg. 3D models)
    // it should be made optional, and show a progress dialog and all

    // Finds the longest back-reference (>= 3 bytes) for data[pos..] within
    // the preceding 4096-byte window. On return offset is the position of
    // the first longest match, or -1 when none exists; length is its length.
    private static void FindOccurence(byte[] data, int pos, ref int offset, ref int length)
    {
        offset = -1;
        length = 0;

        // a match needs at least 3 bytes of lookahead
        if (pos >= data.Length - 2) return;

        int start = (pos > 4096) ? pos - 4096 : 0;
        for (int i = start; i < pos; i++)
        {
            if (i >= data.Length - 2) break;

            if (data[i] != data[pos] || data[i + 1] != data[pos + 1] || data[i + 2] != data[pos + 2])
                continue;

            int len = 3;
            // the match may overlap pos; that is legal because the
            // decompressor copies byte-by-byte
            while ((i + len < data.Length) && (pos + len < data.Length) && (data[i + len] == data[pos + len]))
                len++;

            // keep the first occurrence with the greatest length
            if (len > length)
            {
                offset = i;
                length = len;
            }
        }
    }

    // Compresses data in place (no-op if it already carries a Yaz0 header).
    public static void Compress(ref byte[] data)
    {
        if (data[0] == 'Y' && data[1] == 'a' && data[2] == 'z' && data[3] == '0')
            return;

        // Worst case: every group of 8 literal bytes costs 9 output bytes
        // (flag byte + 8 literals), plus the 16-byte header. The group count
        // must be rounded UP — with plain data.Length/8, incompressible
        // input whose length is not a multiple of 8 overruns the buffer.
        byte[] output = new byte[16 + data.Length + ((data.Length + 7) / 8)];

        output[0] = (byte)'Y';
        output[1] = (byte)'a';
        output[2] = (byte)'z';
        output[3] = (byte)'0';

        // header stores the decompressed size, big-endian
        uint fullsize = (uint)data.Length;
        output[4] = (byte)(fullsize >> 24);
        output[5] = (byte)(fullsize >> 16);
        output[6] = (byte)(fullsize >> 8);
        output[7] = (byte)fullsize;

        int inpos = 0, outpos = 16;
        // occ_offset doubles as a state flag: -1 = no match, -2 = recompute
        int occ_offset = -1, occ_length = 0;

        while (inpos < fullsize)
        {
            int datastart = outpos + 1;
            byte block = 0;

            // each block byte carries 8 flag bits: 1 = literal, 0 = match
            for (int i = 0; i < 8; i++)
            {
                block <<= 1;

                if (inpos < data.Length)
                {
                    if (occ_offset == -2)
                        FindOccurence(data, inpos, ref occ_offset, ref occ_length);

                    // one-step lookahead: prefer a literal now if the next
                    // position yields a noticeably longer match
                    int next_offset = -1, next_length = 0;
                    FindOccurence(data, inpos + 1, ref next_offset, ref next_length);
                    if (next_length > occ_length + 1) occ_offset = -1;

                    if (occ_offset != -1)
                    {
                        int disp = inpos - occ_offset - 1;
                        if (disp > 4095) throw new Exception("DISP OUT OF RANGE!");

                        if (occ_length > 17)
                        {
                            // 3-byte code: lengths 18..273
                            if (occ_length > 273) occ_length = 273;

                            output[datastart++] = (byte)(disp >> 8);
                            output[datastart++] = (byte)disp;
                            output[datastart++] = (byte)(occ_length - 18);
                        }
                        else
                        {
                            // 2-byte code: lengths 3..17 in the high nibble
                            output[datastart++] = (byte)(((occ_length - 2) << 4) | (disp >> 8));
                            output[datastart++] = (byte)disp;
                        }

                        inpos += occ_length;
                        occ_offset = -2;
                    }
                    else
                    {
                        output[datastart++] = data[inpos++];
                        block |= 0x01;
                    }

                    if (occ_offset != -2)
                    {
                        // reuse the lookahead result for the next position
                        occ_offset = next_offset;
                        occ_length = next_length;
                    }
                }
            }

            output[outpos] = block;
            outpos = datastart;
        }

        Array.Resize(ref data, outpos);
        Array.Resize(ref output, outpos);
        output.CopyTo(data, 0);
    }

    // inspired from http://www.amnoid.de/gc/yaz0.txt
    // Decompresses data in place (no-op without a Yaz0 header).
    public static void Decompress(ref byte[] data)
    {
        if (data[0] != 'Y' || data[1] != 'a' || data[2] != 'z' || data[3] != '0')
            return;

        int fullsize = (data[4] << 24) | (data[5] << 16) | (data[6] << 8) | data[7];
        byte[] output = new byte[fullsize];

        int inpos = 16, outpos = 0;
        while (outpos < fullsize)
        {
            byte block = data[inpos++];

            for (int i = 0; i < 8; i++)
            {
                if ((block & 0x80) != 0)
                {
                    // copy one plain byte
                    output[outpos++] = data[inpos++];
                }
                else
                {
                    // copy N compressed bytes
                    byte b1 = data[inpos++];
                    byte b2 = data[inpos++];

                    int dist = ((b1 & 0xF) << 8) | b2;
                    int copysrc = outpos - (dist + 1);

                    int nbytes = b1 >> 4;
                    if (nbytes == 0) nbytes = data[inpos++] + 0x12;
                    else nbytes += 2;

                    // byte-by-byte so overlapping copies work
                    for (int j = 0; j < nbytes; j++)
                        output[outpos++] = output[copysrc++];
                }

                block <<= 1;
                if (outpos >= fullsize || inpos >= data.Length)
                    break;
            }
        }

        Array.Resize(ref data, fullsize);
        output.CopyTo(data, 0);
    }
}
+}
diff --git a/BMGEditor/FilesystemBase.cs b/BMGEditor/FilesystemBase.cs
new file mode 100644
index 0000000..c598654
--- /dev/null
+++ b/BMGEditor/FilesystemBase.cs
@@ -0,0 +1,107 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.IO;
+
+namespace BMGEditor
+{
// Base class for virtual filesystems; every query throws until a
// subclass (e.g. RarcFilesystem) overrides it.
public class FilesystemBase
{
    public virtual void Close()
    {
    }

    public virtual string[] GetDirectories(string directory) =>
        throw new NotImplementedException("FilesystemBase.GetDirectories()");

    public virtual bool DirectoryExists(string directory) =>
        throw new NotImplementedException("FilesystemBase.DirectoryExists()");


    public virtual string[] GetFiles(string directory) =>
        throw new NotImplementedException("FilesystemBase.GetFiles()");

    public virtual bool FileExists(string filename) =>
        throw new NotImplementedException("FilesystemBase.FileExists()");

    public virtual FileBase OpenFile(string filename) =>
        throw new NotImplementedException("FilesystemBase.OpenFile()");
}
+
// Pairs a Stream with endianness- and encoding-aware reader/writer
// instances; Reader/Writer are rebuilt whenever one of the three
// configuration properties changes.
public class FileBase
{
    public Stream Stream
    {
        get { return m_Stream; }
        set
        {
            m_Stream = value;
            InitRW();
        }
    }

    // When true, Reader/Writer byte-swap multi-byte primitives.
    public bool BigEndian
    {
        get { return m_BigEndian; }
        set
        {
            m_BigEndian = value;
            InitRW();
        }
    }

    // Text encoding used for chars/strings (defaults to ASCII).
    public Encoding Encoding
    {
        get { return m_Encoding; }
        set
        {
            m_Encoding = value;
            InitRW();
        }
    }

    public BinaryReader Reader;
    public BinaryWriter Writer;

    private Stream m_Stream;
    private bool m_BigEndian;
    private Encoding m_Encoding = Encoding.ASCII;

    private void InitRW()
    {
        // Tolerate BigEndian/Encoding being assigned before a stream is
        // attached; BinaryReader/BinaryWriter construction would throw on
        // a null stream. Reader/Writer are built once Stream is set.
        if (m_Stream == null) return;

        Reader = m_BigEndian ? new BinaryReaderBE(m_Stream, m_Encoding) : new BinaryReader(m_Stream, m_Encoding);
        Writer = m_BigEndian ? new BinaryWriterBE(m_Stream, m_Encoding) : new BinaryWriter(m_Stream, m_Encoding);
    }


    // Reads a zero-terminated string at the current position using the
    // configured encoding.
    public string ReadString()
    {
        // StringBuilder avoids O(n^2) string concatenation
        StringBuilder ret = new StringBuilder();
        char c;
        while ((c = Reader.ReadChar()) != '\0')
            ret.Append(c);
        return ret.ToString();
    }

    // Writes str plus a zero terminator; returns the number of bytes written
    // (may exceed str.Length + 1 for multi-byte encodings).
    public int WriteString(string str)
    {
        int oldpos = (int)Stream.Position;

        foreach (char c in str)
            Writer.Write(c);
        Writer.Write('\0');

        return (int)(Stream.Position - oldpos);
    }


    public virtual void Flush()
    {
        m_Stream.Flush();
    }

    public virtual void Close()
    {
        m_Stream.Close();
    }
}
+}
diff --git a/BMGEditor/MainForm.Designer.cs b/BMGEditor/MainForm.Designer.cs
index 42646b6..592fd0d 100644
--- a/BMGEditor/MainForm.Designer.cs
+++ b/BMGEditor/MainForm.Designer.cs
@@ -1,4 +1,8 @@
-namespace BMGEditor
+using System;
+using System.Collections;
+using System.Windows.Forms;
+
+namespace BMGEditor
{
partial class MainForm
{
diff --git a/BMGEditor/RarcFilesystem.cs b/BMGEditor/RarcFilesystem.cs
new file mode 100644
index 0000000..78edd52
--- /dev/null
+++ b/BMGEditor/RarcFilesystem.cs
@@ -0,0 +1,270 @@
+using System;
+using System.Collections;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.IO;
+
+namespace BMGEditor
+{
// Read/write view of a RARC archive (transparently Yaz0-decompressed).
public class RarcFilesystem : FilesystemBase
{
    public RarcFilesystem(FileBase file)
    {
        m_File = file;
        // Yaz0Stream passes uncompressed data through unchanged
        m_File.Stream = new Yaz0Stream(m_File.Stream);
        m_File.BigEndian = true;

        m_File.Stream.Position = 0;
        uint tag = m_File.Reader.ReadUInt32();
        if (tag != 0x52415243) throw new Exception("File isn't a RARC (tag 0x" + tag.ToString("X8") + ", expected 0x52415243)");

        // header: various section offsets are relative to 0x20
        m_File.Stream.Position = 0xC;
        m_FileDataOffset = m_File.Reader.ReadUInt32() + 0x20;
        m_File.Stream.Position = 0x20;
        m_NumDirNodes = m_File.Reader.ReadUInt32();
        m_DirNodesOffset = m_File.Reader.ReadUInt32() + 0x20;
        m_File.Stream.Position += 0x4;
        m_FileEntriesOffset = m_File.Reader.ReadUInt32() + 0x20;
        m_File.Stream.Position += 0x4;
        m_StringTableOffset = m_File.Reader.ReadUInt32() + 0x20;

        m_DirEntries = new Dictionary<uint, DirEntry>();
        m_FileEntries = new Dictionary<uint, FileEntry>();

        DirEntry root = new DirEntry();
        root.ID = 0;
        root.ParentDir = 0xFFFFFFFF; // sentinel: root has no parent

        m_File.Stream.Position = m_DirNodesOffset + 0x6;
        uint rnoffset = m_File.Reader.ReadUInt16();
        m_File.Stream.Position = m_StringTableOffset + rnoffset;
        root.Name = m_File.ReadString();
        root.FullName = "/" + root.Name;

        m_DirEntries.Add(0, root);

        for (uint i = 0; i < m_NumDirNodes; i++)
        {
            // NOTE(review): assumes node i was already registered as some
            // earlier node's child (standard RARC layout lists children
            // after their parent) — confirm against malformed archives.
            DirEntry parentdir = m_DirEntries[i];

            m_File.Stream.Position = m_DirNodesOffset + (i * 0x10) + 10;

            ushort numentries = m_File.Reader.ReadUInt16();
            uint firstentry = m_File.Reader.ReadUInt32();

            for (uint j = 0; j < numentries; j++)
            {
                uint entryoffset = m_FileEntriesOffset + ((j + firstentry) * 0x14);
                m_File.Stream.Position = entryoffset;

                uint fileid = m_File.Reader.ReadUInt16();
                m_File.Stream.Position += 4;
                uint nameoffset = m_File.Reader.ReadUInt16();
                uint dataoffset = m_File.Reader.ReadUInt32();
                uint datasize = m_File.Reader.ReadUInt32();

                m_File.Stream.Position = m_StringTableOffset + nameoffset;
                string name = m_File.ReadString();
                if (name == "." || name == "..") continue;

                string fullname = parentdir.FullName + "/" + name;

                if (fileid == 0xFFFF)
                {
                    // subdirectory: dataoffset is the child node's index
                    DirEntry d = new DirEntry();
                    d.EntryOffset = entryoffset;
                    d.ID = dataoffset;
                    d.ParentDir = i;
                    d.NameOffset = nameoffset;
                    d.Name = name;
                    d.FullName = fullname;

                    m_DirEntries.Add(dataoffset, d);
                }
                else
                {
                    FileEntry f = new FileEntry();
                    f.EntryOffset = entryoffset;
                    f.ID = fileid;
                    f.ParentDir = i;
                    f.NameOffset = nameoffset;
                    f.DataOffset = dataoffset;
                    f.DataSize = datasize;
                    f.Name = name;
                    f.FullName = fullname;

                    m_FileEntries.Add(fileid, f);
                }
            }
        }
    }

    public override void Close()
    {
        m_File.Close();
    }


    public override bool DirectoryExists(string directory)
    {
        DirEntry dir = m_DirEntries.Values.FirstOrDefault(de => de.FullName.ToLower() == directory.ToLower());
        return dir != null;
    }

    public override string[] GetDirectories(string directory)
    {
        DirEntry dir = m_DirEntries.Values.FirstOrDefault(de => de.FullName.ToLower() == directory.ToLower());
        if (dir == null) return null;
        IEnumerable<DirEntry> subdirs = m_DirEntries.Values.Where(de => de.ParentDir == dir.ID);

        List<string> ret = new List<string>();
        foreach (DirEntry de in subdirs) ret.Add(de.Name);
        return ret.ToArray();
    }


    public override bool FileExists(string filename)
    {
        FileEntry file = m_FileEntries.Values.FirstOrDefault(fe => fe.FullName.ToLower() == filename.ToLower());
        return file != null;
    }

    public override string[] GetFiles(string directory)
    {
        DirEntry dir = m_DirEntries.Values.FirstOrDefault(de => de.FullName.ToLower() == directory.ToLower());
        if (dir == null) return null;
        IEnumerable<FileEntry> files = m_FileEntries.Values.Where(fe => fe.ParentDir == dir.ID);

        List<string> ret = new List<string>();
        foreach (FileEntry fe in files) ret.Add(fe.Name);
        return ret.ToArray();
    }

    public override FileBase OpenFile(string filename)
    {
        FileEntry file = m_FileEntries.Values.FirstOrDefault(fe => fe.FullName.ToLower() == filename.ToLower());
        if (file == null) return null;

        return new RarcFile(this, file.ID);
    }


    // support functions for RarcFile

    // Returns a copy of the raw data of the given archived file.
    public byte[] GetFileContents(RarcFile file)
    {
        FileEntry fe = m_FileEntries[file.ID];

        m_File.Stream.Position = m_FileDataOffset + fe.DataOffset;
        return m_File.Reader.ReadBytes((int)fe.DataSize);
    }

    // Writes the (possibly resized) file back, shifting any data and
    // patching the entry table offsets/sizes as needed.
    public void ReinsertFile(RarcFile file)
    {
        FileEntry fe = m_FileEntries[file.ID];

        uint fileoffset = m_FileDataOffset + fe.DataOffset;
        int oldlength = (int)fe.DataSize;
        int newlength = (int)file.Stream.Length;
        int delta = newlength - oldlength;

        if (newlength != oldlength)
        {
            // shift everything that follows this file's data
            m_File.Stream.Position = fileoffset + oldlength;
            byte[] tomove = m_File.Reader.ReadBytes((int)(m_File.Stream.Length - m_File.Stream.Position));

            m_File.Stream.Position = fileoffset + newlength;
            m_File.Stream.SetLength(m_File.Stream.Length + delta);
            m_File.Writer.Write(tomove);

            fe.DataSize = (uint)newlength;
            m_File.Stream.Position = fe.EntryOffset + 0xC;
            m_File.Writer.Write(fe.DataSize);

            // fix up the data offsets of all entries that were moved
            foreach (FileEntry tofix in m_FileEntries.Values)
            {
                if (tofix.ID == fe.ID) continue;
                if (tofix.DataOffset < (fe.DataOffset + oldlength)) continue;

                tofix.DataOffset = (uint)(tofix.DataOffset + delta);
                m_File.Stream.Position = tofix.EntryOffset + 0x8;
                m_File.Writer.Write(tofix.DataOffset);
            }
        }

        m_File.Stream.Position = fileoffset;
        file.Stream.Position = 0;
        byte[] data = file.Reader.ReadBytes(newlength);
        m_File.Writer.Write(data);

        m_File.Flush();
    }


    private class FileEntry
    {
        public uint EntryOffset; // position of this entry in the entry table

        public uint ID;
        public uint NameOffset;
        public uint DataOffset;  // relative to m_FileDataOffset
        public uint DataSize;

        public uint ParentDir;   // index of the owning directory node

        public string Name;
        public string FullName;
    }

    private class DirEntry
    {
        public uint EntryOffset;

        public uint ID;          // directory node index
        public uint NameOffset;

        public uint ParentDir;   // 0xFFFFFFFF for the root

        public string Name;
        public string FullName;
    }


    private FileBase m_File;

    private uint m_FileDataOffset;
    private uint m_NumDirNodes;
    private uint m_DirNodesOffset;
    private uint m_FileEntriesOffset;
    private uint m_StringTableOffset;

    private Dictionary<uint, FileEntry> m_FileEntries;
    private Dictionary<uint, DirEntry> m_DirEntries;
}
+
+
// An in-memory copy of one archived file; Flush() writes it back
// into the owning RARC filesystem.
public class RarcFile : FileBase
{
    public RarcFile(RarcFilesystem fs, uint id)
    {
        m_FS = fs;
        m_ID = id;

        // load the file's contents into an expandable MemoryStream
        byte[] contents = m_FS.GetFileContents(this);
        Stream = new MemoryStream(contents.Length);
        Writer.Write(contents);
    }

    public override void Flush()
    {
        Stream.Flush();
        m_FS.ReinsertFile(this);
    }


    private readonly RarcFilesystem m_FS;
    private readonly uint m_ID;

    public uint ID { get { return m_ID; } }
}
+}
diff --git a/BMGEditor/Yaz0Stream.cs b/BMGEditor/Yaz0Stream.cs
new file mode 100644
index 0000000..f2709cc
--- /dev/null
+++ b/BMGEditor/Yaz0Stream.cs
@@ -0,0 +1,54 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.IO;
+
+namespace BMGEditor
+{
// A MemoryStream holding the decompressed view of a (possibly
// Yaz0-compressed) backend stream.
public class Yaz0Stream : MemoryStream
{
    public Yaz0Stream(Stream backend)
        : base(1)
    {
        if (backend is Yaz0Stream) throw new Exception("sorry but no");

        m_Backend = backend;

        // slurp the whole backend and decompress it into this stream
        byte[] raw = new byte[m_Backend.Length];
        m_Backend.Position = 0;
        m_Backend.Read(raw, 0, raw.Length);

        Yaz0.Decompress(ref raw);
        Position = 0;
        Write(raw, 0, raw.Length);
    }

    // Writes the contents back to the backend, optionally recompressing.
    public void Flush(bool recompress)
    {
        byte[] snapshot = new byte[Length];
        Position = 0;
        Read(snapshot, 0, snapshot.Length);
        if (recompress) Yaz0.Compress(ref snapshot);

        m_Backend.Position = 0;
        m_Backend.SetLength(snapshot.Length);
        m_Backend.Write(snapshot, 0, snapshot.Length);
        m_Backend.Flush();
    }

    // The default Flush never recompresses.
    public override void Flush() => Flush(false);

    public override void Close()
    {
        m_Backend.Close();
        base.Close();
    }


    private Stream m_Backend;
}
+}