Intrinsic Alchemy 3.2 .igb file parsing code (format version 6 only; versions 5 and 7 might work but are untested). Use in interactive 'python -i' mode to explore the parsed data.
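A quick usage sketch (the script name igbparse.py is taken from the usage() text below; the .igb paths are hypothetical placeholders):

    python -i igbparse.py path/to/file.igb              # load the file, then explore igb, igb.pretty(i), igb.orderedDump() interactively
    python igbparse.py path/to/ndmw.igb extract all     # unpack the .afs archives referenced by ndmw.igb into ./extracted
    python igbparse.py path/to/ndmw.igb dump other.igb  # recursive dump of other.igb, using the first file as the database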
from __future__ import print_function
import struct
import sys
try:
    FileNotFoundError
except NameError:
    FileNotFoundError = OSError
# == UTILS & MEMORY PARSING ==
def xdump(data, p, l, base=16, cols=16, highlight=set(), offs=None, autoskip=True):
    assert base in {2,4,8,16}
    bpc = {2: 1, 4: 2, 8: 3, 16: 4}[base]
    dpb = {2: 8, 4: 4, 8: 3, 16: 2}[base]
    end = p + l
    offs = offs % cols if offs is not None else p % cols
    nextp = p + cols - offs
    canskip = False
    skipped = False
    while p < end:
        if autoskip and canskip and nextp < end and not any(c for c in data[p:nextp].tolist()):
            skipped = "zeros"
            p = nextp
            offs = 0
            nextp = p + cols
            continue
        if autoskip and canskip and nextp < end and not any(c ^ 0xaa for c in data[p:nextp].tolist()):
            skipped = "padding"
            p = nextp
            offs = 0
            nextp = p + cols
            continue
        if skipped:
            print("\x1b[2;37m-* %s omitted *-\x1b[0m" % skipped)
            skipped = False
        sys.stdout.write("%08x:"%(p-offs) + " "*(offs*(dpb+1)))
        op = p
        while p < nextp and p < end:
            sys.stdout.write(" \x1b[7m" if p in highlight else "\x1b[0m ")
            for i in reversed(range(0, 8, bpc)):
                sys.stdout.write("0123456789abcdef"[(data[p:p+1].tolist()[0]>>i)&(base-1)])
            p += 1
        sys.stdout.write("\x1b[0m")
        if nextp > end: sys.stdout.write(" "*((nextp-end)*(dpb+1)))
        sys.stdout.write(" " * (offs+1))
        while op < nextp and op < end:
            c = "\x1b[1;32m%s\x1b[0m" % chr(data[op]) if 0x20 <= data[op] <= 0x7e else "\x1b[1;30m.\x1b[0m" if data[op] == 0 else "\x1b[1;30m~\x1b[0m" if data[op] == 0xaa else "\x1b[47m#\x1b[0m" if data[op] == 0xff else "\x1b[0;36mx\x1b[0m"
            sys.stdout.write(c)
            op += 1
        sys.stdout.write("\n")
        offs = 0
        nextp = p + cols
        canskip = True
    sys.stdout.flush()
# SLOW!!
def findAll(mem, data):
    b = mem.tobytes()
    p = b.find(data)
    while p != -1:
        yield p
        p = b.find(data, p+1)
def findBackRefs(mem, addr):
    return findAll(mem, struct.pack(">I", addr))
def findRandom(mem):
    a, b = list(findBackRefs(mem, 2152444972)) # should be metaobj, actual instance
    assert read_obj(mem, b)[0] == b"igMersenneTwisterRandomNumber"
    return b
def getRNGoffset(mem, offs):
    target, = predictRNG(mem, offs, 1)
    i = 0
    for r in predictRNG():
        if r == target: return i
        i += 1
def rngpretty(index): return "%d× +%03x"%divmod(index, 624)
def predictRNG(mem=None, offs=None, n=None):
    def gen(r):
        while True: yield r.getrandbits(32)
    # standard mersenne twister
    if n is not None:
        return [r for i,r in zip(range(n), predictRNG(mem, offs))]
    if offs is not None:
        _, rngobj = read_obj(mem, offs)
        mti, state = rngobj[b"_mti"], rngobj[b"_mt"]
    else: # initial state
        mti = 624
        state = []
        seed = 0x141e
        for i in range(624):
            t = seed >> 0x10
            seed = (seed*0x10dcd+1)&~(-1<<32)
            t <<= 0x10
            t |= seed >> 0x10
            seed = (seed*0x10dcd+1)&~(-1<<32)
            state.append(t)
        state = tuple(state)
    # fragile, but seems to work?
    import random
    r = random.Random()
    r.setstate((3, state + (mti,), None))
    return gen(r)
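# Example (illustrative; assumes a Dolphin mem1.raw dump has already been loaded into
# `mem`, as done in the __main__ block at the bottom of this file):
#   rng = findRandom(mem)                      # locate the igMersenneTwisterRandomNumber instance
#   print(rngpretty(getRNGoffset(mem, rng)))   # how far the RNG has advanced since the initial seed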
def getfields(mem, offs, ty, forcelist=False, endian=">"):
    offs &= 0x7fffffff
    assert offs < len(mem), "offs %08x mem %08x" % (offs, len(mem))
    l = struct.calcsize(endian + ty)
    res = struct.unpack(endian + ty, mem[offs:offs+l])
    if not forcelist and len(res) == 1: return res[0]
    return res
def getraw(mem, offs, ln):
    offs &= 0x7fffffff
    return mem[offs:offs+ln].tobytes()
def read_u8 (mem, offs): return getfields(mem, offs, "B")
def read_u16(mem, offs): return getfields(mem, offs, "H")
def read_u32(mem, offs): return getfields(mem, offs, "I")
def read_s8 (mem, offs): return getfields(mem, offs, "b")
def read_s16(mem, offs): return getfields(mem, offs, "h")
def read_s32(mem, offs): return getfields(mem, offs, "i")
def findnul(mem, offs):
    i = 0
    while offs + i < len(mem):
        p = mem[offs+i:offs+i+256].tobytes().find(b"\0")
        if p >= 0: return i + p
        i += 256
def read_cstr(mem, offs):
    offs &= 0x7fffffff
    return mem[offs:offs+findnul(mem, offs)].tobytes()
def read_internallist(mem, offs):
    ref, cnt = getfields(mem, offs+8, "II")
    assert cnt < 100000 # arbitrary to prevent ram eatage
    if cnt == 0: return []
    return list(getfields(mem, ref, cnt*"I", True))
def read_metaobj(mem, offs):
    index, mjr, mnr, volatile, persistent, abstract, name, rtsize, fields, children, parent, size, alignment, vtable = getfields(mem, offs, "xxxx xxxx xxxx III BBBx II xxxx I xxxx xxxx II xxxx xxxx xxxx I xxxx xxxx xxxx Hxx I xxxx")
    assert (mjr, mnr) == (1, 0) # exploratory, can be removed
    assert not volatile # exploratory, can be removed
    name = read_cstr(mem, name)
    fields = read_internallist(mem, fields)
    return (name, index, size, rtsize, alignment, fields, parent, vtable, persistent, abstract)
def read_metafield(mem, offs):
    offset, name, tyidx, slot, size, parentmeta, default = getfields(mem, offs, "xxxx xxxx II hhHxx I xxxx I")
    name = read_cstr(mem, name)
    return offset, name, tyidx, slot, size, parentmeta, default
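# Layout sketch implied by the unpack format strings above (offsets are read off the
# format strings themselves, not confirmed against any SDK headers):
#   metaobject: +0x0c index, +0x10/+0x14 major/minor, +0x18 volatile, +0x19 persistent,
#               +0x1a abstract, +0x1c name*, +0x20 rtsize, +0x28 fields*, +0x34 children*,
#               +0x38 parent*, +0x48 size, +0x58 alignment (u16), +0x5c vtable*
#   metafield:  +0x08 offset, +0x0c name*, +0x10 tyidx, +0x12 slot, +0x14 size,
#               +0x18 parentmeta*, +0x20 default*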
def read_obj(mem, offs, loud=False, doall=False):
    meta = parseload(mem, read_u32(mem, read_u32(mem, offs+0) + 4 * 0x16))
    oname, _, size, rtsize, _, fields, parent, vtable, _, _ = read_metaobj(mem, meta)
    parentname = read_cstr(mem, read_u32(mem, parent + 0x1c)) if parent else b"NULL"
    if loud: print((b"%s (sz 0x%x, rt %d) : %s -- %d fields:" % (oname, size, rtsize, parentname, len(fields))).decode("sjis", "backslashreplace"))
    d = {}
    for f in fields:
        fmeta = parseload(mem, read_u32(mem, read_u32(mem, f+0) + 4 * 0x16))
        fname = read_cstr(mem, read_u32(mem, fmeta + 0x1c))
        offset, name, _, _, size, _, _ = read_metafield(mem, f)
        if not doall and offset == 4: continue
        if fname.startswith(b"ig"): fname = fname[2:]
        if fname.endswith(b"MetaField"): fname = fname[:-9]
        if loud:
            print((b"- %s (+0x%02x, %d bytes): %s = " % (name, offset, size, fname)).decode("sjis"), end="")
            sys.stdout.flush()
        tyovr = None
        if fname == b"Float": tyovr = "f"
        if fname == b"Long": tyovr = "q"
        if fname == b"Vec3f": tyovr = 3*"f"
        if fname == b"Vec4f": tyovr = 4*"f"
        if fname == b"Matrix44f": tyovr = 16*"f"
        if fname == b"Double": tyovr = "d"
        if fname == b"UnsignedIntArray": tyovr = (size//4)*"I"
        if fname == b"IntArray": tyovr = (size//4)*"i"
        d[name] = getfields(mem, offs + offset, tyovr or {1: "b", 2: "h", 4: "i"}[size]) if tyovr or size in {1,2,4} else (getraw(mem, offs, size), fname)
        if fname == b"String": d[name] = read_cstr(mem, d[name]) if d[name] else None
        if fname == b"MemoryRef": d[name] = ("mem", d[name]&~(-1<<32), read_u32(mem, fmeta + 56), read_u32(mem, fmeta + 60), read_u32(mem, fmeta + 72))
        if fname == b"ObjectRef": d[name] = ("obj", d[name]&~(-1<<32), read_u32(mem, fmeta + 56))
        if type(d[name]) == int and d[name]&0xfe000000 == 0x80000000:
            d[name]&=~(-1<<32)
            if loud: print(hex(d[name]))
        elif loud and type(d[name]) == tuple and d[name][0] in {"obj", "mem"}:
            l = list(d[name])
            if d[name][0] == "obj":
                if l[1]:
                    ot, o = read_obj(mem, l[1])
                    if is_subclass(mem, l[1], b"igDataList"):
                        l[1] = (hex(l[1]), ot, o[b"_count"])
                    else:
                        l[1] = (hex(l[1]), ot)
                else: l[1] = (0, b"NULL")
            else: l[1] = hex(l[1])
            l[2] = read_obj(mem, l[2])[1][b"_name"]
            print(repr(tuple(l)))
        elif loud:
            print(repr(d[name]))
    return (oname, d)
def read_rawlist(mem, offs, ln, fty, endian=">"):
    s = struct.calcsize(endian+fty)
    #if len(struct.unpack(">"+fty, b"\0"*s)) == 1: return list(getfields(mem, offs, ln*fty))
    return [getfields(mem, offs+i*s, fty, endian=endian) for i in range(ln)]
def is_subclass(mem, offs, clazz):
    meta = parseload(mem, read_u32(mem, read_u32(mem, offs+0) + 4 * 0x16))
    while meta:
        _, mf = read_obj(mem, meta)
        if mf[b"_name"] == clazz: return True
        meta = mf[b"_parent"][1]
    return False
def read_list(mem, offs):
    ty, fields = read_obj(mem, offs)
    if is_subclass(mem, offs, b"igObjectList"): read = "I"
    else:
        assert is_subclass(mem, offs, b"igDataList")
        assert False
    return read_rawlist(mem, fields[b"_data"][1], fields[b"_count"], read)
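# Example (illustrative; 0x80XXXXXX stands in for a real object address, and _dataList is
# just an example ObjectRef field): with a memory dump loaded into `mem`, an object can be
# dumped field-by-field and its lists walked:
#   name, fields = read_obj(mem, 0x80XXXXXX, loud=True)
#   for child in read_list(mem, fields[b"_dataList"][1]): read_obj(mem, child, loud=True)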
r13 = 0x80564f00 ## NOTE: r13 from GDJEB2 (Digimon World 4), necessary to parse raw memory dumps (object meta resolution)
def sext(val, bits=32):
    val&=~(-1<<bits)
    if val>>(bits-1): val^=-1<<bits
    return val
def parseloadaddr(mem, offs):
    inst = read_u32(mem, offs)
    if inst&0xffff0000 == 0x806d0000: # r13 lwz
        offs = inst&0xffff
        if offs&0x8000: offs ^= -1 << 16
        return r13+offs
    elif inst&0xffff0000 == 0x3c600000: # lis, addi, lwz
        inst2 = read_u32(mem, offs+4)
        assert inst2 & 0xffff0000 == 0x38630000
        inst3 = read_u32(mem, offs+8) # suboptimal codegen? Why not just r3-relative lwz instead of previous addi?
        assert inst3 == 0x80630000
        return ((inst & 0xffff) << 16) + sext(inst2 & 0xffff, 16)
    else:
        assert False, "Unknown inst: %08x" % inst
def parseload(mem, offs): return read_u32(mem, parseloadaddr(mem, offs))
databases = {}
def getdb(name, igb=None):
    if not databases:
        if igb is None: igb = globals()["igb"]
        for l in igb.parseList(igb.infoListPlace):
            if igb.refInfo[l][1][1][0] == b"beDBManagerInfo":
                f = igb.getInfoDict(l)
                databases[f["_name"]] = igb.readDataBase(l, True)[1]
    if type(name) != type(""): name = name.decode("sjis")
    return databases.get(name)
def dropreport(mem, offs):
    o = read_obj(mem, offs)
    print(o[1][b"_name"])
    acc = 0
    for f in read_list(mem, o[1][b"_dataList"][1]):
        _, f = read_obj(mem, f)
        db = getdb(f[b"_name"].decode("sjis"))
        rate = f[b"_rate"]
        i = f[b"_id"]
        if db:
            name, txt = db[i].get("NAME"), db[i].get("TXT", db[i].get("UP", "???"))
            print(("% 5d [%05d-%05d]: %s %d %s - %r" % (rate, acc, acc+rate, f[b"_name"].decode("sjis"), i, name, txt)).replace("¶", "×").replace("?","[WPN]").replace("?","[AB1]").replace("?","[AB2]").replace("?","[CHP]").replace("?","[DRK]").replace("?","[ELE]").replace("?","[ICE]").replace("?","[FLM]"))
        else:
            print("% 5d [%05d-%05d]: %r" % (rate, acc, acc+rate, (f[b"_name"].decode("sjis"), i)))
        acc += rate
    return o[1][b"_name"]
dropseen = {2168127096: b'nZ1gobr1', 2168118248: b'hZ1gobr1', 2168130872: b'vZ1gobr1', 2168127560: b'nZ1nume1', 2165758040: b'hZ1nume1', 2168121672: b'vZ1nume1', 2165757032: b'nZ1ogre1', 2168120712: b'hZ1ogre1', 2168122368: b'vZ1ogre1', 2168128616: b'nZ1obje1', 2168120856: b'hZ1obje1', 2168123760: b'vZ1obje1', 2165757720: b'nZ1reizu', 2165756848: b'nZ1gate', 2168130192: b'nZ1bit'}
def dropreportall(mem, offs):
    o = read_obj(mem, offs)
    print(o)
    for s, n, l in zip(["== normal ==", "== hard ==", "== very hard =="], read_list(mem, o[1][b"_itemDataList"][1]), read_list(mem, o[1][b"_itemLastDataList"][1])):
        print(s)
        if n in dropseen: print(dropseen[n], "*")
        else: dropseen[n] = dropreport(mem, n)
        if l in dropseen: print(dropseen[l], "*")
        else: dropseen[l] = dropreport(mem, l)
# == IGB parsing ==
H_entryBufferSize = 0
H_entryCount = 1
H_metaObjBufferSize = 2
H_metaObjCount = 3
H_objBufferSize = 4
H_objCount = 5
H_mrefBufferSize = 6
H_mrefCount = 7
H_mfBufferSize = 8
H_mfCount = 9
H_magicCookie = 10
H_verFlags = 11
UNKFIELDS = set()
class IGB:
    endian = None
    def __init__(this, filename):
        this.filename = filename
        this.data = None
        this.v = None
        this.header = None
        global igb_ref
        igb_ref = this
        # NOTE: parse order is different for V3 and earlier, could handle it but currently unimplemented
        this.open()
        this.readHeader()
        this.readMf()
        this.readAlign()
        this.readMetaObj()
        if this.hasExternal: this.readExternalDirs()
        if this.hasMemoryPoolNames: this.readMemoryPoolNames()
        assert this.sharedEntries
        this.readUniqueEntries()
        this.readIndex()
        this.prepareEntries()
        this.readInfoV4()
        this.readObjects()
        this.readMemoryRefs()
        this.postProcess()
    def open(this):
        with open(this.filename, "rb") as f: this.data = f.read()
        this.v = memoryview(this.data)
    def readHeader(this):
        v = this.v
        if struct.unpack("<I", v[0x28:0x2c])[0] == 0xfada:
            this.endian = "<"
        else:
            this.endian = ">"
        this.header = struct.unpack(this.endian + "I"*12, v[:0x30])
        this.version = this.header[H_verFlags] & 0xffff
        assert this.version < 8 # untested, GDJEB2 makes this check so probably is a (planned) major/incompatible bump
        this.hasInfo = this.header[H_verFlags] & 0x80000000 != 0
        this.hasExternal = this.header[H_verFlags] & 0x40000000 != 0
        this.sharedEntries = this.header[H_verFlags] & 0x20000000 != 0
        this.hasMemoryPoolNames = this.header[H_verFlags] & 0x10000000 != 0
        assert this.version >= 5 or this.hasExternal == False # invalid case, external refs introduced in v5
        this.v = v[0x30:]
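    # The header parsed above is twelve u32 words (0x30 bytes), indexed by the H_* constants;
    # endianness is detected from the 0xfada magic cookie at word H_magicCookie, and the high
    # bits of H_verFlags carry the hasInfo/hasExternal/sharedEntries/hasMemoryPoolNames flags.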
    def readMf(this):
        v = this.v
        this.mfBuf = v[:this.header[H_mfBufferSize]]
        this.v = v[this.header[H_mfBufferSize]:]
        mfCount = this.header[H_mfCount]
        buf = this.mfBuf[:mfCount*0xc]
        bufDyn = this.mfBuf[mfCount*0xc:]
        this.mfList = []
        for i in range(mfCount):
            nameLen, maj, mnr = struct.unpack(this.endian + "III", buf[:0xc])
            name = bufDyn[:nameLen]
            assert name[nameLen-1:nameLen] == b"\0"
            name = bufDyn[:nameLen-1].tobytes()
            buf = buf[0xc:]
            bufDyn = bufDyn[nameLen:]
            shortname = name
            if name.startswith(b"ig"): shortname = shortname[2:]
            if name.endswith(b"MetaField"): shortname = shortname[:-9]
            this.mfList.append((name, (maj, mnr), shortname))
    def readAlign(this):
        l, = struct.unpack(this.endian + "I", this.v[:4])
        this.alignBuf = this.v[:l]
        this.v = this.v[l:]
    def readMetaObj(this):
        this.mObjBuf = this.v[:this.header[H_metaObjBufferSize]]
        this.v = this.v[this.header[H_metaObjBufferSize]:]
        assert this.version >= 4 # V3 has no field info here
        mObjCount = this.header[H_metaObjCount]
        buf = this.mObjBuf[:24 * mObjCount]
        dynBuf = this.mObjBuf[24 * mObjCount:]
        this.mObjList = []
        #this.mfPerObjIndices = []
        #this.mObjRuntimeSlotList = []
        for i in range(mObjCount):
            nameLen, maj, mnr, nfields, parent, slots = struct.unpack(this.endian + "IIIIiI", buf[:24])
            name = dynBuf[:nameLen].tobytes().rstrip(b"\0")
            if maj == 0: maj = 1
            dynBuf = dynBuf[nameLen:]
            fields = []
            for j in range(nfields):
                ty, slot, size = struct.unpack(this.endian + "HHH", dynBuf[:6])
                fields.append((this.mfList[ty][2], ty, slot, size))
                dynBuf = dynBuf[6:]
            this.mObjList.append((name, maj, mnr, fields, (this.mObjList[parent][0], parent) if parent != -1 else None, slots))
            buf = buf[24:]
    def readExternalDirs(this):
        if not this.hasExternal: return
        bufSize, unk, extCount = struct.unpack(this.endian + "III", this.v[:12])
        this.extBuf = this.v[:bufSize]
        this.v = this.v[bufSize:]
        buf = this.extBuf[12:12+4*extCount]
        dynBuf = this.extBuf[12+4*extCount:]
        this.extDirs = []
        for i in range(extCount):
            nameLen, = struct.unpack(this.endian + "I", buf[:4])
            name = dynBuf[:nameLen].tobytes().rstrip(b"\0")
            this.extDirs.append(name)
            buf = buf[4:]
            dynBuf = dynBuf[nameLen:]
    def readMemoryPoolNames(this):
        bufSize, numPools = struct.unpack(this.endian + "II", this.v[:8])
        this.mpoolBuf = this.v[:bufSize]
        this.v = this.v[bufSize:]
        dynBuf = this.mpoolBuf[8:].tobytes()
        this.mpoolNames = []
        for i in range(numPools):
            l = dynBuf.find(b"\0")
            this.mpoolNames.append(dynBuf[:l])
            dynBuf = dynBuf[l+1:]
    def readUniqueEntries(this):
        # for each entry:
        #   makeObject (by mObjIdx)
        #   must be igDirEntry
        #   fields reset/read from stream
        #   call vtable[0x10](obj) (noop?)
        #   if useMemoryPoolAssignmentsState: use memoryPoolHandles[obj.vtable[0x1e]()]
        #     else use -1
        #     to call obj.vtable[x1d]
        #   save to uniqueEntryList sub-igDirectory
        this.entryBuf = this.v[:this.header[H_entryBufferSize]]
        this.v = this.v[this.header[H_entryBufferSize]:]
        dynBuf = this.entryBuf
        this.entries = []
        for i in range(this.header[H_entryCount]):
            entType, entSize = struct.unpack(this.endian + "II", dynBuf[:8])
            entData = dynBuf[8:entSize]
            fieldinfo = this.mObjList[entType][3]
            entFields = []
            this.entries.append((entType, entFields))
            for name,tyidx,slot,size in fieldinfo:
                entFields.append((slot, struct.unpack(this.endian + {4:"i",2:"h",1:"B"}[size], entData[:size])[0], (name, tyidx)))
                assert not any(entData[size:(size+3)&~3].tolist()), repr(entData[size:(size+3)&~3].tobytes())
                entData = entData[(size+3)&~3:]
            assert len(entData) == 0
            dynBuf = dynBuf[entSize:]
        assert len(dynBuf) == 0
    def readIndex(this):
        # for each index:
        #   obj = get unique entry by index
        #   save obj to igb->_data
        bufSize, numIdx = struct.unpack(this.endian + "II", this.v[:8])
        this.idxBuf = this.v[:bufSize]
        this.v = this.v[bufSize:]
        this.index = [i for i in struct.unpack(this.endian + "H"*numIdx, this.idxBuf[8:])]
    def prepareEntries(this):
        # make null igb->_refList
        # for each index:
        #   get entry by index (sets _ref [to null for now], _index)
        #   call vtable[0x1f](obj, igb)
        #   memory:
        #     if has alignmentTypeIndex, uses igb._alignmentList[alignmentTypeIndex]
        #     otherwise, sets memType from igb._metaFieldListInFile[memTypeIndex], gets alignment from that
        #     uses given or default _dataMemoryPool to do aligned alloc of _memSize bytes
        #   object:
        #     sets _typeMeta from igb._metaObjectListInFile[typeMetaIndex] or from _typeMetaName
        #     allocates object from _objectMemoryPool, sets igb ref at _index
        this.refInfo = []
        this.objects = []
        for i in this.index:
            t, f = this.entries[i]
            n = this.mObjList[t][0]
            if n == b"igMemoryDirEntry":
                # _name, _memSize, _memTypeIndex, _refCounted, _alignmentTypeIndex, _memoryPoolHandle
                assert f[0][1] == 0, "_name should be null"
                this.refInfo.append((
                    False,
                    ((f[2][1], this.mObjList[f[2][1]]) if f[2][1] != -1 else None) if f[2][1] < len(this.mObjList) else (f[2][1], b"???"),
                    f[5][1],
                    f[1][1],
                    f[3][1],
                    f[4][1]))
                this.objects.append(None)
            elif n == b"igObjectDirEntry":
                # _name, _typeIndex, _memoryPoolHandle
                assert f[0][1] == 0, "_name should be null"
                this.refInfo.append((
                    True,
                    (f[1][1], this.mObjList[f[1][1]]),
                    f[2][1]))
                this.objects.append(None)
            else:
                assert False, "Incorrect DirEntry type %r" % n
    def readInfoV4(this):
        # if hasInfo
        #   read 4 bytes into _infoListPlace
        #   get entry(_infoListPlace)
        #   set _infoList to obj._ref
        if this.hasInfo:
            this.infoListPlace, = struct.unpack(this.endian + "I", this.v[:4])
            this.v = this.v[4:]
        else:
            this.infoListPlace = None
    def readObjects(this):
        # prepare chunked read for objectBuffer
        # for each index, if is igObjectDirEntry:
        #   get entry, call vtable[x20](obj, igb) # noop
        # for each index, if is igObjectDirEntry:
        #   get entry, call vtable[x22](obj, igb) (x21 is V3)
        this.objBuf = this.v[:this.header[H_objBufferSize]]
        this.v = this.v[this.header[H_objBufferSize]:]
        dynBuf = this.objBuf
        this.backRefs = {}
        this.isObjListData = set()
        for i,r in enumerate(this.refInfo):
            if not r[0]: continue
            isObjList = b"igObjectList" in this.superchain(i)
            entType, entSize = struct.unpack(this.endian + "II", dynBuf[:8])
            assert entType == r[1][0]
            entData = dynBuf[8:entSize]
            fieldinfo = this.mObjList[entType][3]
            entFields = []
            this.objects[i] = entFields
            for name,tyidx,slot,size in fieldinfo:
                ty = None
                if name == b"Long": ty = "q"
                if name == b"UnsignedLong": ty = "Q"
                if name == b"Float": ty = "f"
                if name == b"Double": ty = "d"
                if name == b"Vec3f": ty = 3*"f"
                if name == b"Vec4f": ty = 4*"f"
                if name == b"Matrix44f": ty = 16*"f"
                if name == b"CharArray": ty = str(size)+"s"
                if name == b"UnsignedCharArray": ty = size*"B"
                if name == b"FloatArray": ty = (size//4)*"f"
                if name == b"IntArray": ty = (size//4)*"i"
                if b"Array" in name: assert ty is not None, (i, slot, name, size, entData.tobytes())
                assert ty is not None or size in {1,2,4}, (i, slot, name, size, entData.tobytes())
                val = struct.unpack(this.endian + (ty or {4:"i",2:"h",1:"B"}[size]), entData[:size])
                if len(val) == 1: val, = val
                if name == b"String":
                    size += val
                    val = entData[4:size].tobytes().rstrip(b"\0").decode("sjis")
                if name in [b"ObjectRef", b"MemoryRef"]:
                    if val != -1:
                        assert 0 <= val < len(this.objects)
                        this.backRefs.setdefault(val, set())
                        this.backRefs[val].add(i)
                        if isObjList: this.isObjListData.add(val)
                        elif r[1][1][0] == b"igVertexArray1_1" and slot == 2: this.isObjListData.add(val)
                        #elif b"igDataList" in this.superchain(i): pass
                        #elif name == b"MemoryRef" and (r[1][1][0], slot) not in UNKFIELDS: # XXX: DEBUG
                        #    UNKFIELDS.add((r[1][1][0], slot))
                        #    print(i, r[1][1][0], slot, [f for f in [read_obj(mem,f) for f in read_internallist(mem,read_obj(mem, mObjList[mObjIdx[r[1][1][0]]])[1][b"_metaFields"][1])] if f[1][b"_internalIndex"]==slot][0])
                entFields.append((slot, val, (name, tyidx)))
                assert not any(entData[size:(size+3)&~3].tolist()), (i, name, slot, entData[size:(size+3)&~3].tobytes())
                entData = entData[(size+3)&~3:]
            assert len(entData) == 0
            dynBuf = dynBuf[entSize:]
        assert len(dynBuf) == 0, len(dynBuf)
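    # Note: the unique-entry stream (readUniqueEntries) and the object stream above share the
    # same record shape: a u32 type index into mObjList, a u32 total record size, then each
    # field value padded out to a 4-byte boundary.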
    def readMemoryRefs(this): # igb vtable[x21]
        # reset chunked read for memoryBuffer
        # for each index:
        #   get entry, call vtable[x23](obj, igb)
        this.mrefBuf = this.v[:this.header[H_mrefBufferSize]]
        this.v = this.v[this.header[H_mrefBufferSize]:]
        dynBuf = this.mrefBuf
        for i,r in enumerate(this.refInfo):
            if r[0]: continue
            size = r[3]
            this.objects[i] = dynBuf[:size].tobytes()
            if i in this.isObjListData:
                for r in struct.unpack(this.endian + "i"*(size//4), this.objects[i]):
                    if r == -1: continue
                    assert 0 <= r < len(this.objects)
                    this.backRefs.setdefault(r, set())
                    this.backRefs[r].add(i)
            dynBuf = dynBuf[(size+3)&~3:]
        assert len(dynBuf) == 0, len(dynBuf)
    def postProcess(this):
        # fail files/ndmw.afs.d/602, files/ndmw.afs.d/799 -- no infoList
        s = set(range(len(this.objects))) - set(this.backRefs)
        assert s == {this.infoListPlace} if this.infoListPlace is not None else s == set(), (s, {this.infoListPlace})
        pass
    # END OF PARSING CODE
    def extract(this):
        import os
        basepath, _ = os.path.splitext(this.filename)
        i = this.infoListPlace
        seen = set()
        def rec(i, parent=None, listacc=None):
            if i in seen: return
            seen.add(i)
            k, t, obj = this[i]
            # handle known cases
            #if
            # otherwise, just recurse
            if not k and parent is not None and b"igDataList" in this.superchain(parent):
                d = this.readDataList(parent)
                print("[%d]: %r" % (i, d))
            else:
                print("[%d]: "%i, end="")
            if k:
                for f in obj:
                    if type(f) == tuple and f[2][0] in {b"MemoryRef", b"ObjectRef"}:
                        if f[1] == -1: continue
                        rec(f[1], i)
            elif i in this.isObjListData:
                listacc = {}
                for r in read_rawlist(obj, 0, len(obj)//4, "i", endian=this.endian):
                    if r == -1: continue
                    rec(r, listacc=listacc)
        rec(i)
    def getInfoDict(this, i):
        # igInfo resolution:
        #   igInfo: return field by name match (always 4 bytes)
        #   igNamedObjectInfo: if object with matching name exists, return it, otherwise default to igInfo
        #   igSceneInfo: if scene's _name matches, return _sceneGraph, otherwise default to igInfo
        chain = this.superchain(i)
        assert this.refInfo[i][0]
        assert b"igInfo" in this.superchain(i)
        res = {}
        # FIELD NAMES UNAVAILABLE!!! NEED TO EXTRACT FROM BINARY!!!
        fields = {2: "_name", 3: "_directory", 4: "_resolved"}
        for s,f in enumerate(this.refInfo[i][1][1][3]):
            name = fields.get(f[2])
            if name is not None:
                res[name] = this.objects[i][s][1]
        if b"igSceneInfo" in this.superchain(i):
            res[this.getName(i)] = this.objects[i][2][1]
        if b"igNamedObjectInfo" in this.superchain(i):
            for o in this.parseList(i):
                res[this.getName(o)] = ("ref",o)
        return res
    def readDataList(this, i):
        assert this.refInfo[i][0]
        chain = this.superchain(i)
        assert b"igDataList" in chain
        if b"igNamedObjectList" in chain: return this.parseStringList(i)
        if b"igObjectList" in chain: return this.parseList(i)
        count, cap, ref = [f[1] for f in this.objects[i][:3]]
        assert count == cap
        if count == 0: return []
        dty = this.refInfo[i][1][1][0]
        sty = None
        if dty in {b"igBoolList", b"igCharList"}: sty = "b"
        elif dty == b"igShortList": sty = "h"
        elif dty == b"igUnsignedShortList": sty = "H"
        elif dty == b"igIntList": sty = "i"
        elif dty == b"igLongList": sty = "q"
        elif dty == b"igFloatList": sty = "f"
        elif dty == b"igVec3fList": sty = "fff"
        elif dty == b"igQuaternionfList": sty = "ffff"
        elif dty == b"igBitMask": sty = "I"
        elif dty == b"igCharMetricsList": sty = "20H"
        else: assert False, ("Unknown list type:", dty)
        return read_rawlist(this.objects[ref], 0, count, sty, endian=this.endian) #list(struct.unpack(this.endian + count*sty, this.objects[ref]))
    def readDataBase(this, i, transpose=False):
        if this.refInfo[i][1][1][0] == b"beDBManagerInfo":
            i = this.objects[i][4][1]
        assert this.refInfo[i][0]
        assert this.refInfo[i][1][1][0] == b"beDataBase"
        cn, cd, hn, hd, ln, ld, fn, fd, bn, bd, sn, sd = [this.readDataList(f[1]) for f in this.objects[i]]
        ftypes = {}
        res = {}
        for i,n in enumerate(cn): ftypes[n], res[n] = "char", cd[i::len(cn)]
        for i,n in enumerate(hn): ftypes[n], res[n] = "short", hd[i::len(hn)]
        for i,n in enumerate(ln): ftypes[n], res[n] = "int", ld[i::len(ln)]
        for i,n in enumerate(fn): ftypes[n], res[n] = "float", fd[i::len(fn)]
        for i,n in enumerate(bn): ftypes[n], res[n] = "bool", bd[i::len(bn)]
        for i,n in enumerate(sn): ftypes[n], res[n] = "string", sd[i::len(sn)]
        if transpose:
            keys = list(ftypes.keys())
            resd = []
            for e in zip(*[res[k] for k in keys]):
                resd.append(dict(zip(keys, e)))
            return ftypes, resd
        return ftypes, res
    def __getitem__(this, i):
        return [this.refInfo[i][0], this.refInfo[i][1][1][0], this.objects[i]]
    def superchain(this, i):
        chain = []
        ty = this.refInfo[i][1][0]
        while ty != None:
            chain.append(this.mObjList[ty][0])
            ty = this.mObjList[ty][4]
            if ty is not None: ty = ty[1]
        return chain
    def getName(this, i):
        raw = this[i]
        assert raw[0]
        assert b"igNamedObject" in this.superchain(i)
        return raw[2][0][1]
    def parseList(this, i):
        raw = this[i]
        assert raw[0]
        assert b"igObjectList" in this.superchain(i)
        count, cap, ref = [f[1] for f in raw[2]]
        assert count == cap
        if count == 0: return []
        return list(struct.unpack(this.endian + count*"I", this.objects[ref]))
    def parseStringList(this, i):
        raw = this[i]
        assert raw[0]
        assert raw[1] == b"igNamedObjectList"
        count, capacity, mem = [f[1] for f in raw[2]]
        assert count == capacity
        if count == 0: return []
        return [this.getName(i) for i in struct.unpack(this.endian + count * "I", this[mem][2])]
    def parseFile(this, i):
        raw = this[i]
        assert raw[0]
        assert raw[1] == b"igAdxAfsFile"
        fname, dirlist, archive, idx = [f[1] for f in raw[2]]
        dirlist = this.parseStringList(dirlist) if dirlist != -1 else []
        path = "/".join(dirlist + [fname])
        return (archive, idx, path)
    def parseGenePlayerData(this, i):
        raw = this[i]
        assert raw[0]
        assert raw[1] == b"beGeneraterData"
        gname = raw[2][0][1]
        pdata = raw[2][12][1]
        pdata = [this.objects[p] for p in this.parseList(pdata)]
        pdata = ["%d/%d, %s - %s - %s" % ((p[0][1], p[1][1],) + tuple("%s/%s"%(this.getName(n), this.getName(l)) for n,l in zip(this.readDataList(p[2][1]), this.readDataList(p[3][1])))) for p in pdata]
        return gname, pdata
    def pretty(this, i, dbfile=None, **kwargs):
        raw = this[i]
        if not raw[0]:
            if i in this.isObjListData:
                print("(%r):" % raw[1], read_rawlist(raw[2], 0, len(raw[2])//4, "i", endian=this.endian))
                return
            print("(%r)" % raw[1])
            return xdump(memoryview(raw[2]), 0, len(raw[2]), cols=32)
        assert raw[0]
        f = raw[2]
        if raw[1] == b"igNamedObject":
            print(repr(f[0][1]))
            return
        elif raw[1] == b"igAdxAfsFile":
            print("AFS%d_%04d: %s" % this.parseFile(i))
        elif raw[1] == b"beGeneraterData":
            name, pdata = this.parseGenePlayerData(i)
            print("beGeneraterData: (%s)" % (name,))
            for i,d in enumerate(pdata):
                print(" * %dplayer: %s" % (i+1, d))
            eneId = raw[2][5][1]
            db = getdb("enemy", dbfile)
            if db:
                print(" * mob: %d: %s - %r" % (eneId, db[eneId]["MODEL"], dict(e for e in db[eneId].items() if e[1] not in {0, None})))
        elif raw[1] == b"beGeneraterItemDataOne":
            name, _id, rank, rate = [f[1] for f in raw[2]]
            db = getdb(name, dbfile)
            acc = kwargs.get("listacc")
            if acc is not None:
                accval = acc.get("acc", 0)
                acctext = " [%05d-%05d]" % (accval, accval+rate)
                acc["acc"] = accval+rate
            else:
                acctext = ""
            if db:
                iname, txt = db[_id].get("NAME"), db[_id].get("TXT", db[_id].get("UP", "???"))
                print(("beGeneraterItemDataOne: %5d%s: (%s, %d) %s - %r" % (rate, acctext, name, _id, iname, txt)).replace("¶", "×").replace("?","[WPN]").replace("?","[AB1]").replace("?","[AB2]").replace("?","[CHP]").replace("?","[DRK]").replace("?","[ELE]").replace("?","[ICE]").replace("?","[FLM]"))
            else:
                print("beGeneraterItemDataOne: %5d%s: (%s, %d)" % (rate, acctext, name, _id))
            return
        elif raw[1] == b"igImage":
            name = raw[2][20][1]
            print("igImage: %r" % (name,))
        else:
            print(raw[1].decode("sjis") + ":")
            print("".join(i.decode("sjis") + ":" for i in this.superchain(i)))
        if "mem" in globals() and "mObjIdx" in globals() and raw[1] in mObjIdx:
            fields = [read_metafield(mem, f) for f in read_metaobj(mem, mObjList[mObjIdx[raw[1]]])[5]]
            fieldmap = {f[3]: f[1].decode("sjis") for f in fields}
        else:
            maxslot = max(f[0] for f in raw[2])
            falign = "03" if maxslot > 99 else ("02" if maxslot > 9 else "")
            fieldmap = {i: ("field%" + falign + "d") % i for i in range(maxslot+1)}
        fmaxlen = max(len(fieldmap[f[0]]) for f in raw[2])
        talign = max(len(f[2][0]) for f in raw[2])
        for f in raw[2]:
            print((" - %-" + str(fmaxlen) + "s: %-" + str(talign) + "s: %r" + (" <%s>" % (this.refInfo[f[1]][1][1][0].decode("sjis") if f[1] != -1 else "NULL",) if f[2][0] == b"ObjectRef" else "")) % (fieldmap[f[0]], f[2][0].decode("sjis"), f[1]))
        if raw[1] == b"igImage":
            px, py, pz, ps, order, br, bg, bb, ba, pfmt, isz, im, _, _, bint, clut, bidx, bpr, compr, bdpth, name = [f[1] for f in raw[2]]
            #assert br == bg == bb == ba == 0
            if compr:
                assert bint == bidx == 0
                assert bidx == 0
                assert br == bg == bb == ba == 1
            else:
                assert not compr
                assert bint == 0
                assert bidx == 8
                assert isz == px*py, (isz, pfmt, (px, py, pz, ps))
                assert bpr == px*pz, (bpr, px, pz)
            #assert pfmt == 65536 # compr: 14
            if bidx:
                clutobj = this.objects[clut]
                clfmt, clentr, clstride, clut, clsz = [f[1] for f in clutobj]
                clut = this.objects[clut]
            bpp = bidx or br+bg+bb+ba
            assert bpp in {4, 8}
            s = ""
            im = memoryview(this.objects[im])
            hasalpha = False
            for blend in [255, 0, None]:
                print()
                crow = []
                if compr: px = (px+3)&~3
                if compr: py = (py+3)&~3
                for r in range(0,py,2):
                    if compr and r&2==0: crow = []
                    s += " "
                    for c in range(px):
                        def rgb565(i): return [((i>>11)&0x1f)*255//0x1f,((i>>5)&0x3f)*255//0x3f,(i&0x1f)*255//0x1f]
                        if compr: # DXT1 decoding
                            if r&2==0 and c%4==0:
                                coff = ((r//4)*(px//4)+(c//4))*8
                                cdat = im[coff:coff+8]
                                a,b,ix = struct.unpack("<HHI", cdat)
                                a,b=rgb565(a)+[255],rgb565(b)+[255]
                                if b<a: clr = list(zip(*[[ac,bc,(ac*2+bc)//3,(ac+bc*2)//3] for ac,bc in zip(a,b)]))
                                else: clr = list(zip(*[[ac,bc,(ac+bc)//2,0] for ac,bc in zip(a,b)]))
                                crow.append([clr[(ix>>(i*2))&3] for i in range(16)])
                            ur,ug,ub,ua = crow[c//4][( r&2 )*4+(c%4)]
                            dr,dg,db,da = crow[c//4][((r&2)+1)*4+(c%4)]
                        else: # bitmap
                            u, d = im[r*bpr+c:][:1].tolist()[0], im[(r+1)*bpr+c:][:1].tolist()[0]
                            if bidx:
                                ur,ug,ub,ua = memoryview(clut[u * clstride:u * clstride+4]).tolist()
                                dr,dg,db,da = memoryview(clut[d * clstride:d * clstride+4]).tolist()
                            else:
                                ur,ug,ub,ua = u,u,u,255
                                dr,dg,db,da = d,d,d,255
                        if ua < 255 or da < 255: hasalpha = True
                        if blend is None: # ignore alpha
                            pass
                        else: # blend
                            # XXX: Incorrect, need gamma correction!!!
                            ur = int(blend*(255-ua)/255+ur*ua/255)
                            ug = int(blend*(255-ua)/255+ug*ua/255)
                            ub = int(blend*(255-ua)/255+ub*ua/255)
                            dr = int(blend*(255-da)/255+dr*da/255)
                            dg = int(blend*(255-da)/255+dg*da/255)
                            db = int(blend*(255-da)/255+db*da/255)
                        s += "\x1b[48;2;%d;%d;%d;38;2;%d;%d;%dm▄" % (ur,ug,ub, dr,dg,db)
                    s += "\x1b[0m"
                    print(s)
                    s = ""
                if not hasalpha: break
                #import time
                #time.sleep(0.2)
    def orderedDump(this, i=None):
        if i is None: i = this.infoListPlace
        seen = set()
        def rec(i, parent=None, listacc=None):
            if i in seen: return
            seen.add(i)
            k, t, obj = this[i]
            if not k and parent is not None and b"igDataList" in this.superchain(parent):
                d = this.readDataList(parent)
                print("[%d]: %r" % (i, d))
            else:
                print("[%d]: "%i, end="")
                r = this.pretty(i, listacc=listacc)
            if k:
                for f in obj:
                    if type(f) == tuple and f[2][0] in {b"MemoryRef", b"ObjectRef"}:
                        if f[1] == -1: continue
                        if t == b"igImage" and f[0] in {13, 17} and r is not False: continue # do not print image or clut hexdump, already printed image to terminal
                        rec(f[1], i)
            elif i in this.isObjListData:
                listacc = {}
                for r in read_rawlist(obj, 0, len(obj)//4, "i", endian=this.endian):
                    if r == -1: continue
                    rec(r, listacc=listacc)
        rec(i)
def usage():
    print("Usage: %s filename.igb [operation ...]" % (sys.argv[0] or "igbparse.py"))
    print(" extract [all] - used on ndmw.igb, unpacks .afs files")
    print(" dump [<file>] - prints out a recursive dump starting with the info list,")
    print(" if file is specified dumps that file using first file as database")
    #print(" drops <path/to/usa_pack.igb> - prints drop tables")
    print(" with no argument, loads the file for exploration (use python -i)")
    exit(1)
if __name__ == "__main__":
    if len(sys.argv) < 2: usage()
    opargs = sys.argv[2:]
    igb = IGB(sys.argv[1])
    import os
    try:
        with open(os.path.expanduser("~/.local/share/dolphin-emu/Dump/mem1.raw"), "rb") as f: mem = memoryview(f.read())
        if mem[:6].tobytes() not in {b"GDJEB2", b"GDJJB2"}:
            print("WARNING: memory dump not from DW4/DWX! Will probably give wrong results!")
        mObjList = read_u32(mem, 0x80578008 + 0x18)
        mEnumList = read_u32(mem, 0x80578008 + 0x24)
        print(hex(mObjList), hex(mEnumList))
        _, _, mObjList, mObjCount = getfields(mem, mObjList, "IIII")
        _, _, mEnumList, mEnumCount = getfields(mem, mEnumList, "IIII")
        assert mObjCount < 10000 and mEnumCount < 1000
        mObjList = getfields(mem, mObjList, "I"*mObjCount)
        mEnumList = getfields(mem, mEnumList, "I"*mEnumCount)
        mObjIdx = {read_obj(mem, o)[1][b"_name"]: i for i,o in enumerate(mObjList)}
        mEnumIdx = {read_obj(mem, o)[1][b"_name"]: i for i,o in enumerate(mEnumList)}
        del _
        rng = findRandom(mem)
        rngoffset = getRNGoffset(mem, rng)
        print("RNG is " + rngpretty(rngoffset))
    except FileNotFoundError:
        pass
    if opargs:
        if opargs[0] == "extract":
            extractall = False
            if opargs[1:]:
                if opargs[1] == "all":
                    extractall = True
                else: usage()
                if opargs[2:]: print("Ignoring arguments:", opargs[2:])
            def stat(path):
                try:
                    return os.stat(path)
                except FileNotFoundError:
                    return None
            def mkdirp(path):
                if stat(path) is None:
                    base, last = os.path.split(path)
                    mkdirp(base)
                    os.mkdir(path)
            def forcemove(pfrom, pto):
                if stat(pto): os.unlink(pto)
                os.rename(pfrom, pto)
            extractdir = "./extracted"
            rootdir = os.path.dirname(sys.argv[1])
            afssetup, = [[f[1] for f in igb[r][2]] for r in igb.parseList(igb.infoListPlace) if igb[r][1] == b"beNDMWAfsSetupInfo"]
            root, resolved, filelist, afsdirs = afssetup
            mkdirp("%s/%s" % (extractdir, root))
            extractdir = "%s/%s" % (extractdir, root)
            afslist = []
            afsmeta = []
            for afs in igb.parseList(afsdirs):
                name, count = [f[1] for f in igb.objects[afs]]
                with open("%s/%s" % (rootdir, name), "rb") as f:
                    assert f.read(4) == b"AFS\0"
                    afslist.append([])
                    fcount, = struct.unpack("<I", f.read(4))
                    assert count == fcount, (count, fcount)
                    fdata = [struct.unpack("<II", f.read(8)) for i in range(fcount)]
                    fmeta = struct.unpack("<II", f.read(8))
                    f.seek(fmeta[0])
                    fmeta = read_rawlist(f.read(fmeta[1]), 0, fcount, "4s28xIHHHHHH", "<")
                    afsmeta.append(fmeta)
                    for i in range(fcount):
                        fname = "[%d.%d]" % (len(afslist)-1, len(afslist[-1]))
                        print(fname)
                        afslist[-1].append(fname)
                        with open("%s/%s"%(extractdir, fname), "wb") as d:
                            f.seek(fdata[i][0])
                            d.write(f.read(fdata[i][1]))
            def doextract(aidx, fidx):
                path = afslist[aidx][fidx]
                fullpath = "%s/%s" % (extractdir,path)
                fulledir, ext = os.path.splitext(fullpath)
                igb = IGB(fullpath)
                for i in range(len(igb.objects)):
                    k, ty, obj = igb[i]
                    if not k: continue
                    if ty == b"igImage":
                        px, py, pz, ps, order, br, bg, bb, ba, pfmt, isz, im, _, _, bint, clut, bidx, bpr, compr, bdpth, name = [f[1] for f in obj]
                        name = name or "[%d].%s"%(i,"png" if not compr else "dds")
                        mkdirp(fulledir)
                        print("%s/%s" % (fulledir, name))
                        with open("%s/%s" % (fulledir, name), "wb") as fimg:
                            if compr:
                                assert name[-3:] != "png"
                                header = struct.pack("<4s II III 4x I 44x II4s 40x", b"DDS ", 124, 0x81007, py, px, isz, 1, 32, 0x4, b"DXT1") # 1 is mipmap count, TODO
                                fimg.write(header)
                                fimg.write(igb.objects[im])
                            else:
                                import png
                                assert name[-3:] == "png"
                                assert br == bg == bb == ba == bint == 0 and bidx == 8 and pz == ps == 1
                                assert px == bpr
                                assert px * py == isz
                                clutobj = igb.objects[clut]
                                clfmt, clentr, clstride, clut, clsz = [f[1] for f in clutobj]
                                clut = igb.objects[clut]
                                im = igb.objects[im]
                                pixeldata = [[e for i in memoryview(im[y*bpr:y*bpr+bpr]).tolist() for e in memoryview(clut[i*clstride:i*clstride+4]).tolist()] for y in range(py)]
                                png.from_array(pixeldata, "RGBA").save(fimg)
                    elif b"beCriSfpData" in igb.superchain(i):
                        name, ver, data = [f[1] for f in obj[:3]]
                        mkdirp("%s/s" % (fulledir,))
                        print("%s/s/%s" % (fulledir, name))
                        with open("%s/s/%s" % (fulledir, name), "wb") as f:
                            f.write(igb.objects[data])
            processed = set()
            for f in igb.parseList(filelist):
                aidx, fidx, path = igb.parseFile(f)
                print("[%d.%d] -> %s" % (aidx, fidx, path))
                if fidx == -1: continue
                fullpath = "%s/%s" % (extractdir, path)
                mkdirp(os.path.split(fullpath)[0])
                forcemove("%s/%s"%(extractdir,afslist[aidx][fidx]), fullpath)
                afslist[aidx][fidx] = path
                if extractall and path[-3:] != "sfd": doextract(aidx, fidx)
                if "/" not in path and ("stag" in path or "dng" in path):
                    processed.add((aidx, fidx))
                    sigb = IGB(fullpath)
                    flistinfo, = [[f[1] for f in sigb[r][2]] for r in sigb.parseList(sigb.infoListPlace) if sigb[r][1] == b"beFileListInfo"]
                    _name, _resolved, section, fnames = flistinfo
                    for n in sigb.readDataList(fnames):
                        idxref, path = n[:n.index("]")+1], n[n.index("]")+1:]
                        aidx, fidx = [int(p) for p in idxref[1:-1].split(".")]
                        newpath = "%s/%s.igb" % (section.lower(), afslist[aidx][fidx])
                        print("%s -> %s" % (afslist[aidx][fidx], newpath))
                        mkdirp(os.path.split("%s/%s"%(extractdir,newpath))[0])
                        forcemove("%s/%s"%(extractdir,afslist[aidx][fidx]), "%s/%s"%(extractdir,newpath))
                        afslist[aidx][fidx] = newpath
                        if extractall: doextract(aidx, fidx)
        elif opargs[0] == "dump":
            getdb("conItem", igb)
            if opargs[1:]:
                igbdb = igb
                igb = IGB(opargs[1])
                if opargs[2:]: print("Ignoring arguments:", opargs[2:])
            if len(igb.objects) == 0:
                print("<IGB FILE IS EMPTY>")
            else:
                igb.orderedDump()
        else:
            print("unknown operation:", opargs)
            usage()
#a,b,c=-1,-1,-1
#for i,r in enumerate(predictRNG()):
#    a,b,c = b,c,r
#    if c%6000==0 and b%620+1<42:
#        m = b%620+1-10
#        mcnt = 0
#        if m==1: mcnt=5
#        elif m<4: mcnt=4
#        elif m<8: mcnt=3
#        elif m<16: mcnt=2
#        elif m<32: mcnt=1
#        print("Call %9d (%12s): loot %05d, mod rng %2d, max bonus, (%d mods)" % (i-2, rngpretty(i-2), a%10000, b%620+1, mcnt))
#for i,o in enumerate(igb.objects):
#    if type(o) == type(b"") and len(o) > 10000: continue
#    if " 41" in repr(o):# .replace(", 613, ",""):
#        chnk = repr(o).split("41")
#        for l,c in zip(chnk,chnk[1:]):
#            if len(l) and len(c) and l[-1] not in "x.0123456789" and c[0] not in ".0123456789" and c[5:5+9] not in {"ObjectRef", "MemoryRef"}:
#                break
#        else:
#            continue
#        print(i, o)
# generators
#for l in igb.readDataList(igb.objects[igb.parseList(igb.infoListPlace)[0]][-1][1]): igb.pretty(l)