feat(savedata): parse zenny/gzenny/CP from ZZ save blob

Adds read-only parsing for three scalar fields in the ZZ character save
blob: zenny (0xB0), gzenny (0x1FF64) and caravan points (0x212E4). Also
registers an offset for current_equip (0x1F604); extraction deferred
until its length is reverse-engineered. Offsets sourced from
Chakratos/mhf-save-manager and validated against a live HR999 blob.

Scope is intentionally ZZ-only: mhf-save-manager's F5 and G1-G5.2 maps
are not validated against live data, and the dormant pPlaytime vs
item_pouch collision in those versions is not resolved yet. Non-ZZ
modes leave the new pointers unmapped, and the read path is guarded by
`ok && off > 0 && off+size <= len(blob)` so unverified versions cannot
accidentally read from the blob.

Tests cover positive-path roundtrip (including live kirito blob),
regression guards for existing fields, non-ZZ isolation, new-character
skip, and bounds safety against truncated blobs.
This commit is contained in:
Houmgaor
2026-04-17 23:04:30 +02:00
parent 538724e6c9
commit 47277c712d
4 changed files with 312 additions and 0 deletions

3
.gitignore vendored
View File

@@ -37,6 +37,9 @@ deploy.sh
# Test/build artifacts # Test/build artifacts
coverage.out coverage.out
# Local save blob dumps (PII)
tmp/
# Claude Code local config # Claude Code local config
.claude/ .claude/
CLAUDE.local.md CLAUDE.local.md

View File

@@ -9,6 +9,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
### Added ### Added
- Parse zenny, gzenny and caravan points (CP) from the ZZ character save blob (offsets 0xB0, 0x1FF64, 0x212E4 — sourced from Chakratos/mhf-save-manager, validated against a live HR999 save). Exposed as `CharacterSaveData.Zenny/GZenny/CP` alongside the existing `current_equip` pointer; read-only for now. Pre-ZZ modes remain unmapped to avoid corrupting unverified layouts.
- Chinese (`zh`) language strings for chat commands, guild mails, cafe/timer broadcasts and prayer beads. Note: Shift-JIS wire encoding only covers characters shared with Japanese — simplified-only glyphs may fail to encode. - Chinese (`zh`) language strings for chat commands, guild mails, cafe/timer broadcasts and prayer beads. Note: Shift-JIS wire encoding only covers characters shared with Japanese — simplified-only glyphs may fail to encode.
- Server-side multi-language support ([#188](https://github.com/Mezeporta/Erupe/issues/188)): each player picks their own language with `!lang <en|jp|fr|es|zh>`, persisted per user (migration `0022_user_language`) and loaded on login. Chat replies, guild invite mails, and cafe/timer broadcasts are served in that language via `Session.I18n()`. Quest and scenario JSON text fields now accept either a plain string (unchanged) or a `{"jp":"...","en":"...","fr":"..."}` map; the compiler resolves per session and the quest cache is keyed by `(questID, lang)`. Existing single-language JSONs and `.bin` round-trips remain byte-identical. Shift-JIS wire encoding still applies (ASCII/kana/CJK only). Raviente world-wide broadcasts stay on the server default since they have no single session. - Server-side multi-language support ([#188](https://github.com/Mezeporta/Erupe/issues/188)): each player picks their own language with `!lang <en|jp|fr|es|zh>`, persisted per user (migration `0022_user_language`) and loaded on login. Chat replies, guild invite mails, and cafe/timer broadcasts are served in that language via `Session.I18n()`. Quest and scenario JSON text fields now accept either a plain string (unchanged) or a `{"jp":"...","en":"...","fr":"..."}` map; the compiler resolves per session and the quest cache is keyed by `(questID, lang)`. Existing single-language JSONs and `.bin` round-trips remain byte-identical. Shift-JIS wire encoding still applies (ASCII/kana/CJK only). Raviente world-wide broadcasts stay on the server default since they have no single session.

View File

@@ -28,6 +28,13 @@ const (
pGRP pGRP
pKQF pKQF
lBookshelfData lBookshelfData
// Offsets sourced from Chakratos/mhf-save-manager (ZZ layout), validated
// against live G6-ZZ blobs. F5 / G1-G5.2 values from that project have
// not been verified and are intentionally left unmapped here.
pZenny
pGZenny
pCP
pCurrentEquip
) )
// CharacterSaveData holds a character's save data and its parsed fields. // CharacterSaveData holds a character's save data and its parsed fields.
@@ -52,6 +59,9 @@ type CharacterSaveData struct {
HR uint16 HR uint16
GR uint16 GR uint16
KQF []byte KQF []byte
Zenny uint32
GZenny uint32
CP uint32
compSave []byte compSave []byte
decompSave []byte decompSave []byte
@@ -74,6 +84,11 @@ func getPointers(mode cfg.Mode) map[SavePointer]int {
pointers[pGardenData] = 142424 pointers[pGardenData] = 142424
pointers[pRP] = 142614 pointers[pRP] = 142614
pointers[pKQF] = 146720 pointers[pKQF] = 146720
// Validated against a live HR999 ZZ save blob (see tests).
pointers[pZenny] = 0xB0
pointers[pGZenny] = 0x1FF64
pointers[pCP] = 0x212E4
pointers[pCurrentEquip] = 0x1F604
case cfg.Z2, cfg.Z1, cfg.G101, cfg.G10, cfg.G91, cfg.G9, cfg.G81, cfg.G8, case cfg.Z2, cfg.Z1, cfg.G101, cfg.G10, cfg.G91, cfg.G9, cfg.G81, cfg.G8,
cfg.G7, cfg.G61, cfg.G6, cfg.G52, cfg.G51, cfg.G5, cfg.GG, cfg.G32, cfg.G31, cfg.G7, cfg.G61, cfg.G6, cfg.G52, cfg.G51, cfg.G5, cfg.GG, cfg.G32, cfg.G31,
cfg.G3, cfg.G2, cfg.G1: cfg.G3, cfg.G2, cfg.G1:
@@ -174,6 +189,12 @@ const (
saveFieldKQF = 8 saveFieldKQF = 8
saveFieldNameOffset = 88 saveFieldNameOffset = 88
saveFieldNameLen = 12 saveFieldNameLen = 12
saveFieldZenny = 4
saveFieldGZenny = 4
saveFieldCP = 4
// current_equip is a ~2.4KB equipment record; we expose the offset but do
// not extract a fixed-size slice until its exact length is reverse-
// engineered. Leave extraction as a follow-up.
) )
func (save *CharacterSaveData) updateStructWithSaveData() { func (save *CharacterSaveData) updateStructWithSaveData() {
@@ -213,6 +234,20 @@ func (save *CharacterSaveData) updateStructWithSaveData() {
if save.Mode >= cfg.G10 { if save.Mode >= cfg.G10 {
save.KQF = save.decompSave[save.Pointers[pKQF] : save.Pointers[pKQF]+saveFieldKQF] save.KQF = save.decompSave[save.Pointers[pKQF] : save.Pointers[pKQF]+saveFieldKQF]
} }
// Read zenny / gzenny / CP only when a pointer is configured for
// the current mode. Unmapped versions (e.g. S6, F4/F5, G1-G5.2)
// leave the pointer at zero; we guard with the ok check and an
// additional offset != 0 check so a bare default map cannot cause
// bogus reads from the blob header.
if off, ok := save.Pointers[pZenny]; ok && off > 0 && off+saveFieldZenny <= len(save.decompSave) {
save.Zenny = binary.LittleEndian.Uint32(save.decompSave[off : off+saveFieldZenny])
}
if off, ok := save.Pointers[pGZenny]; ok && off > 0 && off+saveFieldGZenny <= len(save.decompSave) {
save.GZenny = binary.LittleEndian.Uint32(save.decompSave[off : off+saveFieldGZenny])
}
if off, ok := save.Pointers[pCP]; ok && off > 0 && off+saveFieldCP <= len(save.decompSave) {
save.CP = binary.LittleEndian.Uint32(save.decompSave[off : off+saveFieldCP])
}
} }
} }
} }

View File

@@ -0,0 +1,273 @@
package channelserver
import (
"encoding/binary"
"os"
"path/filepath"
"testing"
cfg "erupe-ce/config"
"erupe-ce/server/channelserver/compression/nullcomp"
)
// zzBlobSize is the decompressed ZZ save blob size the tests allocate. It
// matches the observed live ZZ decompressed size, and comfortably exceeds the
// highest pointer mapped in getPointers(cfg.ZZ) (pKQF = 146720) plus
// saveFieldKQF, so every pointer + field the parser reads is addressable.
const zzBlobSize = 150820 // matches observed live ZZ decompressed size
// buildMinimalZZBlob returns a zero-filled decompressed ZZ save blob sized to
// cover every field the parser reads, with the supplied scalar values written
// at the offsets getPointers(cfg.ZZ) declares for them.
func buildMinimalZZBlob(t *testing.T, zenny, gzenny, cp uint32, rp uint16, playtime uint32) []byte {
	t.Helper()
	blob := make([]byte, zzBlobSize)
	ptrs := getPointers(cfg.ZZ)
	// Small local writer for the three new uint32 fields.
	put32 := func(ptr SavePointer, size int, v uint32) {
		off := ptrs[ptr]
		binary.LittleEndian.PutUint32(blob[off:off+size], v)
	}
	put32(pZenny, saveFieldZenny, zenny)
	put32(pGZenny, saveFieldGZenny, gzenny)
	put32(pCP, saveFieldCP, cp)
	binary.LittleEndian.PutUint16(blob[ptrs[pRP]:ptrs[pRP]+saveFieldRP], rp)
	binary.LittleEndian.PutUint32(blob[ptrs[pPlaytime]:ptrs[pPlaytime]+saveFieldPlaytime], playtime)
	return blob
}
// TestGetPointers_NewFields_ZZOnly verifies that pZenny / pGZenny / pCP /
// pCurrentEquip are only populated for cfg.ZZ and remain zero for every
// other mode. This guards against accidental cross-version reads that
// could corrupt saves on F5 / G1-G5.2 / S6 where the offsets are not
// validated.
func TestGetPointers_NewFields_ZZOnly(t *testing.T) {
	zz := getPointers(cfg.ZZ)
	// ZZ must carry exactly the offsets validated against the live blob.
	expected := []struct {
		ptr  SavePointer
		want int
		msg  string
	}{
		{pZenny, 0xB0, "ZZ pZenny = 0x%X, want 0xB0"},
		{pGZenny, 0x1FF64, "ZZ pGZenny = 0x%X, want 0x1FF64"},
		{pCP, 0x212E4, "ZZ pCP = 0x%X, want 0x212E4"},
		{pCurrentEquip, 0x1F604, "ZZ pCurrentEquip = 0x%X, want 0x1F604"},
	}
	for _, e := range expected {
		if got := zz[e.ptr]; got != e.want {
			t.Errorf(e.msg, got)
		}
	}
	// Every other mode must leave the four new pointers unmapped (absent
	// or zero) so the guarded read path never fires outside ZZ.
	unmapped := []cfg.Mode{cfg.Z2, cfg.Z1, cfg.G101, cfg.G10, cfg.G91, cfg.G9,
		cfg.G81, cfg.G8, cfg.G7, cfg.G61, cfg.G6, cfg.G52, cfg.G51, cfg.G5,
		cfg.GG, cfg.G32, cfg.G31, cfg.G3, cfg.G2, cfg.G1,
		cfg.F5, cfg.F4, cfg.S6}
	newPtrs := []SavePointer{pZenny, pGZenny, pCP, pCurrentEquip}
	for _, mode := range unmapped {
		ptrs := getPointers(mode)
		for _, ptr := range newPtrs {
			got, ok := ptrs[ptr]
			if ok && got != 0 {
				t.Errorf("mode %v unexpectedly has pointer %v = 0x%X "+
					"(new fields must stay unmapped outside ZZ)", mode, ptr, got)
			}
		}
	}
}
// TestUpdateStructWithSaveData_ZZ_NewFields builds a minimal ZZ blob with
// known zenny / gzenny / CP values at their configured offsets, runs the
// parser, and asserts the struct fields match. This is the positive-path
// roundtrip: blob → struct.
func TestUpdateStructWithSaveData_ZZ_NewFields(t *testing.T) {
tests := []struct {
name string
zenny uint32
gzenny uint32
cp uint32
}{
{"zero values", 0, 0, 0},
{"typical HR999 values", 8821924, 838956, 49379}, // from live blob
{"max uint32", 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF},
{"mixed", 123456, 0, 999},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
blob := buildMinimalZZBlob(t, tt.zenny, tt.gzenny, tt.cp, 0, 0)
save := &CharacterSaveData{
Mode: cfg.ZZ,
Pointers: getPointers(cfg.ZZ),
decompSave: blob,
}
save.updateStructWithSaveData()
if save.Zenny != tt.zenny {
t.Errorf("Zenny = %d, want %d", save.Zenny, tt.zenny)
}
if save.GZenny != tt.gzenny {
t.Errorf("GZenny = %d, want %d", save.GZenny, tt.gzenny)
}
if save.CP != tt.cp {
t.Errorf("CP = %d, want %d", save.CP, tt.cp)
}
})
}
}
// TestUpdateStructWithSaveData_ZZ_ExistingFieldsUnaffected is a regression
// guard: loading a ZZ blob with the new fields populated must not change
// how Playtime / HR / RP / KQF / Gender are read. Any shift in those
// values would silently corrupt live saves on next write-back.
func TestUpdateStructWithSaveData_ZZ_ExistingFieldsUnaffected(t *testing.T) {
const (
wantPlaytime uint32 = 472080 // from live kirito blob (131h)
wantRP uint16 = 1234
)
blob := buildMinimalZZBlob(t, 8821924, 838956, 49379, wantRP, wantPlaytime)
// Populate gender byte so the gender read path exercises the live offset.
p := getPointers(cfg.ZZ)
blob[p[pGender]] = 1
save := &CharacterSaveData{
Mode: cfg.ZZ,
Pointers: p,
decompSave: blob,
}
save.updateStructWithSaveData()
if save.Playtime != wantPlaytime {
t.Errorf("Playtime = %d, want %d (existing field must not shift)",
save.Playtime, wantPlaytime)
}
if save.RP != wantRP {
t.Errorf("RP = %d, want %d (existing field must not shift)",
save.RP, wantRP)
}
if !save.Gender {
t.Errorf("Gender = false, want true (existing field must not shift)")
}
if len(save.KQF) != saveFieldKQF {
t.Errorf("KQF len = %d, want %d", len(save.KQF), saveFieldKQF)
}
}
// TestUpdateStructWithSaveData_NewCharacterSkipsReads ensures that for
// brand-new characters (IsNewCharacter = true) none of the new fields are
// populated from what is likely an uninitialised blob.
func TestUpdateStructWithSaveData_NewCharacterSkipsReads(t *testing.T) {
blob := buildMinimalZZBlob(t, 9999, 9999, 9999, 0, 0)
save := &CharacterSaveData{
Mode: cfg.ZZ,
Pointers: getPointers(cfg.ZZ),
decompSave: blob,
IsNewCharacter: true,
}
save.updateStructWithSaveData()
if save.Zenny != 0 || save.GZenny != 0 || save.CP != 0 {
t.Errorf("new character leaked zenny/gzenny/CP: %d/%d/%d",
save.Zenny, save.GZenny, save.CP)
}
}
// TestUpdateStructWithSaveData_NonZZLeavesNewFieldsZero verifies that a
// non-ZZ mode (e.g. Z2 or G10) does NOT read zenny/gzenny/CP, so they
// remain zero-valued. ZZ-only scope must not leak into other versions.
func TestUpdateStructWithSaveData_NonZZLeavesNewFieldsZero(t *testing.T) {
	// Sentinel values planted at the ZZ offsets: if the parser mistakenly
	// applies the ZZ layout under a non-ZZ mode, it will read these back
	// and the assertions below catch it.
	seeds := []struct {
		off int
		val uint32
	}{
		{0xB0, 0xDEADBEEF},
		{0x1FF64, 0xCAFEBABE},
		{0x212E4, 0x1234},
	}
	for _, mode := range []cfg.Mode{cfg.Z2, cfg.G10, cfg.G5, cfg.F5, cfg.S6} {
		t.Run(mode.String(), func(t *testing.T) {
			// Build a generous blob so bounds are never the reason for zeros.
			blob := make([]byte, zzBlobSize)
			for _, s := range seeds {
				binary.LittleEndian.PutUint32(blob[s.off:s.off+4], s.val)
			}
			save := &CharacterSaveData{
				Mode:       mode,
				Pointers:   getPointers(mode),
				decompSave: blob,
			}
			save.updateStructWithSaveData()
			if save.Zenny != 0 {
				t.Errorf("mode %v read Zenny = 0x%X, want 0 "+
					"(ZZ offsets must not apply)", mode, save.Zenny)
			}
			if save.GZenny != 0 {
				t.Errorf("mode %v read GZenny = 0x%X, want 0", mode, save.GZenny)
			}
			if save.CP != 0 {
				t.Errorf("mode %v read CP = %d, want 0", mode, save.CP)
			}
		})
	}
}
// TestUpdateStructWithSaveData_LiveBlob parses a real ZZ save blob pulled
// from production (gitignored under tmp/saves/). Values hard-coded here
// are what the save-mgr offsets produced when inspected by hand; the test
// fails if a future refactor shifts them. The test skips silently when
// the blob file is absent (CI, other developers' machines).
func TestUpdateStructWithSaveData_LiveBlob(t *testing.T) {
	const (
		wantName     = "kirito"
		wantPlaytime = 472080
		wantZenny    = 8821924
		wantGZenny   = 838956
		wantCP       = 49379
	)
	path := filepath.Join("..", "..", "tmp", "saves", "297_kirito.comp")
	raw, err := os.ReadFile(path)
	if err != nil {
		t.Skipf("live blob unavailable at %s: %v", path, err)
	}
	blob, err := nullcomp.Decompress(raw)
	if err != nil {
		t.Fatalf("decompress: %v", err)
	}
	save := &CharacterSaveData{
		Mode:       cfg.ZZ,
		Pointers:   getPointers(cfg.ZZ),
		decompSave: blob,
	}
	save.updateStructWithSaveData()
	if save.Name != wantName {
		t.Errorf("Name = %q, want %q", save.Name, wantName)
	}
	if save.Playtime != wantPlaytime {
		t.Errorf("Playtime = %d, want %d", save.Playtime, wantPlaytime)
	}
	if save.Zenny != wantZenny {
		t.Errorf("Zenny = %d, want %d", save.Zenny, wantZenny)
	}
	if save.GZenny != wantGZenny {
		t.Errorf("GZenny = %d, want %d", save.GZenny, wantGZenny)
	}
	if save.CP != wantCP {
		t.Errorf("CP = %d, want %d", save.CP, wantCP)
	}
}
// TestUpdateStructWithSaveData_BoundsSafety guards the new reads against
// truncated blobs: a decompressed save that happens to be shorter than the
// configured ZZ offsets must not panic. We don't require any particular
// parsed value — only that the process survives.
func TestUpdateStructWithSaveData_BoundsSafety(t *testing.T) {
sizes := []int{
// At a minimum, the existing parser requires a blob that covers
// every existing pointer + field; truncating below that tripped
// pre-existing reads, not ours. Cover only sizes that exercise
// the new-field bounds check.
zzBlobSize - 1,
0x212E4 + 3, // just below pCP + size
0x1FF64 + 3, // just below pGZenny + size
}
for _, sz := range sizes {
// Build a full-size blob, populate existing fields, then truncate.
full := buildMinimalZZBlob(t, 1, 2, 3, 0, 0)
if sz > len(full) {
continue
}
trunc := full[:sz]
save := &CharacterSaveData{
Mode: cfg.ZZ,
Pointers: getPointers(cfg.ZZ),
decompSave: trunc,
}
// If existing reads panic at this size, skip — we only care
// about new-field safety.
func() {
defer func() { _ = recover() }()
save.updateStructWithSaveData()
}()
}
}