feat(savedata): write back zenny/gzenny/CP to ZZ save blob

Mirrors the read path added in 47277c7: updateSaveDataWithStruct now
flushes Zenny/GZenny/CP back to the blob for ZZ, using the same
`ok && off > 0 && off+size <= len(blob)` guard so unmapped modes remain
inert.

Tests lock down byte-level idempotence — the most important invariant
for save data. Parsing a live kirito ZZ blob and immediately writing
the struct back produces a byte-identical blob, so enabling these
fields cannot silently corrupt existing player saves on the next save
cycle. Additional coverage: round-trip through both paths, non-ZZ
modes never touch the blob bytes, and truncated blobs don't panic on
write.
This commit is contained in:
Houmgaor
2026-04-17 23:06:16 +02:00
parent 47277c712d
commit b1972e3c96
3 changed files with 208 additions and 1 deletions

View File

@@ -9,7 +9,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
### Added
- Parse zenny, gzenny and caravan points (CP) from the ZZ character save blob (offsets 0xB0, 0x1FF64, 0x212E4 — sourced from Chakratos/mhf-save-manager, validated against a live HR999 save). Exposed as `CharacterSaveData.Zenny/GZenny/CP` alongside the existing `current_equip` pointer; read-only for now. Pre-ZZ modes remain unmapped to avoid corrupting unverified layouts.
- Parse and write zenny, gzenny and caravan points (CP) in the ZZ character save blob (offsets 0xB0, 0x1FF64, 0x212E4 — sourced from Chakratos/mhf-save-manager, validated against a live HR999 save). Exposed as `CharacterSaveData.Zenny/GZenny/CP` alongside the existing `current_equip` pointer. Write path is byte-idempotent (verified against a live blob). Pre-ZZ modes remain unmapped to avoid corrupting unverified layouts.
- Chinese (`zh`) language strings for chat commands, guild mails, cafe/timer broadcasts and prayer beads. Note: Shift-JIS wire encoding only covers characters shared with Japanese — simplified-only glyphs may fail to encode.
- Server-side multi-language support ([#188](https://github.com/Mezeporta/Erupe/issues/188)): each player picks their own language with `!lang <en|jp|fr|es|zh>`, persisted per user (migration `0022_user_language`) and loaded on login. Chat replies, guild invite mails, and cafe/timer broadcasts are served in that language via `Session.I18n()`. Quest and scenario JSON text fields now accept either a plain string (unchanged) or a `{"jp":"...","en":"...","fr":"..."}` map; the compiler resolves per session and the quest cache is keyed by `(questID, lang)`. Existing single-language JSONs and `.bin` round-trips remain byte-identical. Shift-JIS wire encoding still applies (ASCII/kana/CJK only). Raviente world-wide broadcasts stay on the server default since they have no single session.

View File

@@ -171,6 +171,18 @@ func (save *CharacterSaveData) updateSaveDataWithStruct() {
if save.Mode >= cfg.G10 {
copy(save.decompSave[save.Pointers[pKQF]:save.Pointers[pKQF]+saveFieldKQF], save.KQF)
}
// Write zenny / gzenny / CP only when a validated pointer exists for the
// current mode. Same guards as the read path: absent or zero offsets are
// never written, so unmapped versions cannot corrupt unrelated bytes.
if off, ok := save.Pointers[pZenny]; ok && off > 0 && off+saveFieldZenny <= len(save.decompSave) {
binary.LittleEndian.PutUint32(save.decompSave[off:off+saveFieldZenny], save.Zenny)
}
if off, ok := save.Pointers[pGZenny]; ok && off > 0 && off+saveFieldGZenny <= len(save.decompSave) {
binary.LittleEndian.PutUint32(save.decompSave[off:off+saveFieldGZenny], save.GZenny)
}
if off, ok := save.Pointers[pCP]; ok && off > 0 && off+saveFieldCP <= len(save.decompSave) {
binary.LittleEndian.PutUint32(save.decompSave[off:off+saveFieldCP], save.CP)
}
}
// This will update the save struct with the values stored in the character save

View File

@@ -1,6 +1,7 @@
package channelserver
import (
"bytes"
"encoding/binary"
"os"
"path/filepath"
@@ -237,6 +238,200 @@ func TestUpdateStructWithSaveData_LiveBlob(t *testing.T) {
}
}
// TestUpdateSaveDataWithStruct_ZZ_NewFields exercises the write path:
// populate the struct fields, flush them into the blob, then re-parse the
// blob through the read path and require the values to survive intact.
func TestUpdateSaveDataWithStruct_ZZ_NewFields(t *testing.T) {
	cases := []struct {
		name   string
		zenny  uint32
		gzenny uint32
		cp     uint32
	}{
		{"zero values", 0, 0, 0},
		{"typical HR999 values", 8821924, 838956, 49379},
		{"max uint32", 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF},
		{"mixed", 123456, 0, 999},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			blob := buildMinimalZZBlob(t, 0, 0, 0, 0, 0)
			writer := &CharacterSaveData{
				Mode:       cfg.ZZ,
				Pointers:   getPointers(cfg.ZZ),
				decompSave: blob,
				Zenny:      tc.zenny,
				GZenny:     tc.gzenny,
				CP:         tc.cp,
			}
			writer.updateSaveDataWithStruct()

			// Re-parse via the read path to confirm bytes landed at the
			// expected offsets and decode back to the originals.
			reader := &CharacterSaveData{
				Mode:       cfg.ZZ,
				Pointers:   getPointers(cfg.ZZ),
				decompSave: blob,
			}
			reader.updateStructWithSaveData()

			if reader.Zenny != tc.zenny {
				t.Errorf("Zenny round-trip: got %d, want %d", reader.Zenny, tc.zenny)
			}
			if reader.GZenny != tc.gzenny {
				t.Errorf("GZenny round-trip: got %d, want %d", reader.GZenny, tc.gzenny)
			}
			if reader.CP != tc.cp {
				t.Errorf("CP round-trip: got %d, want %d", reader.CP, tc.cp)
			}
		})
	}
}
// TestUpdateSaveDataWithStruct_ZZ_Idempotent is the most important test in
// this file: parsing a blob and immediately writing the struct back must
// produce a byte-identical blob. Any drift means every save cycle would
// silently mutate bytes we don't understand, corrupting the save over
// time. Runs against a fully-populated blob so every field is exercised.
func TestUpdateSaveDataWithStruct_ZZ_Idempotent(t *testing.T) {
	blob := buildMinimalZZBlob(t, 8821924, 838956, 49379, 1234, 472080)
	ptrs := getPointers(cfg.ZZ)
	// Seed non-zero bytes in the fields the parser reads (gender, house
	// tier, KQF) so the corresponding write paths actually copy something.
	blob[ptrs[pGender]] = 1
	copy(blob[ptrs[pHouseTier]:], []byte{1, 2, 3, 4, 5})
	copy(blob[ptrs[pKQF]:], []byte{1, 2, 3, 4, 5, 6, 7, 8})

	before := append([]byte(nil), blob...)
	save := &CharacterSaveData{
		Mode:       cfg.ZZ,
		Pointers:   ptrs,
		decompSave: blob,
	}
	save.updateStructWithSaveData()
	save.updateSaveDataWithStruct()

	if bytes.Equal(blob, before) {
		return
	}
	// Pinpoint the first mismatched byte to help diagnosis.
	for i, want := range before {
		if blob[i] != want {
			t.Fatalf("read+write mutated blob at offset 0x%X: "+
				"was 0x%02X, now 0x%02X (must be byte-idempotent)",
				i, want, blob[i])
		}
	}
	t.Fatalf("blob length changed: was %d, now %d", len(before), len(blob))
}
// TestUpdateSaveDataWithStruct_NonZZDoesNotTouchBlob confirms that writing
// a save for a non-ZZ mode leaves the bytes at the ZZ-specific offsets
// untouched. A regression here could mean setting .Zenny on a non-ZZ save
// clobbers an unrelated field.
func TestUpdateSaveDataWithStruct_NonZZDoesNotTouchBlob(t *testing.T) {
	// Sentinel bytes planted at the ZZ zenny/gzenny/CP offsets; they must
	// survive a non-ZZ write unchanged.
	sentinels := map[int][]byte{
		0xB0:    {0xDE, 0xAD, 0xBE, 0xEF},
		0x1FF64: {0xCA, 0xFE, 0xBA, 0xBE},
		0x212E4: {0x13, 0x37, 0xC0, 0xDE},
	}
	for _, mode := range []cfg.Mode{cfg.Z2, cfg.G10, cfg.G5, cfg.F5} {
		t.Run(mode.String(), func(t *testing.T) {
			blob := make([]byte, zzBlobSize)
			for off, pattern := range sentinels {
				copy(blob[off:], pattern)
			}
			// The RP pointer exists for these modes with a sane offset, so
			// the existing RP write in updateSaveDataWithStruct succeeds;
			// only the new-field writes could touch the sentinels.
			snapshot := append([]byte(nil), blob...)
			save := &CharacterSaveData{
				Mode:       mode,
				Pointers:   getPointers(mode),
				decompSave: blob,
				Zenny:      0x11111111,
				GZenny:     0x22222222,
				CP:         0x33333333,
			}
			save.updateSaveDataWithStruct()
			for _, off := range []int{0xB0, 0x1FF64, 0x212E4} {
				if !bytes.Equal(blob[off:off+4], snapshot[off:off+4]) {
					t.Errorf("mode %v overwrote sentinel at 0x%X: %v "+
						"(new-field writes must be ZZ-only)",
						mode, off, blob[off:off+4])
				}
			}
		})
	}
}
// TestUpdateSaveDataWithStruct_LiveBlobIdempotent is the live-data
// counterpart of the idempotence test: parse a real production ZZ blob,
// write it back immediately, and verify every byte is unchanged. This is
// the strongest possible guarantee that our parser does not silently
// corrupt real player saves. Skips when the blob file is absent.
func TestUpdateSaveDataWithStruct_LiveBlobIdempotent(t *testing.T) {
	path := filepath.Join("..", "..", "tmp", "saves", "297_kirito.comp")
	comp, err := os.ReadFile(path)
	if err != nil {
		t.Skipf("live blob unavailable at %s: %v", path, err)
	}
	decomp, err := nullcomp.Decompress(comp)
	if err != nil {
		t.Fatalf("decompress: %v", err)
	}
	snapshot := make([]byte, len(decomp))
	copy(snapshot, decomp)
	save := &CharacterSaveData{
		Mode:       cfg.ZZ,
		Pointers:   getPointers(cfg.ZZ),
		decompSave: decomp,
	}
	save.updateStructWithSaveData()
	save.updateSaveDataWithStruct()
	if !bytes.Equal(decomp, snapshot) {
		// Pinpoint the first mismatched byte to help diagnosis.
		for i := range snapshot {
			if snapshot[i] != decomp[i] {
				t.Fatalf("live blob read+write mutated byte at 0x%X: "+
					"was 0x%02X, now 0x%02X", i, snapshot[i], decomp[i])
			}
		}
		// If no individual byte differed, the only way bytes.Equal can be
		// false is a length change. Previously this case fell through and
		// the test passed silently; fail explicitly, mirroring the fallback
		// in the synthetic idempotence test.
		t.Fatalf("live blob length changed: was %d, now %d", len(snapshot), len(decomp))
	}
}
// TestUpdateSaveDataWithStruct_BoundsSafety ensures truncated blobs do
// not panic on the write path either. Each size is chosen to sit just
// below an offset+width boundary so the guard in updateSaveDataWithStruct
// must reject the write rather than index out of range.
func TestUpdateSaveDataWithStruct_BoundsSafety(t *testing.T) {
	sizes := []int{
		0x212E4 + 3, // just below pCP + size
		0x1FF64 + 3, // just below pGZenny + size
	}
	for _, sz := range sizes {
		full := buildMinimalZZBlob(t, 1, 2, 3, 0, 0)
		if sz > len(full) {
			continue
		}
		trunc := full[:sz]
		save := &CharacterSaveData{
			Mode:       cfg.ZZ,
			Pointers:   getPointers(cfg.ZZ),
			decompSave: trunc,
			Zenny:      0xAAAA,
			GZenny:     0xBBBB,
			CP:         0xCCCC,
		}
		func() {
			// Bug fix: the previous version discarded recover()'s result,
			// swallowing any panic — the test could never fail and thus
			// never verified its own invariant. Report the panic instead.
			defer func() {
				if r := recover(); r != nil {
					t.Errorf("updateSaveDataWithStruct panicked on %d-byte blob: %v", sz, r)
				}
			}()
			save.updateSaveDataWithStruct()
		}()
	}
}
// TestUpdateStructWithSaveData_BoundsSafety guards the new reads against
// truncated blobs: a decompressed save that happens to be shorter than the
// configured ZZ offsets must not panic. We don't require any particular