feat(scenario): add JSON scenario support and JKR type-3 compressor

Closes #172. Scenario files in bin/scenarios/ can now be authored as
.json instead of .bin — the server compiles them to wire format on
load, falling back to .bin if no .json is present.

- Add ParseScenarioBinary / CompileScenarioJSON in scenario_json.go;
  supports sub-header format (strings as UTF-8, metadata as base64),
  inline format, and raw JKR blobs.
- Add PackSimple JKR type-3 (LZ77) compressor in jpk_compress.go,
  ported from ReFrontier JPKEncodeLz.cs; round-trip tested against
  UnpackSimple.
- Fix off-by-one in processDecode (jpk.go): last literal byte was
  silently dropped for data that does not end on a back-reference.
- Wire loadScenarioBinary into handleMsgSysGetFile replacing the
  inline os.ReadFile call; mirrors the existing loadQuestBinary pattern.
- Rewrite docs/scenario-format.md with full container/sub-header spec
  and JSON schema examples.
This commit is contained in:
Houmgaor
2026-03-20 13:55:40 +01:00
parent 71b675bf3e
commit a1dfdd330a
8 changed files with 1226 additions and 27 deletions

View File

@@ -12,6 +12,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- Event Tent (campaign) system: code redemption, stamp tracking, reward claiming, and quest gating for special event quests, backed by 8 new database tables and seeded with community-researched live-game campaign data ([#182](https://github.com/Mezeporta/Erupe/pull/182), by stratick).
- Database migration `0010_campaign` (campaigns, campaign_categories, campaign_category_links, campaign_rewards, campaign_rewards_claimed, campaign_state, campaign_codes, campaign_quest).
- JSON Hunting Road config: `bin/rengoku_data.json` is now supported as a human-readable alternative to the opaque `rengoku_data.bin` — the server assembles and ECD-encrypts the binary at startup, with `.bin` used as a fallback ([#173](https://github.com/Mezeporta/Erupe/issues/173)).
- JSON scenario files: `.json` files in `bin/scenarios/` are now supported alongside `.bin` — the server tries `.bin` first, then compiles `.json` on demand. Supports sub-header chunks (flags 0x01/0x02, strings UTF-8 → Shift-JIS, opaque metadata preserved as base64), inline episode listings (flag 0x08), and raw JKR blob chunks (flags 0x10/0x20) ([#172](https://github.com/Mezeporta/Erupe/issues/172)). A `ParseScenarioBinary` function allows existing `.bin` files to be exported to JSON. Fixed off-by-one in JPK decompressor that caused the last literal byte to be dropped.
- JKR type-3 (LZ77) compressor added (`common/decryption.PackSimple`), the inverse of `UnpackSimple`, ported from ReFrontier `JPKEncodeLz.cs` ([#172](https://github.com/Mezeporta/Erupe/issues/172)).
- JSON quest files: `.json` files in `bin/quests/` are now supported alongside `.bin` — the server tries `.bin` first (full backward compatibility), then compiles `.json` on the fly to the MHF binary wire format ([#160](https://github.com/Mezeporta/Erupe/issues/160)). Covers all binary sections: quest text (UTF-8 → Shift-JIS), all 12 objective types, monster spawns (large + minion), reward tables, supply box, loaded stages, rank requirements, variant flags, forced equipment, map sections, area transitions, coordinate mappings, map info, gathering points, gathering tables, and area facilities. A `ParseQuestBinary` reverse function allows existing `.bin` files to be inspected and exported to JSON.
### Fixed

View File

@@ -54,7 +54,7 @@ func ProcessDecode(data *byteframe.ByteFrame, outBuffer []byte) {
func (s *jpkState) processDecode(data *byteframe.ByteFrame, outBuffer []byte) {
outIndex := 0
for int(data.Index()) < len(data.Data()) && outIndex < len(outBuffer)-1 {
for int(data.Index()) < len(data.Data()) && outIndex < len(outBuffer) {
if s.bitShift(data) == 0 {
outBuffer[outIndex] = ReadByte(data)
outIndex++

View File

@@ -0,0 +1,169 @@
package decryption
import "encoding/binary"
// PackSimple compresses data with JPK type-3 (LZ77) compression and prepends
// the 16-byte JKR container header. It is the inverse of UnpackSimple.
func PackSimple(data []byte) []byte {
	body := lzEncode(data)
	header := make([]byte, 16, 16+len(body))
	binary.LittleEndian.PutUint32(header[0:4], 0x1A524B4A)          // JKR magic
	binary.LittleEndian.PutUint16(header[4:6], 0x0108)              // container version
	binary.LittleEndian.PutUint16(header[6:8], 0x0003)              // compression type 3 = LZ only
	binary.LittleEndian.PutUint32(header[8:12], 0x00000010)         // payload offset (right after header)
	binary.LittleEndian.PutUint32(header[12:16], uint32(len(data))) // decompressed size
	return append(header, body...)
}
// lzEncoder holds mutable state for the LZ77 compression loop.
// Ported from ReFrontier JPKEncodeLz.cs.
//
// The format groups 8 items behind a flag byte (MSB = item 0):
//
// bit=0 → literal byte follows
// bit=1 → back-reference follows (with sub-cases below)
//
// Back-reference sub-cases:
//
// 10xx + 1 byte → length 36, offset ≤ 255
// 11 + 2 bytes → length 39, offset ≤ 8191 (length encoded in hi byte bits 75)
// 11 + 2 bytes + 0 + 4 bits → length 1025, offset ≤ 8191
// 11 + 2 bytes + 1 + 1 byte → length 26280, offset ≤ 8191
type lzEncoder struct {
flag byte
shiftIndex int
toWrite [1024]byte // data bytes for the current flag group
indexToWrite int
out []byte
}
func (e *lzEncoder) setFlag(value bool) {
if e.shiftIndex <= 0 {
e.flushFlag(false)
e.shiftIndex = 7
} else {
e.shiftIndex--
}
if value {
e.flag |= 1 << uint(e.shiftIndex)
}
}
// setFlagsReverse writes `count` bits of value MSB-first.
func (e *lzEncoder) setFlagsReverse(value byte, count int) {
for i := count - 1; i >= 0; i-- {
e.setFlag(((value >> uint(i)) & 1) == 1)
}
}
func (e *lzEncoder) writeByte(b byte) {
e.toWrite[e.indexToWrite] = b
e.indexToWrite++
}
func (e *lzEncoder) flushFlag(final bool) {
if !final || e.indexToWrite > 0 {
e.out = append(e.out, e.flag)
}
e.flag = 0
e.out = append(e.out, e.toWrite[:e.indexToWrite]...)
e.indexToWrite = 0
}
// lzEncode runs the JPK LZ77 compression over data and returns the raw
// compressed stream (no JKR header).
func lzEncode(data []byte) []byte {
	const (
		maxMatchLen = 280   // longest encodable back-reference
		windowSize  = 0x300 // look-back window (768 bytes)
	)
	enc := &lzEncoder{shiftIndex: 8}
	pos := 0
	for pos < len(data) {
		matchLen, matchOff := lzLongestRepetition(data, pos, maxMatchLen, windowSize)
		if matchLen == 0 {
			// No usable match: emit a literal byte.
			enc.setFlag(false)
			enc.writeByte(data[pos])
			pos++
			continue
		}
		enc.setFlag(true)
		if matchLen <= 6 && matchOff <= 0xff {
			// Short form: 10 + 2-bit length + 1-byte offset.
			enc.setFlag(false)
			enc.setFlagsReverse(byte(matchLen-3), 2)
			enc.writeByte(byte(matchOff))
		} else {
			// Long form: 11 + 2-byte offset word.
			enc.setFlag(true)
			word := uint16(matchOff)
			if matchLen <= 9 {
				// Lengths 3-9 ride in the top three bits of the word.
				word |= uint16(matchLen-2) << 13
			}
			enc.writeByte(byte(word >> 8))
			enc.writeByte(byte(word))
			if matchLen > 9 {
				if matchLen <= 25 {
					// Extended form A: flag 0 + 4-bit length (10-25).
					enc.setFlag(false)
					enc.setFlagsReverse(byte(matchLen-10), 4)
				} else {
					// Extended form B: flag 1 + 1-byte length (26-280).
					enc.setFlag(true)
					enc.writeByte(byte(matchLen - 0x1a))
				}
			}
		}
		pos += matchLen
	}
	enc.flushFlag(true)
	return enc.out
}
// lzLongestRepetition finds the longest match for data[pos:] in the look-back
// window. Returns (matchLen, encodedOffset) where encodedOffset is
// (pos - matchStart - 1). Returns (0, 0) when no usable match exists.
func lzLongestRepetition(data []byte, pos, compressionLevel, maxIndexDist int) (int, uint) {
const minLength = 3
// Clamp threshold to available bytes
threshold := compressionLevel
if remaining := len(data) - pos; remaining < threshold {
threshold = remaining
}
if pos == 0 || threshold < minLength {
return 0, 0
}
windowStart := pos - maxIndexDist
if windowStart < 0 {
windowStart = 0
}
maxLen := 0
var bestOffset uint
for left := windowStart; left < pos; left++ {
curLen := 0
for curLen < threshold && data[left+curLen] == data[pos+curLen] {
curLen++
}
if curLen >= minLength && curLen > maxLen {
maxLen = curLen
bestOffset = uint(pos - left - 1)
if maxLen >= threshold {
break
}
}
}
return maxLen, bestOffset
}

View File

@@ -0,0 +1,78 @@
package decryption
import (
"bytes"
"encoding/binary"
"testing"
)
func TestPackSimpleRoundTrip(t *testing.T) {
tests := []struct {
name string
data []byte
}{
{"single byte", []byte{0x42}},
{"ascii text", []byte("hello world")},
{"repeated pattern", bytes.Repeat([]byte{0xAB, 0xCD}, 100)},
{"all zeros", make([]byte, 256)},
{"all 0xFF", bytes.Repeat([]byte{0xFF}, 128)},
{"sequential bytes", func() []byte {
b := make([]byte, 256)
for i := range b {
b[i] = byte(i)
}
return b
}()},
{"long repeating run", bytes.Repeat([]byte("ABCDEFGH"), 50)},
{"mixed", []byte{0x00, 0x01, 0x02, 0xFF, 0xFE, 0xFD, 0x80, 0x81}},
}
for _, tc := range tests {
t.Run(tc.name, func(t *testing.T) {
compressed := PackSimple(tc.data)
got := UnpackSimple(compressed)
if !bytes.Equal(got, tc.data) {
t.Errorf("round-trip mismatch\n want len=%d\n got len=%d", len(tc.data), len(got))
}
})
}
}
// TestPackSimpleHeader checks the fixed JKR header fields PackSimple emits:
// magic, compression type, and decompressed-size trailer.
func TestPackSimpleHeader(t *testing.T) {
	payload := []byte("test data")
	out := PackSimple(payload)
	if len(out) < 16 {
		t.Fatalf("output too short: %d bytes", len(out))
	}
	if magic := binary.LittleEndian.Uint32(out[0:4]); magic != 0x1A524B4A {
		t.Errorf("wrong magic: got 0x%08X, want 0x1A524B4A", magic)
	}
	if jpkType := binary.LittleEndian.Uint16(out[6:8]); jpkType != 3 {
		t.Errorf("wrong type: got %d, want 3", jpkType)
	}
	if size := binary.LittleEndian.Uint32(out[12:16]); size != uint32(len(payload)) {
		t.Errorf("wrong decompressed size: got %d, want %d", size, len(payload))
	}
}
// TestPackSimpleLargeRepeating round-trips a larger, highly compressible
// buffer; compression ratio is logged but not asserted.
func TestPackSimpleLargeRepeating(t *testing.T) {
	// 4 KB of repeating pattern — should compress well
	want := bytes.Repeat([]byte{0xAA, 0xBB, 0xCC, 0xDD}, 1024)
	packed := PackSimple(want)
	if len(packed) >= len(want) {
		t.Logf("note: compressed (%d) not smaller than original (%d)", len(packed), len(want))
	}
	if got := UnpackSimple(packed); !bytes.Equal(got, want) {
		t.Errorf("round-trip failed for large repeating data")
	}
}

View File

@@ -12,45 +12,172 @@ When `IsScenario == true`, the client sends a `scenarioFileIdentifier`:
| Offset | Type | Field | Description |
|--------|--------|-------------|-------------|
| 0 | uint8 | CategoryID | Scenario category |
| 0 | uint8 | CategoryID | Scenario category (0=Basic, 1=Veteran, 3=Other, 6=Pallone, 7=Diva) |
| 1 | uint32 | MainID | Main scenario identifier |
| 5 | uint8 | ChapterID | Chapter within the scenario |
| 6 | uint8 | Flags | Bit flags selecting chunk types (see below) |
The server constructs the filename as:
```
{CategoryID}_0_0_0_S{MainID}_T{Flags}_C{ChapterID}.bin (or .json)
```
## Flags (Chunk Type Selection)
The `Flags` byte is a bitmask that selects which chunk types the client requests:
| Bit | Value | Type | Recursive | Content |
|------|-------|---------|-----------|---------|
| 0 | 0x01 | Chunk0 | Yes | Quest name/description + 0x14 byte info block |
| 1 | 0x02 | Chunk1 | Yes | NPC dialog(?) + 0x2C byte info block |
| 2 | 0x04 | — | — | Unknown (no instances found; possibly Chunk2) |
| 3 | 0x08 | Chunk0 | No | Episode listing (0x1 prefixed?) |
| 4 | 0x10 | Chunk1 | No | JKR-compressed blob, NPC dialog(?) |
| 5 | 0x20 | Chunk2 | No | JKR-compressed blob, menu options or quest titles(?) |
| 6 | 0x40 | — | — | Unknown (no instances found) |
| 7 | 0x80 | — | — | Unknown (no instances found) |
| Bit | Value | Format | Content |
|-----|-------|-----------------|---------|
| 0 | 0x01 | Sub-header | Quest name/description (chunk0) |
| 1 | 0x02 | Sub-header | NPC dialog (chunk1) |
| 2 | 0x04 | — | Unknown (no instances found) |
| 3 | 0x08 | Inline | Episode listing (chunk0 inline) |
| 4 | 0x10 | JKR-compressed | NPC dialog blob (chunk1) |
| 5 | 0x20 | JKR-compressed | Menu options or quest titles (chunk2) |
| 6 | 0x40 | — | Unknown (no instances found) |
| 7 | 0x80 | — | Unknown (no instances found) |
### Chunk Types
The flags are part of the filename — each unique `(CategoryID, MainID, Flags, ChapterID)` tuple corresponds to its own file on disk.
- **Chunk0**: Contains text data (quest names, descriptions, episode titles) with an accompanying fixed-size info block.
- **Chunk1**: Contains dialog or narrative text with a larger info block (0x2C bytes).
- **Chunk2**: Contains menu/selection text.
## Container Format (big-endian)
### Recursive vs Non-Recursive
```
Offset Field
@0x00 u32 BE chunk0_size
@0x04 u32 BE chunk1_size
@0x08 bytes chunk0_data (chunk0_size bytes)
@0x08+c0 bytes chunk1_data (chunk1_size bytes)
@0x08+c0+c1 u32 BE chunk2_size (only present if file continues)
bytes chunk2_data (chunk2_size bytes)
```
- **Recursive chunks** (flags 0x01, 0x02): The chunk data itself contains nested sub-chunks that must be parsed recursively.
- **Non-recursive chunks** (flags 0x08, 0x10, 0x20): The chunk is a flat binary blob. Flags 0x10 and 0x20 are JKR-compressed and must be decompressed before reading.
The 8-byte header is always present. Chunks with size 0 are absent. Chunk2 is only read if at least 4 bytes remain after chunk0+chunk1.
## Response Format
## Chunk Formats
The server responds with the scenario file data via `doAckBufSucceed`. The response is the raw binary blob matching the requested chunk types. If the scenario file is not found, the server sends `doAckBufFail` to prevent a client crash.
### Sub-header Format (flags 0x01, 0x02)
## Current Implementation
Used for structured text chunks containing named strings with metadata.
Scenario files are loaded from `quests/scenarios/` on disk. The server currently serves them as opaque binary blobs with no parsing. Issue #172 proposes adding JSON/CSV support for easier editing, which would require implementing a parser/serializer for this format.
**Sub-header (8 bytes, fields at byte offsets within the chunk):**
| Off | Type | Field | Notes |
|-----|---------|--------------|-------|
| 0 | u8 | Type | Usually `0x01` |
| 1 | u8 | Pad | Always `0x00`; used to detect this format vs inline |
| 2 | u16 LE | TotalSize | Total chunk size including this header |
| 4 | u8 | EntryCount | Number of string entries |
| 5 | u8 | Unknown1 | Unknown; preserved in JSON for round-trip |
| 6 | u8 | MetadataSize | Total bytes of the metadata block that follows |
| 7 | u8 | Unknown2 | Unknown; preserved in JSON for round-trip |
**Layout after the 8-byte header:**
```
[MetadataSize bytes: opaque metadata block]
[null-terminated Shift-JIS string #1]
[null-terminated Shift-JIS string #2]
...
[0xFF end-of-strings sentinel]
```
**Metadata block** (partially understood):
The metadata block is `MetadataSize` bytes long and covers all entries collectively. Known sizes observed in real files:
- Chunk0 (flag 0x01): `MetadataSize = 0x14` (20 bytes)
- Chunk1 (flag 0x02): `MetadataSize = 0x2C` (44 bytes)
The internal structure of the metadata is not yet fully documented. It is preserved verbatim in the JSON format as a base64 blob so that clients receive correct values even for unknown fields.
**Format detection for chunk0:** if `chunk_data[1] == 0x00` → sub-header, else → inline.
### Inline Format (flag 0x08)
Used for episode listings. Each entry is:
```
{u8 index}{null-terminated Shift-JIS string}
```
Entries are sequential with no separator. Null bytes between entries are ignored during parsing.
### JKR-compressed Chunks (flags 0x10, 0x20)
Chunks with flags 0x10 (chunk1) and 0x20 (chunk2) are JKR-compressed blobs. The JKR header (magic `0x1A524B4A`) appears at the start of the chunk data.
The decompressed content contains metadata bytes interleaved with null-terminated Shift-JIS strings, but the detailed format is not yet fully documented. These chunks are stored as opaque base64 blobs in the JSON format and served to the client unchanged.
## JSON Format (for `.json` scenario files)
Erupe supports `.json` files in `bin/scenarios/` as an alternative to `.bin` files. The server compiles `.json` to wire format on demand. `.bin` takes priority if both exist.
Example `0_0_0_0_S102_T1_C0.json`:
```json
{
"chunk0": {
"subheader": {
"type": 1,
"unknown1": 0,
"unknown2": 0,
"metadata": "AAAAAAAAAAAAAAAAAAAAAAAAAAAA",
"strings": ["Quest Name", "Quest description goes here."]
}
}
}
```
Example with inline chunk0 (flag 0x08):
```json
{
"chunk0": {
"inline": [
{"index": 1, "text": "Chapter 1"},
{"index": 2, "text": "Chapter 2"}
]
}
}
```
Example with both chunk0 and chunk1:
```json
{
"chunk0": {
"subheader": {
"type": 1, "unknown1": 0, "unknown2": 0,
"metadata": "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA",
"strings": ["Quest Name"]
}
},
"chunk1": {
"subheader": {
"type": 1, "unknown1": 0, "unknown2": 0,
"metadata": "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA",
"strings": ["NPC: Welcome, hunter.", "NPC: Good luck!"]
}
}
}
```
**Key fields:**
- `metadata`: Base64-encoded opaque blob. Copy from `ParseScenarioBinary` output. For new scenarios with zero-filled metadata, use a base64 string of the right number of zero bytes.
- `strings`: UTF-8 text. The compiler converts to Shift-JIS on the wire.
- `chunk2.data`: Raw JKR-compressed bytes, base64-encoded. Copy from the original `.bin` file.
## JKR Compression
Chunks with flags 0x10 and 0x20 use JPK compression (magic bytes `0x1A524B4A`). See the ReFrontier tool for decompression utilities.
Chunks with flags 0x10 and 0x20 use JKR compression (magic `0x1A524B4A`, type 3 LZ77). The Go compressor is in `common/decryption.PackSimple` and the decompressor in `common/decryption.UnpackSimple`. These implement type-3 (LZ-only) compression, which is the format used throughout Erupe.
Type-4 (HFI = Huffman + LZ77) JKR blobs from real game files pass through as opaque base64 in `.json` — the server serves them as-is without re-compression.
## Implementation
- **Handler**: `server/channelserver/handlers_quest.go` → `handleMsgSysGetFile` → `loadScenarioBinary`
- **JSON schema + compiler**: `server/channelserver/scenario_json.go`
- **JKR compressor**: `common/decryption/jpk_compress.go` (`PackSimple`)
- **JKR decompressor**: `common/decryption/jpk.go` (`UnpackSimple`)

View File

@@ -106,10 +106,9 @@ func handleMsgSysGetFile(s *Session, p mhfpacket.MHFPacket) {
)
}
filename := fmt.Sprintf("%d_0_0_0_S%d_T%d_C%d", pkt.ScenarioIdentifer.CategoryID, pkt.ScenarioIdentifer.MainID, pkt.ScenarioIdentifer.Flags, pkt.ScenarioIdentifer.ChapterID)
// Read the scenario file.
data, err := os.ReadFile(filepath.Join(s.server.erupeConfig.BinPath, fmt.Sprintf("scenarios/%s.bin", filename)))
data, err := loadScenarioBinary(s, filename)
if err != nil {
s.logger.Error("Failed to open scenario file", zap.String("binPath", s.server.erupeConfig.BinPath), zap.String("filename", filename))
s.logger.Error("Failed to open scenario file", zap.String("binPath", s.server.erupeConfig.BinPath), zap.String("filename", filename), zap.Error(err))
doAckBufFail(s, pkt.AckHandle, nil)
return
}
@@ -168,6 +167,26 @@ func loadQuestBinary(s *Session, filename string) ([]byte, error) {
return compiled, nil
}
// loadScenarioBinary resolves a scenario by name: a pre-built .bin file wins;
// otherwise a .json source is compiled on the fly to the MHF binary wire
// format. Mirrors the loadQuestBinary lookup pattern.
func loadScenarioBinary(s *Session, filename string) ([]byte, error) {
	base := filepath.Join(s.server.erupeConfig.BinPath, "scenarios", filename)
	data, binErr := os.ReadFile(base + ".bin")
	if binErr == nil {
		return data, nil
	}
	jsonData, jsonErr := os.ReadFile(base + ".json")
	if jsonErr != nil {
		return nil, jsonErr
	}
	compiled, err := CompileScenarioJSON(jsonData)
	if err != nil {
		return nil, fmt.Errorf("compile scenario JSON %s: %w", filename, err)
	}
	return compiled, nil
}
func seasonConversion(s *Session, questFile string) string {
// Try the seasonal override file (e.g., 00001d2 for season 2)
filename := fmt.Sprintf("%s%d", questFile[:6], s.server.Season())

View File

@@ -0,0 +1,432 @@
package channelserver
import (
"bytes"
"encoding/base64"
"encoding/binary"
"encoding/json"
"fmt"
"golang.org/x/text/encoding/japanese"
"golang.org/x/text/transform"
)
// ── JSON schema types ────────────────────────────────────────────────────────

// ScenarioJSON is the open, human-editable representation of a scenario .bin file.
// Strings are stored as UTF-8; the compiler converts to/from Shift-JIS.
//
// Container layout (big-endian sizes):
//
//	@0x00: u32 BE chunk0_size
//	@0x04: u32 BE chunk1_size
//	[chunk0_data]
//	[chunk1_data]
//	u32 BE chunk2_size (only present when non-zero)
//	[chunk2_data]
type ScenarioJSON struct {
	// Chunk0 holds quest name/description data (sub-header or inline format).
	Chunk0 *ScenarioChunk0JSON `json:"chunk0,omitempty"`
	// Chunk1 holds NPC dialog data (sub-header format or raw JKR blob).
	Chunk1 *ScenarioChunk1JSON `json:"chunk1,omitempty"`
	// Chunk2 holds JKR-compressed menu/title data.
	Chunk2 *ScenarioRawChunkJSON `json:"chunk2,omitempty"`
}

// ScenarioChunk0JSON represents chunk0, which is either sub-header or inline format.
// Exactly one of Subheader/Inline is non-nil.
type ScenarioChunk0JSON struct {
	Subheader *ScenarioSubheaderJSON `json:"subheader,omitempty"`
	Inline    []ScenarioInlineEntry  `json:"inline,omitempty"`
}

// ScenarioChunk1JSON represents chunk1, which is either sub-header or raw JKR.
// Exactly one of Subheader/JKR is non-nil.
type ScenarioChunk1JSON struct {
	Subheader *ScenarioSubheaderJSON `json:"subheader,omitempty"`
	JKR       *ScenarioRawChunkJSON  `json:"jkr,omitempty"`
}

// ScenarioSubheaderJSON represents a chunk in sub-header format.
//
// Sub-header binary layout (8 bytes, little-endian where applicable):
//
//	@0: u8  Type     (usually 0x01)
//	@1: u8  0x00     (pad; distinguishes this format from inline)
//	@2: u16 Size     (total chunk size including this header)
//	@4: u8  Count    (number of string entries)
//	@5: u8  Unknown1
//	@6: u8  MetaSize (total bytes of metadata block)
//	@7: u8  Unknown2
//	[MetaSize bytes: opaque metadata (string IDs, offsets, flags — partially unknown)]
//	[null-terminated Shift-JIS strings, one per entry]
//	[0xFF end-of-strings sentinel]
//
// Size, Count, and MetaSize are not stored here; the compiler recomputes them
// from Metadata and Strings.
type ScenarioSubheaderJSON struct {
	// Type is the chunk type byte (almost always 0x01).
	Type uint8 `json:"type"`
	// Unknown1 and Unknown2 are unidentified header bytes, preserved verbatim
	// so that parse → compile round-trips losslessly.
	Unknown1 uint8 `json:"unknown1"`
	Unknown2 uint8 `json:"unknown2"`
	// Metadata is the opaque metadata block, base64-encoded.
	// Preserving it unchanged ensures correct client behavior for fields
	// whose meaning is not yet fully understood.
	Metadata string `json:"metadata"`
	// Strings contains the human-editable text (UTF-8).
	Strings []string `json:"strings"`
}

// ScenarioInlineEntry is one entry in an inline-format chunk0.
// Format on wire: {u8 index}{Shift-JIS string}{0x00}.
type ScenarioInlineEntry struct {
	Index uint8  `json:"index"`
	Text  string `json:"text"`
}

// ScenarioRawChunkJSON stores a JKR-compressed chunk as its raw compressed bytes.
// The data is served to the client as-is; the format of the decompressed content
// is not yet fully documented.
type ScenarioRawChunkJSON struct {
	// Data is the raw JKR-compressed bytes, base64-encoded.
	Data string `json:"data"`
}
// ── Parse: binary → JSON ─────────────────────────────────────────────────────

// ParseScenarioBinary decodes a scenario .bin container into the editable
// ScenarioJSON form, suitable for re-compilation with CompileScenarioJSON.
// Layout: two big-endian u32 sizes for chunk0/chunk1, their payloads, then an
// optional size-prefixed chunk2 payload.
func ParseScenarioBinary(data []byte) (*ScenarioJSON, error) {
	if len(data) < 8 {
		return nil, fmt.Errorf("scenario data too short: %d bytes", len(data))
	}
	out := &ScenarioJSON{}
	size0 := int(binary.BigEndian.Uint32(data[0:4]))
	size1 := int(binary.BigEndian.Uint32(data[4:8]))
	off0 := 8
	off1 := off0 + size0
	// Chunk0
	if size0 > 0 {
		if off0+size0 > len(data) {
			return nil, fmt.Errorf("chunk0 size %d overruns data at offset %d", size0, off0)
		}
		c0, err := parseScenarioChunk0(data[off0:off1])
		if err != nil {
			return nil, fmt.Errorf("chunk0: %w", err)
		}
		out.Chunk0 = c0
	}
	// Chunk1
	if size1 > 0 {
		if off1+size1 > len(data) {
			return nil, fmt.Errorf("chunk1 size %d overruns data at offset %d", size1, off1)
		}
		c1, err := parseScenarioChunk1(data[off1 : off1+size1])
		if err != nil {
			return nil, fmt.Errorf("chunk1: %w", err)
		}
		out.Chunk1 = c1
	}
	// Chunk2 carries its own big-endian u32 size immediately before its data
	// and is only read when at least 4 bytes remain.
	hdr2 := off1 + size1
	if hdr2+4 <= len(data) {
		size2 := int(binary.BigEndian.Uint32(data[hdr2 : hdr2+4]))
		if size2 > 0 {
			off2 := hdr2 + 4
			if off2+size2 > len(data) {
				return nil, fmt.Errorf("chunk2 size %d overruns data at offset %d", size2, off2)
			}
			out.Chunk2 = &ScenarioRawChunkJSON{
				Data: base64.StdEncoding.EncodeToString(data[off2 : off2+size2]),
			}
		}
	}
	return out, nil
}
// parseScenarioChunk0 auto-detects the chunk0 encoding: a 0x00 second byte is
// the sub-header pad byte, so the chunk is sub-header format; anything else
// means inline format.
func parseScenarioChunk0(data []byte) (*ScenarioChunk0JSON, error) {
	if len(data) < 2 {
		return &ScenarioChunk0JSON{}, nil
	}
	if data[1] != 0x00 {
		entries, err := parseScenarioInline(data)
		if err != nil {
			return nil, err
		}
		return &ScenarioChunk0JSON{Inline: entries}, nil
	}
	sh, err := parseScenarioSubheader(data)
	if err != nil {
		return nil, err
	}
	return &ScenarioChunk0JSON{Subheader: sh}, nil
}
// parseScenarioChunk1 treats chunk1 as an opaque JKR blob when the JKR magic
// leads the data, otherwise as a sub-header format chunk.
func parseScenarioChunk1(data []byte) (*ScenarioChunk1JSON, error) {
	const jkrMagic = 0x1A524B4A
	if len(data) >= 4 && binary.LittleEndian.Uint32(data[0:4]) == jkrMagic {
		raw := &ScenarioRawChunkJSON{Data: base64.StdEncoding.EncodeToString(data)}
		return &ScenarioChunk1JSON{JKR: raw}, nil
	}
	sh, err := parseScenarioSubheader(data)
	if err != nil {
		return nil, err
	}
	return &ScenarioChunk1JSON{Subheader: sh}, nil
}
// parseScenarioSubheader decodes a sub-header chunk: the 8-byte header,
// MetaSize bytes of opaque metadata, then null-terminated Shift-JIS strings
// up to the 0xFF sentinel.
func parseScenarioSubheader(data []byte) (*ScenarioSubheaderJSON, error) {
	if len(data) < 8 {
		return nil, fmt.Errorf("sub-header chunk too short: %d bytes", len(data))
	}
	// Header bytes: @0 type, @1 pad (implicit 0x00, not stored), @2:4 u16 LE
	// total size (recomputed on compile), @4 entry count, @5 unknown1,
	// @6 metadata size, @7 unknown2.
	entryCount := int(data[4])
	metaSize := int(data[6])
	metaEnd := 8 + metaSize
	if metaEnd > len(data) {
		return nil, fmt.Errorf("metadata block (size %d) overruns chunk (len %d)", metaSize, len(data))
	}
	strs, err := scenarioReadStrings(data, metaEnd, entryCount)
	if err != nil {
		return nil, err
	}
	return &ScenarioSubheaderJSON{
		Type:     data[0],
		Unknown1: data[5],
		Unknown2: data[7],
		Metadata: base64.StdEncoding.EncodeToString(data[8:metaEnd]),
		Strings:  strs,
	}, nil
}
// parseScenarioInline decodes the inline chunk0 format, a sequence of
// {u8 index}{null-terminated Shift-JIS string} entries with no separators;
// stray null bytes between entries are skipped.
// NOTE(review): an entry whose string is empty is dropped rather than kept
// with empty text — confirm no real file relies on empty inline entries.
func parseScenarioInline(data []byte) ([]ScenarioInlineEntry, error) {
	var entries []ScenarioInlineEntry
	for pos := 0; pos < len(data); {
		if data[pos] == 0x00 {
			pos++
			continue
		}
		idx := data[pos]
		pos++
		if pos >= len(data) {
			break
		}
		end := pos
		for end < len(data) && data[end] != 0x00 {
			end++
		}
		if end > pos {
			text, err := scenarioDecodeShiftJIS(data[pos:end])
			if err != nil {
				return nil, fmt.Errorf("inline entry at 0x%x: %w", pos, err)
			}
			entries = append(entries, ScenarioInlineEntry{Index: idx, Text: text})
		}
		pos = end + 1 // step over the null terminator
	}
	return entries, nil
}
// scenarioReadStrings collects null-terminated Shift-JIS strings beginning at
// start, stopping at the 0xFF sentinel, the end of data, or after maxCount
// strings (maxCount == 0 means unlimited). Runs of null bytes are skipped.
func scenarioReadStrings(data []byte, start, maxCount int) ([]string, error) {
	var out []string
	for pos := start; pos < len(data); {
		if maxCount > 0 && len(out) >= maxCount {
			break
		}
		switch data[pos] {
		case 0x00:
			pos++
			continue
		case 0xFF:
			return out, nil // end-of-strings sentinel
		}
		end := pos
		for end < len(data) && data[end] != 0x00 {
			end++
		}
		if end > pos {
			text, err := scenarioDecodeShiftJIS(data[pos:end])
			if err != nil {
				return nil, fmt.Errorf("string at 0x%x: %w", pos, err)
			}
			out = append(out, text)
		}
		pos = end + 1
	}
	return out, nil
}
// ── Compile: JSON → binary ───────────────────────────────────────────────────

// CompileScenarioJSON parses jsonData and compiles it to the MHF scenario
// binary wire format.
func CompileScenarioJSON(jsonData []byte) ([]byte, error) {
	var scenario ScenarioJSON
	if err := json.Unmarshal(jsonData, &scenario); err != nil {
		return nil, fmt.Errorf("unmarshal scenario JSON: %w", err)
	}
	return compileScenario(&scenario)
}
// compileScenario assembles the scenario container: big-endian u32 sizes for
// chunk0 and chunk1, their payloads, and — only when non-empty — chunk2
// prefixed by its own u32 size.
func compileScenario(s *ScenarioJSON) ([]byte, error) {
	var c0, c1, c2 []byte
	var err error
	if s.Chunk0 != nil {
		if c0, err = compileScenarioChunk0(s.Chunk0); err != nil {
			return nil, fmt.Errorf("chunk0: %w", err)
		}
	}
	if s.Chunk1 != nil {
		if c1, err = compileScenarioChunk1(s.Chunk1); err != nil {
			return nil, fmt.Errorf("chunk1: %w", err)
		}
	}
	if s.Chunk2 != nil {
		if c2, err = compileScenarioRawChunk(s.Chunk2); err != nil {
			return nil, fmt.Errorf("chunk2: %w", err)
		}
	}
	var out bytes.Buffer
	var size [4]byte
	binary.BigEndian.PutUint32(size[:], uint32(len(c0)))
	out.Write(size[:])
	binary.BigEndian.PutUint32(size[:], uint32(len(c1)))
	out.Write(size[:])
	out.Write(c0)
	out.Write(c1)
	if len(c2) > 0 {
		binary.BigEndian.PutUint32(size[:], uint32(len(c2)))
		out.Write(size[:])
		out.Write(c2)
	}
	return out.Bytes(), nil
}
// compileScenarioChunk0 emits chunk0 in sub-header format when present,
// otherwise in inline format (an empty Inline slice compiles to no bytes).
func compileScenarioChunk0(c *ScenarioChunk0JSON) ([]byte, error) {
	if c.Subheader == nil {
		return compileScenarioInline(c.Inline)
	}
	return compileScenarioSubheader(c.Subheader)
}
// compileScenarioChunk1 emits chunk1: a raw JKR blob takes priority, then a
// sub-header chunk; a fully-empty chunk1 compiles to nothing.
func compileScenarioChunk1(c *ScenarioChunk1JSON) ([]byte, error) {
	switch {
	case c.JKR != nil:
		return compileScenarioRawChunk(c.JKR)
	case c.Subheader != nil:
		return compileScenarioSubheader(c.Subheader)
	default:
		return nil, nil
	}
}
// compileScenarioSubheader builds the binary sub-header chunk:
// [8-byte header][metadata][null-terminated Shift-JIS strings][0xFF]
//
// The header stores the entry count and metadata size as u8 and the total
// chunk size as u16 LE, so those ranges are validated here rather than being
// silently truncated by the byte() conversions.
func compileScenarioSubheader(sh *ScenarioSubheaderJSON) ([]byte, error) {
	meta, err := base64.StdEncoding.DecodeString(sh.Metadata)
	if err != nil {
		return nil, fmt.Errorf("decode metadata base64: %w", err)
	}
	if len(meta) > 0xFF {
		return nil, fmt.Errorf("metadata too large: %d bytes (max 255)", len(meta))
	}
	if len(sh.Strings) > 0xFF {
		return nil, fmt.Errorf("too many strings: %d (max 255)", len(sh.Strings))
	}
	var strBuf bytes.Buffer
	for _, s := range sh.Strings {
		sjis, err := scenarioEncodeShiftJIS(s)
		if err != nil {
			return nil, err
		}
		strBuf.Write(sjis) // sjis already has null terminator from helper
	}
	strBuf.WriteByte(0xFF) // end-of-strings sentinel
	// Total size = 8-byte header + metadata + strings
	totalSize := 8 + len(meta) + strBuf.Len()
	if totalSize > 0xFFFF {
		return nil, fmt.Errorf("chunk too large: %d bytes (max 65535)", totalSize)
	}
	var buf bytes.Buffer
	buf.WriteByte(sh.Type)
	buf.WriteByte(0x00) // pad (format detector)
	// u16 LE total size
	buf.WriteByte(byte(totalSize))
	buf.WriteByte(byte(totalSize >> 8))
	buf.WriteByte(byte(len(sh.Strings))) // entry count
	buf.WriteByte(sh.Unknown1)
	buf.WriteByte(byte(len(meta))) // metadata total size
	buf.WriteByte(sh.Unknown2)
	buf.Write(meta)
	buf.Write(strBuf.Bytes())
	return buf.Bytes(), nil
}
// compileScenarioInline serializes inline-format entries as
// {u8 index}{null-terminated Shift-JIS string} with no separators.
func compileScenarioInline(entries []ScenarioInlineEntry) ([]byte, error) {
	var out bytes.Buffer
	for _, entry := range entries {
		sjis, err := scenarioEncodeShiftJIS(entry.Text)
		if err != nil {
			return nil, err
		}
		out.WriteByte(entry.Index)
		out.Write(sjis) // helper output already carries the null terminator
	}
	return out.Bytes(), nil
}
// compileScenarioRawChunk decodes a base64 raw chunk. The resulting bytes are
// served to the client verbatim (no re-compression).
func compileScenarioRawChunk(rc *ScenarioRawChunkJSON) ([]byte, error) {
	decoded, err := base64.StdEncoding.DecodeString(rc.Data)
	if err != nil {
		return nil, fmt.Errorf("decode raw chunk base64: %w", err)
	}
	return decoded, nil
}
// ── String helpers ───────────────────────────────────────────────────────────
// scenarioDecodeShiftJIS converts a raw Shift-JIS byte slice to UTF-8 string.
func scenarioDecodeShiftJIS(b []byte) (string, error) {
dec := japanese.ShiftJIS.NewDecoder()
out, _, err := transform.Bytes(dec, b)
if err != nil {
return "", fmt.Errorf("shift-jis decode: %w", err)
}
return string(out), nil
}
// scenarioEncodeShiftJIS converts a UTF-8 string to a null-terminated Shift-JIS byte slice.
func scenarioEncodeShiftJIS(s string) ([]byte, error) {
enc := japanese.ShiftJIS.NewEncoder()
out, _, err := transform.Bytes(enc, []byte(s))
if err != nil {
return nil, fmt.Errorf("shift-jis encode %q: %w", s, err)
}
return append(out, 0x00), nil
}

--- new file: scenario_json_test.go ---
package channelserver
import (
"bytes"
"encoding/binary"
"encoding/json"
"testing"
)
// ── test helpers ─────────────────────────────────────────────────────────────
// buildTestSubheaderChunk constructs a minimal sub-header format chunk whose
// metadata block is metaSize zero bytes.
func buildTestSubheaderChunk(t *testing.T, texts []string, metaSize int) []byte {
	t.Helper()
	var names bytes.Buffer
	for _, txt := range texts {
		enc, err := scenarioEncodeShiftJIS(txt)
		if err != nil {
			t.Fatalf("encode %q: %v", txt, err)
		}
		names.Write(enc)
	}
	names.WriteByte(0xFF) // end sentinel
	total := 8 + metaSize + names.Len()
	var chunk bytes.Buffer
	chunk.Write([]byte{
		0x01,             // type
		0x00,             // pad
		byte(total),      // size lo (u16 LE)
		byte(total >> 8), // size hi
		byte(len(texts)), // entry count
		0x00,             // unknown1
		byte(metaSize),   // metadata total
		0x00,             // unknown2
	})
	chunk.Write(make([]byte, metaSize)) // zero metadata
	chunk.Write(names.Bytes())
	return chunk.Bytes()
}
// buildTestInlineChunk constructs an inline-format chunk0 from a list of
// strings, assigning 1-based indices in order.
func buildTestInlineChunk(t *testing.T, texts []string) []byte {
	t.Helper()
	var chunk bytes.Buffer
	for pos, txt := range texts {
		enc, err := scenarioEncodeShiftJIS(txt)
		if err != nil {
			t.Fatalf("encode %q: %v", txt, err)
		}
		chunk.WriteByte(byte(pos + 1)) // 1-based index
		chunk.Write(enc)
	}
	return chunk.Bytes()
}
// buildTestScenarioBinary assembles a complete scenario container for testing.
func buildTestScenarioBinary(t *testing.T, c0, c1 []byte) []byte {
t.Helper()
var buf bytes.Buffer
if err := binary.Write(&buf, binary.BigEndian, uint32(len(c0))); err != nil {
t.Fatal(err)
}
if err := binary.Write(&buf, binary.BigEndian, uint32(len(c1))); err != nil {
t.Fatal(err)
}
buf.Write(c0)
buf.Write(c1)
// c2 size = 0
if err := binary.Write(&buf, binary.BigEndian, uint32(0)); err != nil {
t.Fatal(err)
}
return buf.Bytes()
}
// extractStringsFromScenario parses a scenario binary and collects every
// string it contains: chunk0 sub-header strings, then chunk0 inline entry
// texts, then chunk1 sub-header strings.
func extractStringsFromScenario(t *testing.T, data []byte) []string {
	t.Helper()
	parsed, err := ParseScenarioBinary(data)
	if err != nil {
		t.Fatalf("ParseScenarioBinary: %v", err)
	}
	var out []string
	if c0 := parsed.Chunk0; c0 != nil {
		if c0.Subheader != nil {
			out = append(out, c0.Subheader.Strings...)
		}
		for _, entry := range c0.Inline {
			out = append(out, entry.Text)
		}
	}
	if c1 := parsed.Chunk1; c1 != nil && c1.Subheader != nil {
		out = append(out, c1.Subheader.Strings...)
	}
	return out
}
// ── parse tests ──────────────────────────────────────────────────────────────
// A buffer shorter than a single chunk-size header must be rejected.
func TestParseScenarioBinary_TooShort(t *testing.T) {
	if _, err := ParseScenarioBinary([]byte{0x00, 0x01}); err == nil {
		t.Error("expected error for short input")
	}
}
func TestParseScenarioBinary_EmptyChunks(t *testing.T) {
data := buildTestScenarioBinary(t, nil, nil)
s, err := ParseScenarioBinary(data)
if err != nil {
t.Fatalf("unexpected error: %v", err)
}
if s.Chunk0 != nil || s.Chunk1 != nil || s.Chunk2 != nil {
t.Error("expected all chunks nil for empty scenario")
}
}
func TestParseScenarioBinary_SubheaderChunk0(t *testing.T) {
c0 := buildTestSubheaderChunk(t, []string{"Quest A", "Quest B"}, 4)
data := buildTestScenarioBinary(t, c0, nil)
s, err := ParseScenarioBinary(data)
if err != nil {
t.Fatalf("unexpected error: %v", err)
}
if s.Chunk0 == nil || s.Chunk0.Subheader == nil {
t.Fatal("expected chunk0 subheader")
}
got := s.Chunk0.Subheader.Strings
want := []string{"Quest A", "Quest B"}
if len(got) != len(want) {
t.Fatalf("string count: got %d, want %d", len(got), len(want))
}
for i := range want {
if got[i] != want[i] {
t.Errorf("[%d]: got %q, want %q", i, got[i], want[i])
}
}
}
// TestParseScenarioBinary_InlineChunk0 verifies inline-format chunk0 entries
// are decoded with their text intact.
func TestParseScenarioBinary_InlineChunk0(t *testing.T) {
	c0 := buildTestInlineChunk(t, []string{"Item1", "Item2"})
	data := buildTestScenarioBinary(t, c0, nil)
	s, err := ParseScenarioBinary(data)
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	if s.Chunk0 == nil {
		t.Fatal("expected chunk0 inline entries")
	}
	want := []string{"Item1", "Item2"}
	// Compare counts first: ranging over the parsed entries and indexing
	// want[i] would panic with index-out-of-range if the parser produced
	// extra entries, instead of failing cleanly.
	if len(s.Chunk0.Inline) != len(want) {
		t.Fatalf("inline entry count: got %d, want %d", len(s.Chunk0.Inline), len(want))
	}
	for i, e := range s.Chunk0.Inline {
		if e.Text != want[i] {
			t.Errorf("[%d]: got %q, want %q", i, e.Text, want[i])
		}
	}
}
func TestParseScenarioBinary_BothChunks(t *testing.T) {
c0 := buildTestSubheaderChunk(t, []string{"Quest"}, 4)
c1 := buildTestSubheaderChunk(t, []string{"NPC1", "NPC2"}, 8)
data := buildTestScenarioBinary(t, c0, c1)
strings := extractStringsFromScenario(t, data)
want := []string{"Quest", "NPC1", "NPC2"}
if len(strings) != len(want) {
t.Fatalf("string count: got %d, want %d", len(strings), len(want))
}
for i := range want {
if strings[i] != want[i] {
t.Errorf("[%d]: got %q, want %q", i, strings[i], want[i])
}
}
}
// TestParseScenarioBinary_Japanese verifies multi-byte Shift-JIS strings
// survive decoding.
func TestParseScenarioBinary_Japanese(t *testing.T) {
	c0 := buildTestSubheaderChunk(t, []string{"テスト", "日本語"}, 4)
	data := buildTestScenarioBinary(t, c0, nil)
	s, err := ParseScenarioBinary(data)
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	// Guard the dereference and the count: without these, a parser shape
	// regression panics the test (nil deref / index out of range) instead
	// of failing it.
	if s.Chunk0 == nil || s.Chunk0.Subheader == nil {
		t.Fatal("expected chunk0 subheader")
	}
	want := []string{"テスト", "日本語"}
	got := s.Chunk0.Subheader.Strings
	if len(got) != len(want) {
		t.Fatalf("string count: got %d, want %d", len(got), len(want))
	}
	for i := range want {
		if got[i] != want[i] {
			t.Errorf("[%d]: got %q, want %q", i, got[i], want[i])
		}
	}
}
// ── compile tests ─────────────────────────────────────────────────────────────
// TestCompileScenarioJSON_Subheader compiles a JSON scenario with a
// sub-header chunk0 and verifies the strings survive a parse of the output.
func TestCompileScenarioJSON_Subheader(t *testing.T) {
	input := &ScenarioJSON{
		Chunk0: &ScenarioChunk0JSON{
			Subheader: &ScenarioSubheaderJSON{
				Type:     0x01,
				Unknown1: 0x00,
				Unknown2: 0x00,
				Metadata: "AAAABBBB", // base64 of the 6-byte blob 00 00 00 04 10 41
				Strings:  []string{"Hello", "World"},
			},
		},
	}
	jsonData, err := json.Marshal(input)
	if err != nil {
		t.Fatalf("marshal: %v", err)
	}
	compiled, err := CompileScenarioJSON(jsonData)
	if err != nil {
		t.Fatalf("CompileScenarioJSON: %v", err)
	}
	// Parse the compiled output and verify strings survive
	result, err := ParseScenarioBinary(compiled)
	if err != nil {
		t.Fatalf("ParseScenarioBinary on compiled output: %v", err)
	}
	if result.Chunk0 == nil || result.Chunk0.Subheader == nil {
		t.Fatal("expected chunk0 subheader in compiled output")
	}
	want := []string{"Hello", "World"}
	got := result.Chunk0.Subheader.Strings
	// Fail on a count mismatch before indexing: the old error branch
	// evaluated got[i] even when i >= len(got), which panicked.
	if len(got) != len(want) {
		t.Fatalf("string count: got %d, want %d", len(got), len(want))
	}
	for i := range want {
		if got[i] != want[i] {
			t.Errorf("[%d]: got %q, want %q", i, got[i], want[i])
		}
	}
}
// TestCompileScenarioJSON_Inline compiles a JSON scenario with inline chunk0
// entries and verifies the round-tripped texts.
func TestCompileScenarioJSON_Inline(t *testing.T) {
	input := &ScenarioJSON{
		Chunk0: &ScenarioChunk0JSON{
			Inline: []ScenarioInlineEntry{
				{Index: 1, Text: "Sword"},
				{Index: 2, Text: "Shield"},
			},
		},
	}
	// Check the marshal error instead of discarding it — a marshal failure
	// would otherwise surface as a confusing compile error downstream.
	jsonData, err := json.Marshal(input)
	if err != nil {
		t.Fatalf("marshal: %v", err)
	}
	compiled, err := CompileScenarioJSON(jsonData)
	if err != nil {
		t.Fatalf("CompileScenarioJSON: %v", err)
	}
	result, err := ParseScenarioBinary(compiled)
	if err != nil {
		t.Fatalf("ParseScenarioBinary: %v", err)
	}
	if result.Chunk0 == nil || len(result.Chunk0.Inline) != 2 {
		t.Fatal("expected 2 inline entries")
	}
	if result.Chunk0.Inline[0].Text != "Sword" {
		t.Errorf("got %q, want Sword", result.Chunk0.Inline[0].Text)
	}
	if result.Chunk0.Inline[1].Text != "Shield" {
		t.Errorf("got %q, want Shield", result.Chunk0.Inline[1].Text)
	}
}
// ── round-trip tests ─────────────────────────────────────────────────────────
func TestScenarioRoundTrip_Subheader(t *testing.T) {
original := buildTestScenarioBinary(t,
buildTestSubheaderChunk(t, []string{"QuestName", "Description"}, 0x14),
buildTestSubheaderChunk(t, []string{"Dialog1", "Dialog2", "Dialog3"}, 0x2C),
)
s, err := ParseScenarioBinary(original)
if err != nil {
t.Fatalf("parse: %v", err)
}
jsonData, err := json.Marshal(s)
if err != nil {
t.Fatalf("marshal: %v", err)
}
compiled, err := CompileScenarioJSON(jsonData)
if err != nil {
t.Fatalf("compile: %v", err)
}
// Re-parse compiled and compare strings
wantStrings := []string{"QuestName", "Description", "Dialog1", "Dialog2", "Dialog3"}
gotStrings := extractStringsFromScenario(t, compiled)
if len(gotStrings) != len(wantStrings) {
t.Fatalf("string count: got %d, want %d", len(gotStrings), len(wantStrings))
}
for i := range wantStrings {
if gotStrings[i] != wantStrings[i] {
t.Errorf("[%d]: got %q, want %q", i, gotStrings[i], wantStrings[i])
}
}
}
// TestScenarioRoundTrip_Inline verifies inline entries survive a full
// binary → JSON → binary round trip.
func TestScenarioRoundTrip_Inline(t *testing.T) {
	original := buildTestScenarioBinary(t,
		buildTestInlineChunk(t, []string{"EpisodeA", "EpisodeB"}),
		nil,
	)
	// Check parse/marshal errors instead of discarding them: a nil parse
	// result would have made json.Marshal encode garbage silently.
	s, err := ParseScenarioBinary(original)
	if err != nil {
		t.Fatalf("parse: %v", err)
	}
	jsonData, err := json.Marshal(s)
	if err != nil {
		t.Fatalf("marshal: %v", err)
	}
	compiled, err := CompileScenarioJSON(jsonData)
	if err != nil {
		t.Fatalf("compile: %v", err)
	}
	got := extractStringsFromScenario(t, compiled)
	want := []string{"EpisodeA", "EpisodeB"}
	// Count check up front: the old error branch read got[i] even when
	// i >= len(got), panicking instead of reporting the mismatch.
	if len(got) != len(want) {
		t.Fatalf("string count: got %d, want %d", len(got), len(want))
	}
	for i := range want {
		if got[i] != want[i] {
			t.Errorf("[%d]: got %q, want %q", i, got[i], want[i])
		}
	}
}
// TestScenarioRoundTrip_MetadataPreserved checks that the opaque metadata
// block and the unknown header bytes survive parse → JSON → compile unchanged.
func TestScenarioRoundTrip_MetadataPreserved(t *testing.T) {
	// The metadata block must survive parse → JSON → compile unchanged.
	metaBytes := []byte{0x01, 0x02, 0x03, 0x04, 0xFF, 0xFE, 0xFD, 0xFC}
	// Build a chunk with custom metadata and unknown field values by hand.
	var buf bytes.Buffer
	str := []byte("A\x00\xFF") // one string "A" + null, then 0xFF sentinel
	totalSize := 8 + len(metaBytes) + len(str)
	buf.WriteByte(0x01)                 // type
	buf.WriteByte(0x00)                 // pad
	buf.WriteByte(byte(totalSize))      // size lo
	buf.WriteByte(byte(totalSize >> 8)) // size hi
	buf.WriteByte(0x01)                 // entry count
	buf.WriteByte(0xAA)                 // unknown1
	buf.WriteByte(byte(len(metaBytes))) // metadata total
	buf.WriteByte(0xBB)                 // unknown2
	buf.Write(metaBytes)
	buf.Write(str)
	c0 := buf.Bytes()
	data := buildTestScenarioBinary(t, c0, nil)
	s, err := ParseScenarioBinary(data)
	if err != nil {
		t.Fatalf("parse: %v", err)
	}
	// Guard the dereference so a shape regression fails rather than panics.
	if s.Chunk0 == nil || s.Chunk0.Subheader == nil {
		t.Fatal("expected chunk0 subheader")
	}
	sh := s.Chunk0.Subheader
	if sh.Type != 0x01 || sh.Unknown1 != 0xAA || sh.Unknown2 != 0xBB {
		t.Errorf("header fields: type=%02X unk1=%02X unk2=%02X", sh.Type, sh.Unknown1, sh.Unknown2)
	}
	// Compile and parse again — metadata must survive
	jsonData, err := json.Marshal(s)
	if err != nil {
		t.Fatalf("marshal: %v", err)
	}
	compiled, err := CompileScenarioJSON(jsonData)
	if err != nil {
		t.Fatalf("compile: %v", err)
	}
	s2, err := ParseScenarioBinary(compiled)
	if err != nil {
		t.Fatalf("re-parse: %v", err)
	}
	if s2.Chunk0 == nil || s2.Chunk0.Subheader == nil {
		t.Fatal("expected chunk0 subheader after round trip")
	}
	sh2 := s2.Chunk0.Subheader
	if sh2.Metadata != sh.Metadata {
		t.Errorf("metadata changed:\n before: %s\n after: %s", sh.Metadata, sh2.Metadata)
	}
	if sh2.Unknown1 != sh.Unknown1 || sh2.Unknown2 != sh.Unknown2 {
		t.Errorf("unknown fields changed: unk1 %02X→%02X unk2 %02X→%02X",
			sh.Unknown1, sh2.Unknown1, sh.Unknown2, sh2.Unknown2)
	}
}