mirror of
https://github.com/Mezeporta/Erupe.git
synced 2026-05-13 17:53:51 +02:00
chore(merge): merge develop into main for 9.4.0 cycle
Brings 53 develop commits (i18n, Diva, campaign, guild invites, save transfer, return/rookie guilds, hunting tournament, JSON quest/scenario loaders, Ghidra-derived user binary parsing, and misc fixes) onto main now that 9.3.2 has been tagged and released. Resolves two overlap zones: 1. Migration number collision. Main shipped 0010_fix_zero_rasta_id and 0011_fix_stale_boost_time in 9.3.2; develop had independently numbered 0010_campaign..0015_tournament. The migration runner keys applied versions by integer, so coexisting files with the same numeric prefix would silently skip each other. Develop's files have been renumbered to 0016..0021, leaving main's 0010/0011 intact. A schema_version rename script is required on any server that had already applied the old develop numbers (only frontier.mogapedia.fr at the time of this merge). 2. CHANGELOG.md. Develop's in-progress feature entries move into [Unreleased] with updated migration references; the [9.3.2] section is preserved verbatim. main.go version string bumped to 9.4.0-dev to mark the new cycle. Full test suite (go test -race ./...) passes.
This commit is contained in:
@@ -86,6 +86,7 @@ func (s *APIServer) Start() error {
|
||||
v2.HandleFunc("/version", s.Version).Methods("GET")
|
||||
v2.HandleFunc("/health", s.Health).Methods("GET")
|
||||
v2.HandleFunc("/server/status", s.ServerStatus).Methods("GET")
|
||||
v2.HandleFunc("/server/info", s.ServerInfo).Methods("GET")
|
||||
|
||||
// V2 authenticated routes
|
||||
v2Auth := v2.PathPrefix("").Subrouter()
|
||||
@@ -94,6 +95,7 @@ func (s *APIServer) Start() error {
|
||||
v2Auth.HandleFunc("/characters/{id}/delete", s.DeleteCharacter).Methods("POST")
|
||||
v2Auth.HandleFunc("/characters/{id}", s.DeleteCharacter).Methods("DELETE")
|
||||
v2Auth.HandleFunc("/characters/{id}/export", s.ExportSave).Methods("GET")
|
||||
v2Auth.HandleFunc("/characters/{id}/import", s.ImportSave).Methods("POST")
|
||||
|
||||
handler := handlers.CORS(
|
||||
handlers.AllowedHeaders([]string{"Content-Type", "Authorization"}),
|
||||
|
||||
153
server/api/dashboard_test.go
Normal file
153
server/api/dashboard_test.go
Normal file
@@ -0,0 +1,153 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
// TestDashboardStatsJSON_NoDB verifies the stats endpoint returns valid JSON
|
||||
// with safe zero values when no database is configured.
|
||||
func TestDashboardStatsJSON_NoDB(t *testing.T) {
|
||||
logger := NewTestLogger(t)
|
||||
defer func() { _ = logger.Sync() }()
|
||||
|
||||
server := &APIServer{
|
||||
logger: logger,
|
||||
erupeConfig: NewTestConfig(),
|
||||
startTime: time.Now().Add(-5 * time.Minute),
|
||||
// db intentionally nil
|
||||
}
|
||||
|
||||
req := httptest.NewRequest(http.MethodGet, "/api/dashboard/stats", nil)
|
||||
rec := httptest.NewRecorder()
|
||||
|
||||
server.DashboardStatsJSON(rec, req)
|
||||
|
||||
if rec.Code != http.StatusOK {
|
||||
t.Errorf("Expected status %d, got %d", http.StatusOK, rec.Code)
|
||||
}
|
||||
|
||||
ct := rec.Header().Get("Content-Type")
|
||||
if !strings.HasPrefix(ct, "application/json") {
|
||||
t.Errorf("Expected Content-Type application/json, got %q", ct)
|
||||
}
|
||||
|
||||
var stats DashboardStats
|
||||
if err := json.NewDecoder(rec.Body).Decode(&stats); err != nil {
|
||||
t.Fatalf("Failed to decode response: %v", err)
|
||||
}
|
||||
|
||||
// Verify required fields are present and have expected zero-DB values.
|
||||
if stats.ServerVersion == "" {
|
||||
t.Error("Expected non-empty ServerVersion")
|
||||
}
|
||||
if stats.Uptime == "" || stats.Uptime == "unknown" {
|
||||
// startTime is set so uptime should be computed, not "unknown".
|
||||
t.Errorf("Expected computed uptime, got %q", stats.Uptime)
|
||||
}
|
||||
if stats.TotalAccounts != 0 {
|
||||
t.Errorf("Expected TotalAccounts=0 without DB, got %d", stats.TotalAccounts)
|
||||
}
|
||||
if stats.TotalCharacters != 0 {
|
||||
t.Errorf("Expected TotalCharacters=0 without DB, got %d", stats.TotalCharacters)
|
||||
}
|
||||
if stats.OnlinePlayers != 0 {
|
||||
t.Errorf("Expected OnlinePlayers=0 without DB, got %d", stats.OnlinePlayers)
|
||||
}
|
||||
if stats.DatabaseOK {
|
||||
t.Error("Expected DatabaseOK=false without DB")
|
||||
}
|
||||
if stats.Channels != nil {
|
||||
t.Errorf("Expected nil Channels without DB, got %v", stats.Channels)
|
||||
}
|
||||
}
|
||||
|
||||
// TestDashboardStatsJSON_UptimeUnknown verifies "unknown" uptime when startTime is zero.
|
||||
func TestDashboardStatsJSON_UptimeUnknown(t *testing.T) {
|
||||
logger := NewTestLogger(t)
|
||||
defer func() { _ = logger.Sync() }()
|
||||
|
||||
server := &APIServer{
|
||||
logger: logger,
|
||||
erupeConfig: NewTestConfig(),
|
||||
// startTime is zero value
|
||||
}
|
||||
|
||||
req := httptest.NewRequest(http.MethodGet, "/api/dashboard/stats", nil)
|
||||
rec := httptest.NewRecorder()
|
||||
|
||||
server.DashboardStatsJSON(rec, req)
|
||||
|
||||
if rec.Code != http.StatusOK {
|
||||
t.Errorf("Expected status %d, got %d", http.StatusOK, rec.Code)
|
||||
}
|
||||
|
||||
var stats DashboardStats
|
||||
if err := json.NewDecoder(rec.Body).Decode(&stats); err != nil {
|
||||
t.Fatalf("Failed to decode response: %v", err)
|
||||
}
|
||||
|
||||
if stats.Uptime != "unknown" {
|
||||
t.Errorf("Expected Uptime='unknown' for zero startTime, got %q", stats.Uptime)
|
||||
}
|
||||
}
|
||||
|
||||
// TestDashboardStatsJSON_JSONShape validates every field of the DashboardStats payload.
|
||||
func TestDashboardStatsJSON_JSONShape(t *testing.T) {
|
||||
logger := NewTestLogger(t)
|
||||
defer func() { _ = logger.Sync() }()
|
||||
|
||||
server := &APIServer{
|
||||
logger: logger,
|
||||
erupeConfig: NewTestConfig(),
|
||||
startTime: time.Now(),
|
||||
}
|
||||
|
||||
req := httptest.NewRequest(http.MethodGet, "/api/dashboard/stats", nil)
|
||||
rec := httptest.NewRecorder()
|
||||
|
||||
server.DashboardStatsJSON(rec, req)
|
||||
|
||||
// Decode into a raw map so we can check key presence independent of type.
|
||||
var raw map[string]interface{}
|
||||
if err := json.NewDecoder(rec.Body).Decode(&raw); err != nil {
|
||||
t.Fatalf("Failed to decode response as raw map: %v", err)
|
||||
}
|
||||
|
||||
requiredKeys := []string{
|
||||
"uptime", "serverVersion", "clientMode",
|
||||
"onlinePlayers", "totalAccounts", "totalCharacters",
|
||||
"databaseOK",
|
||||
}
|
||||
for _, key := range requiredKeys {
|
||||
if _, ok := raw[key]; !ok {
|
||||
t.Errorf("Missing required JSON key %q", key)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TestFormatDuration covers the human-readable duration formatter.
|
||||
func TestFormatDuration(t *testing.T) {
|
||||
tests := []struct {
|
||||
d time.Duration
|
||||
want string
|
||||
}{
|
||||
{10 * time.Second, "10s"},
|
||||
{90 * time.Second, "1m 30s"},
|
||||
{2*time.Hour + 15*time.Minute + 5*time.Second, "2h 15m 5s"},
|
||||
{25*time.Hour + 3*time.Minute + 0*time.Second, "1d 1h 3m 0s"},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.want, func(t *testing.T) {
|
||||
got := formatDuration(tt.d)
|
||||
if got != tt.want {
|
||||
t.Errorf("formatDuration(%v) = %q, want %q", tt.d, got, tt.want)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -2,13 +2,16 @@ package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"crypto/sha256"
|
||||
"database/sql"
|
||||
"encoding/base64"
|
||||
"encoding/json"
|
||||
"encoding/xml"
|
||||
"errors"
|
||||
"erupe-ce/common/gametime"
|
||||
"erupe-ce/common/mhfcourse"
|
||||
cfg "erupe-ce/config"
|
||||
"erupe-ce/server/channelserver/compression/nullcomp"
|
||||
"fmt"
|
||||
"image"
|
||||
"image/jpeg"
|
||||
@@ -153,6 +156,32 @@ func (s *APIServer) Version(w http.ResponseWriter, r *http.Request) {
|
||||
_ = json.NewEncoder(w).Encode(resp)
|
||||
}
|
||||
|
||||
// ServerInfoResponse is the JSON payload returned by GET /v2/server/info.
// Launcher tooling (e.g. mhf-outpost) consumes it to decide whether a local
// client build is compatible with this server.
type ServerInfoResponse struct {
	// ClientMode is the version string as configured in Erupe (e.g. "ZZ", "G10.1").
	ClientMode string `json:"clientMode"`
	// ManifestID is ClientMode lowercased with dots stripped, matching
	// mhf-outpost manifest IDs (e.g. "zz", "g101").
	ManifestID string `json:"manifestId"`
	// Name is the server software name.
	Name string `json:"name"`
}
|
||||
|
||||
// ServerInfo handles GET /v2/server/info, returning the server's configured
|
||||
// game version in a format compatible with mhf-outpost manifest IDs.
|
||||
func (s *APIServer) ServerInfo(w http.ResponseWriter, r *http.Request) {
|
||||
clientMode := s.erupeConfig.ClientMode
|
||||
resp := ServerInfoResponse{
|
||||
ClientMode: clientMode,
|
||||
ManifestID: strings.ToLower(strings.ReplaceAll(clientMode, ".", "")),
|
||||
Name: "Erupe-CE",
|
||||
}
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
_ = json.NewEncoder(w).Encode(resp)
|
||||
}
|
||||
|
||||
// Launcher handles GET /launcher and returns banners, messages, and links for the launcher UI.
|
||||
func (s *APIServer) Launcher(w http.ResponseWriter, r *http.Request) {
|
||||
var respData LauncherResponse
|
||||
@@ -590,3 +619,151 @@ func (s *APIServer) Health(w http.ResponseWriter, r *http.Request) {
|
||||
"status": "ok",
|
||||
})
|
||||
}
|
||||
|
||||
// ImportSave handles POST /v2/characters/{id}/import.
|
||||
// The request body must contain a one-time import_token (granted by an admin
|
||||
// via saveutil) plus a character export blob in the same format as ExportSave.
|
||||
func (s *APIServer) ImportSave(w http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
userID, _ := UserIDFromContext(ctx)
|
||||
|
||||
var charID uint32
|
||||
if _, err := fmt.Sscanf(mux.Vars(r)["id"], "%d", &charID); err != nil {
|
||||
writeError(w, http.StatusBadRequest, "invalid_request", "Invalid character ID")
|
||||
return
|
||||
}
|
||||
|
||||
var req struct {
|
||||
ImportToken string `json:"import_token"`
|
||||
Character map[string]interface{} `json:"character"`
|
||||
}
|
||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||
writeError(w, http.StatusBadRequest, "invalid_request", "Malformed request body")
|
||||
return
|
||||
}
|
||||
if req.ImportToken == "" {
|
||||
writeError(w, http.StatusBadRequest, "missing_token", "import_token is required")
|
||||
return
|
||||
}
|
||||
|
||||
blobs, err := saveBlobsFromMap(req.Character)
|
||||
if err != nil {
|
||||
s.logger.Warn("ImportSave: failed to extract blobs", zap.Error(err), zap.Uint32("charID", charID))
|
||||
writeError(w, http.StatusBadRequest, "invalid_request", "Invalid save data: "+err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
// Compute savedata hash server-side.
|
||||
if len(blobs.Savedata) > 0 {
|
||||
decompressed, err := nullcomp.Decompress(blobs.Savedata)
|
||||
if err != nil {
|
||||
writeError(w, http.StatusBadRequest, "invalid_request", "savedata decompression failed")
|
||||
return
|
||||
}
|
||||
h := sha256.Sum256(decompressed)
|
||||
blobs.SavedataHash = h[:]
|
||||
}
|
||||
|
||||
if err := s.charRepo.ImportSave(ctx, charID, userID, req.ImportToken, blobs); err != nil {
|
||||
s.logger.Warn("ImportSave: failed", zap.Error(err), zap.Uint32("charID", charID))
|
||||
writeError(w, http.StatusForbidden, "import_denied", "Import token invalid, expired, or character not owned by user")
|
||||
return
|
||||
}
|
||||
|
||||
s.logger.Info("ImportSave: save imported successfully", zap.Uint32("charID", charID), zap.Uint32("userID", userID))
|
||||
w.WriteHeader(http.StatusOK)
|
||||
}
|
||||
|
||||
// saveBlobsFromMap extracts save blob columns from an export character map.
|
||||
// Values must be base64-encoded strings (as produced by json.Marshal on []byte).
|
||||
func saveBlobsFromMap(m map[string]interface{}) (SaveBlobs, error) {
|
||||
var b SaveBlobs
|
||||
var err error
|
||||
b.Savedata, err = extractBlob(m, "savedata")
|
||||
if err != nil {
|
||||
return b, err
|
||||
}
|
||||
b.Decomyset, err = extractBlob(m, "decomyset")
|
||||
if err != nil {
|
||||
return b, err
|
||||
}
|
||||
b.Hunternavi, err = extractBlob(m, "hunternavi")
|
||||
if err != nil {
|
||||
return b, err
|
||||
}
|
||||
b.Otomoairou, err = extractBlob(m, "otomoairou")
|
||||
if err != nil {
|
||||
return b, err
|
||||
}
|
||||
b.Partner, err = extractBlob(m, "partner")
|
||||
if err != nil {
|
||||
return b, err
|
||||
}
|
||||
b.Platebox, err = extractBlob(m, "platebox")
|
||||
if err != nil {
|
||||
return b, err
|
||||
}
|
||||
b.Platedata, err = extractBlob(m, "platedata")
|
||||
if err != nil {
|
||||
return b, err
|
||||
}
|
||||
b.Platemyset, err = extractBlob(m, "platemyset")
|
||||
if err != nil {
|
||||
return b, err
|
||||
}
|
||||
b.Rengokudata, err = extractBlob(m, "rengokudata")
|
||||
if err != nil {
|
||||
return b, err
|
||||
}
|
||||
b.Savemercenary, err = extractBlob(m, "savemercenary")
|
||||
if err != nil {
|
||||
return b, err
|
||||
}
|
||||
b.GachaItems, err = extractBlob(m, "gacha_items")
|
||||
if err != nil {
|
||||
return b, err
|
||||
}
|
||||
b.HouseInfo, err = extractBlob(m, "house_info")
|
||||
if err != nil {
|
||||
return b, err
|
||||
}
|
||||
b.LoginBoost, err = extractBlob(m, "login_boost")
|
||||
if err != nil {
|
||||
return b, err
|
||||
}
|
||||
b.SkinHist, err = extractBlob(m, "skin_hist")
|
||||
if err != nil {
|
||||
return b, err
|
||||
}
|
||||
b.Scenariodata, err = extractBlob(m, "scenariodata")
|
||||
if err != nil {
|
||||
return b, err
|
||||
}
|
||||
b.Savefavoritequest, err = extractBlob(m, "savefavoritequest")
|
||||
if err != nil {
|
||||
return b, err
|
||||
}
|
||||
b.Mezfes, err = extractBlob(m, "mezfes")
|
||||
if err != nil {
|
||||
return b, err
|
||||
}
|
||||
return b, nil
|
||||
}
|
||||
|
||||
// extractBlob decodes a single base64-encoded blob from a character export map.
// Returns nil (not an error) if the key is absent or its value is JSON null.
func extractBlob(m map[string]interface{}, key string) ([]byte, error) {
	raw, present := m[key]
	if !present || raw == nil {
		return nil, nil
	}
	encoded, isString := raw.(string)
	if !isString {
		return nil, fmt.Errorf("field %q: expected base64 string, got %T", key, raw)
	}
	decoded, err := base64.StdEncoding.DecodeString(encoded)
	if err != nil {
		return nil, fmt.Errorf("field %q: base64 decode: %w", key, err)
	}
	return decoded, nil
}
|
||||
|
||||
@@ -44,6 +44,56 @@ func TestVersionEndpoint(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestServerInfoEndpoint(t *testing.T) {
|
||||
tests := []struct {
|
||||
clientMode string
|
||||
wantID string
|
||||
}{
|
||||
{"ZZ", "zz"},
|
||||
{"GG", "gg"},
|
||||
{"G10.1", "g101"},
|
||||
{"G9.1", "g91"},
|
||||
{"FW.5", "fw5"},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.clientMode, func(t *testing.T) {
|
||||
logger := NewTestLogger(t)
|
||||
c := NewTestConfig()
|
||||
c.ClientMode = tt.clientMode
|
||||
|
||||
server := &APIServer{
|
||||
logger: logger,
|
||||
erupeConfig: c,
|
||||
}
|
||||
|
||||
req := httptest.NewRequest("GET", "/v2/server/info", nil)
|
||||
rec := httptest.NewRecorder()
|
||||
server.ServerInfo(rec, req)
|
||||
|
||||
if rec.Code != http.StatusOK {
|
||||
t.Errorf("status = %d, want 200", rec.Code)
|
||||
}
|
||||
if ct := rec.Header().Get("Content-Type"); ct != "application/json" {
|
||||
t.Errorf("Content-Type = %q, want application/json", ct)
|
||||
}
|
||||
|
||||
var resp ServerInfoResponse
|
||||
if err := json.NewDecoder(rec.Body).Decode(&resp); err != nil {
|
||||
t.Fatalf("decode error: %v", err)
|
||||
}
|
||||
if resp.ClientMode != tt.clientMode {
|
||||
t.Errorf("ClientMode = %q, want %q", resp.ClientMode, tt.clientMode)
|
||||
}
|
||||
if resp.ManifestID != tt.wantID {
|
||||
t.Errorf("ManifestID = %q, want %q", resp.ManifestID, tt.wantID)
|
||||
}
|
||||
if resp.Name != "Erupe-CE" {
|
||||
t.Errorf("Name = %q, want Erupe-CE", resp.Name)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestLandingPageEndpoint_Enabled(t *testing.T) {
|
||||
logger := NewTestLogger(t)
|
||||
c := NewTestConfig()
|
||||
|
||||
@@ -2,6 +2,8 @@ package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"time"
|
||||
|
||||
"github.com/jmoiron/sqlx"
|
||||
)
|
||||
@@ -89,3 +91,80 @@ func (r *APICharacterRepository) ExportSave(ctx context.Context, userID, charID
|
||||
}
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func (r *APICharacterRepository) GrantImportToken(ctx context.Context, charID, userID uint32, token string, expiry time.Time) error {
|
||||
res, err := r.db.ExecContext(ctx,
|
||||
`UPDATE characters SET savedata_import_token=$1, savedata_import_token_expiry=$2
|
||||
WHERE id=$3 AND user_id=$4 AND deleted=false`,
|
||||
token, expiry, charID, userID,
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
n, err := res.RowsAffected()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if n == 0 {
|
||||
return errors.New("character not found or not owned by user")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (r *APICharacterRepository) RevokeImportToken(ctx context.Context, charID, userID uint32) error {
|
||||
_, err := r.db.ExecContext(ctx,
|
||||
`UPDATE characters SET savedata_import_token=NULL, savedata_import_token_expiry=NULL
|
||||
WHERE id=$1 AND user_id=$2`,
|
||||
charID, userID,
|
||||
)
|
||||
return err
|
||||
}
|
||||
|
||||
func (r *APICharacterRepository) ImportSave(ctx context.Context, charID, userID uint32, token string, blobs SaveBlobs) error {
|
||||
tx, err := r.db.BeginTxx(ctx, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer func() { _ = tx.Rollback() }()
|
||||
|
||||
// Validate token ownership and expiry, then clear it — all in one UPDATE.
|
||||
res, err := tx.ExecContext(ctx,
|
||||
`UPDATE characters
|
||||
SET savedata_import_token=NULL, savedata_import_token_expiry=NULL
|
||||
WHERE id=$1 AND user_id=$2
|
||||
AND savedata_import_token=$3
|
||||
AND savedata_import_token_expiry > now()`,
|
||||
charID, userID, token,
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
n, err := res.RowsAffected()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if n == 0 {
|
||||
return errors.New("import token invalid, expired, or character not owned by user")
|
||||
}
|
||||
|
||||
// Write all save blobs.
|
||||
_, err = tx.ExecContext(ctx,
|
||||
`UPDATE characters SET
|
||||
savedata=$1, savedata_hash=$2, decomyset=$3, hunternavi=$4,
|
||||
otomoairou=$5, partner=$6, platebox=$7, platedata=$8,
|
||||
platemyset=$9, rengokudata=$10, savemercenary=$11, gacha_items=$12,
|
||||
house_info=$13, login_boost=$14, skin_hist=$15, scenariodata=$16,
|
||||
savefavoritequest=$17, mezfes=$18
|
||||
WHERE id=$19`,
|
||||
blobs.Savedata, blobs.SavedataHash, blobs.Decomyset, blobs.Hunternavi,
|
||||
blobs.Otomoairou, blobs.Partner, blobs.Platebox, blobs.Platedata,
|
||||
blobs.Platemyset, blobs.Rengokudata, blobs.Savemercenary, blobs.GachaItems,
|
||||
blobs.HouseInfo, blobs.LoginBoost, blobs.SkinHist, blobs.Scenariodata,
|
||||
blobs.Savefavoritequest, blobs.Mezfes,
|
||||
charID,
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return tx.Commit()
|
||||
}
|
||||
|
||||
@@ -8,6 +8,29 @@ import (
|
||||
// Repository interfaces decouple API server business logic from concrete
|
||||
// PostgreSQL implementations, enabling mock/stub injection for unit tests.
|
||||
|
||||
// SaveBlobs holds the transferable save data columns for a character.
// Each field maps 1:1 to a bytea column on the characters table; nil fields
// are written as NULL. SavedataHash must be set by the caller (SHA-256 of
// the decompressed Savedata) — it is never taken from an export.
type SaveBlobs struct {
	// Core save file and its server-computed integrity hash.
	Savedata     []byte
	SavedataHash []byte
	// Loadout / equipment state.
	Decomyset  []byte
	Hunternavi []byte
	Otomoairou []byte
	Partner    []byte
	Platebox   []byte
	Platedata  []byte
	Platemyset []byte
	// Mode- and feature-specific progress blobs.
	Rengokudata       []byte
	Savemercenary     []byte
	GachaItems        []byte
	HouseInfo         []byte
	LoginBoost        []byte
	SkinHist          []byte
	Scenariodata      []byte
	Savefavoritequest []byte
	Mezfes            []byte
}
|
||||
|
||||
// APIUserRepo defines the contract for user-related data access.
|
||||
type APIUserRepo interface {
|
||||
// Register creates a new user and returns their ID and rights.
|
||||
@@ -42,6 +65,13 @@ type APICharacterRepo interface {
|
||||
GetForUser(ctx context.Context, userID uint32) ([]Character, error)
|
||||
// ExportSave returns the full character row as a map.
|
||||
ExportSave(ctx context.Context, userID, charID uint32) (map[string]interface{}, error)
|
||||
// GrantImportToken sets a one-time import token for a character owned by userID.
|
||||
GrantImportToken(ctx context.Context, charID, userID uint32, token string, expiry time.Time) error
|
||||
// RevokeImportToken clears any pending import token for a character owned by userID.
|
||||
RevokeImportToken(ctx context.Context, charID, userID uint32) error
|
||||
// ImportSave atomically validates+consumes the import token and writes all save blobs.
|
||||
// Returns an error if the token is invalid, expired, or the character doesn't belong to userID.
|
||||
ImportSave(ctx context.Context, charID, userID uint32, token string, blobs SaveBlobs) error
|
||||
}
|
||||
|
||||
// APIEventRepo defines the contract for read-only event data access.
|
||||
|
||||
@@ -72,6 +72,10 @@ type mockAPICharacterRepo struct {
|
||||
|
||||
exportResult map[string]interface{}
|
||||
exportErr error
|
||||
|
||||
grantImportTokenErr error
|
||||
revokeImportTokenErr error
|
||||
importSaveErr error
|
||||
}
|
||||
|
||||
func (m *mockAPICharacterRepo) GetNewCharacter(_ context.Context, _ uint32) (Character, error) {
|
||||
@@ -106,6 +110,18 @@ func (m *mockAPICharacterRepo) ExportSave(_ context.Context, _, _ uint32) (map[s
|
||||
return m.exportResult, m.exportErr
|
||||
}
|
||||
|
||||
func (m *mockAPICharacterRepo) GrantImportToken(_ context.Context, _, _ uint32, _ string, _ time.Time) error {
|
||||
return m.grantImportTokenErr
|
||||
}
|
||||
|
||||
func (m *mockAPICharacterRepo) RevokeImportToken(_ context.Context, _, _ uint32) error {
|
||||
return m.revokeImportTokenErr
|
||||
}
|
||||
|
||||
func (m *mockAPICharacterRepo) ImportSave(_ context.Context, _, _ uint32, _ string, _ SaveBlobs) error {
|
||||
return m.importSaveErr
|
||||
}
|
||||
|
||||
// mockAPIEventRepo implements APIEventRepo for testing.
|
||||
type mockAPIEventRepo struct {
|
||||
featureWeapon *FeatureWeaponRow
|
||||
|
||||
@@ -44,6 +44,7 @@ func newTestRouter(s *APIServer) *mux.Router {
|
||||
v2Auth.HandleFunc("/characters/{id}/export", s.ExportSave).Methods("GET")
|
||||
|
||||
v2.HandleFunc("/server/status", s.ServerStatus).Methods("GET")
|
||||
v2.HandleFunc("/server/info", s.ServerInfo).Methods("GET")
|
||||
|
||||
return r
|
||||
}
|
||||
|
||||
@@ -43,7 +43,8 @@ type Guild struct {
|
||||
EventRP uint32 `db:"event_rp"`
|
||||
RoomRP uint16 `db:"room_rp"`
|
||||
RoomExpiry time.Time `db:"room_expiry"`
|
||||
Comment string `db:"comment"`
|
||||
Comment string `db:"comment"`
|
||||
ReturnType uint8 `db:"return_type"`
|
||||
PugiName1 string `db:"pugi_name_1"`
|
||||
PugiName2 string `db:"pugi_name_2"`
|
||||
PugiName3 string `db:"pugi_name_3"`
|
||||
|
||||
@@ -9,55 +9,83 @@ import (
|
||||
"time"
|
||||
)
|
||||
|
||||
// CampaignEvent represents a promotional campaign event.
|
||||
type CampaignEvent struct {
|
||||
ID uint32
|
||||
Unk0 uint32
|
||||
MinHR int16
|
||||
MaxHR int16
|
||||
MinSR int16
|
||||
MaxSR int16
|
||||
MinGR int16
|
||||
MaxGR int16
|
||||
Unk1 uint16
|
||||
Unk2 uint8
|
||||
Unk3 uint8
|
||||
Unk4 uint16
|
||||
Unk5 uint16
|
||||
Start time.Time
|
||||
End time.Time
|
||||
Unk6 uint8
|
||||
String0 string
|
||||
String1 string
|
||||
String2 string
|
||||
String3 string
|
||||
Link string
|
||||
Prefix string
|
||||
Categories []uint16
|
||||
ID uint32 `db:"id"`
|
||||
MinHR int16 `db:"min_hr"`
|
||||
MaxHR int16 `db:"max_hr"`
|
||||
MinSR int16 `db:"min_sr"`
|
||||
MaxSR int16 `db:"max_sr"`
|
||||
MinGR int16 `db:"min_gr"`
|
||||
MaxGR int16 `db:"max_gr"`
|
||||
RewardType uint16 `db:"reward_type"`
|
||||
Stamps uint8 `db:"stamps"`
|
||||
ReceiveType uint8 `db:"receive_type"`
|
||||
BackgroundID uint16 `db:"background_id"`
|
||||
Start time.Time `db:"start_time"`
|
||||
End time.Time `db:"end_time"`
|
||||
Title string `db:"title"`
|
||||
Reward string `db:"reward"`
|
||||
Link string `db:"link"`
|
||||
Prefix string `db:"code_prefix"`
|
||||
}
|
||||
|
||||
// CampaignCategory represents a category grouping for campaign events.
|
||||
type CampaignCategory struct {
|
||||
ID uint16
|
||||
Type uint8
|
||||
Title string
|
||||
Description string
|
||||
ID uint16 `db:"id"`
|
||||
Type uint8 `db:"type"`
|
||||
Title string `db:"title"`
|
||||
Description string `db:"description"`
|
||||
}
|
||||
|
||||
// CampaignLink links a campaign event to its items/rewards.
|
||||
type CampaignLink struct {
|
||||
CategoryID uint16
|
||||
CampaignID uint32
|
||||
CategoryID uint16 `db:"category_id"`
|
||||
CampaignID uint32 `db:"campaign_id"`
|
||||
}
|
||||
|
||||
type CampaignReward struct {
|
||||
ID uint32 `db:"id"`
|
||||
ItemType uint16 `db:"item_type"`
|
||||
Quantity uint16 `db:"quantity"`
|
||||
ItemID uint16 `db:"item_id"`
|
||||
Deadline time.Time `db:"deadline"`
|
||||
}
|
||||
|
||||
// campaignRequiredStamps returns the stamp requirement for a campaign,
// clamping to a minimum of 1. Campaigns with 0 stamps in the DB are
// treated as requiring a single stamp (code redemption) to unlock.
func campaignRequiredStamps(stamps int) int {
	if stamps >= 1 {
		return stamps
	}
	return 1
}
|
||||
|
||||
func handleMsgMhfEnumerateCampaign(s *Session, p mhfpacket.MHFPacket) {
|
||||
pkt := p.(*mhfpacket.MsgMhfEnumerateCampaign)
|
||||
if s.server.db == nil {
|
||||
doAckBufFail(s, pkt.AckHandle, make([]byte, 4))
|
||||
return
|
||||
}
|
||||
bf := byteframe.NewByteFrame()
|
||||
|
||||
events := []CampaignEvent{}
|
||||
categories := []CampaignCategory{}
|
||||
var events []CampaignEvent
|
||||
var categories []CampaignCategory
|
||||
var campaignLinks []CampaignLink
|
||||
|
||||
err := s.server.db.Select(&events, "SELECT id,min_hr,max_hr,min_sr,max_sr,min_gr,max_gr,reward_type,stamps,receive_type,background_id,start_time,end_time,title,reward,link,code_prefix FROM campaigns")
|
||||
if err != nil {
|
||||
doAckBufFail(s, pkt.AckHandle, make([]byte, 4))
|
||||
return
|
||||
}
|
||||
err = s.server.db.Select(&categories, "SELECT id, type, title, description FROM campaign_categories")
|
||||
if err != nil {
|
||||
doAckBufFail(s, pkt.AckHandle, make([]byte, 4))
|
||||
return
|
||||
}
|
||||
err = s.server.db.Select(&campaignLinks, "SELECT campaign_id, category_id FROM campaign_category_links")
|
||||
if err != nil {
|
||||
doAckBufFail(s, pkt.AckHandle, make([]byte, 4))
|
||||
return
|
||||
}
|
||||
if len(events) > 255 {
|
||||
bf.WriteUint8(255)
|
||||
bf.WriteUint16(uint16(len(events)))
|
||||
@@ -66,7 +94,7 @@ func handleMsgMhfEnumerateCampaign(s *Session, p mhfpacket.MHFPacket) {
|
||||
}
|
||||
for _, event := range events {
|
||||
bf.WriteUint32(event.ID)
|
||||
bf.WriteUint32(event.Unk0)
|
||||
bf.WriteUint32(0)
|
||||
bf.WriteInt16(event.MinHR)
|
||||
bf.WriteInt16(event.MaxHR)
|
||||
bf.WriteInt16(event.MinSR)
|
||||
@@ -75,34 +103,19 @@ func handleMsgMhfEnumerateCampaign(s *Session, p mhfpacket.MHFPacket) {
|
||||
bf.WriteInt16(event.MinGR)
|
||||
bf.WriteInt16(event.MaxGR)
|
||||
}
|
||||
bf.WriteUint16(event.Unk1)
|
||||
bf.WriteUint8(event.Unk2)
|
||||
bf.WriteUint8(event.Unk3)
|
||||
bf.WriteUint16(event.Unk4)
|
||||
bf.WriteUint16(event.Unk5)
|
||||
bf.WriteUint16(event.RewardType)
|
||||
bf.WriteUint8(event.Stamps)
|
||||
bf.WriteUint8(event.ReceiveType)
|
||||
bf.WriteUint16(event.BackgroundID)
|
||||
bf.WriteUint16(0)
|
||||
bf.WriteUint32(uint32(event.Start.Unix()))
|
||||
bf.WriteUint32(uint32(event.End.Unix()))
|
||||
bf.WriteUint8(event.Unk6)
|
||||
ps.Uint8(bf, event.String0, true)
|
||||
ps.Uint8(bf, event.String1, true)
|
||||
ps.Uint8(bf, event.String2, true)
|
||||
ps.Uint8(bf, event.String3, true)
|
||||
bf.WriteBool(event.End.Before(time.Now()))
|
||||
ps.Uint8(bf, event.Title, true)
|
||||
ps.Uint8(bf, event.Reward, true)
|
||||
ps.Uint8(bf, event.Prefix, true)
|
||||
ps.Uint8(bf, "", false)
|
||||
ps.Uint8(bf, event.Link, true)
|
||||
for i := range event.Categories {
|
||||
campaignLinks = append(campaignLinks, CampaignLink{event.Categories[i], event.ID})
|
||||
}
|
||||
}
|
||||
|
||||
if len(events) > 255 {
|
||||
bf.WriteUint8(255)
|
||||
bf.WriteUint16(uint16(len(events)))
|
||||
} else {
|
||||
bf.WriteUint8(uint8(len(events)))
|
||||
}
|
||||
for _, event := range events {
|
||||
bf.WriteUint32(event.ID)
|
||||
bf.WriteUint8(1) // Always 1?
|
||||
bf.WriteBytes([]byte(event.Prefix))
|
||||
}
|
||||
|
||||
if len(categories) > 255 {
|
||||
@@ -137,43 +150,185 @@ func handleMsgMhfEnumerateCampaign(s *Session, p mhfpacket.MHFPacket) {
|
||||
|
||||
func handleMsgMhfStateCampaign(s *Session, p mhfpacket.MHFPacket) {
|
||||
pkt := p.(*mhfpacket.MsgMhfStateCampaign)
|
||||
if s.server.db == nil {
|
||||
doAckBufFail(s, pkt.AckHandle, make([]byte, 4))
|
||||
return
|
||||
}
|
||||
bf := byteframe.NewByteFrame()
|
||||
bf.WriteUint16(1)
|
||||
bf.WriteUint16(0)
|
||||
var required int
|
||||
var deadline time.Time
|
||||
var stamps []uint32
|
||||
|
||||
err := s.server.db.Select(&stamps, "SELECT id FROM campaign_state WHERE campaign_id = $1 AND character_id = $2", pkt.CampaignID, s.charID)
|
||||
if err != nil {
|
||||
doAckBufFail(s, pkt.AckHandle, make([]byte, 4))
|
||||
return
|
||||
}
|
||||
err = s.server.db.QueryRow(`SELECT stamps, end_time FROM campaigns WHERE id = $1`, pkt.CampaignID).Scan(&required, &deadline)
|
||||
if err != nil {
|
||||
doAckBufFail(s, pkt.AckHandle, make([]byte, 4))
|
||||
return
|
||||
}
|
||||
bf.WriteUint16(uint16(len(stamps)))
|
||||
required = campaignRequiredStamps(required)
|
||||
|
||||
if len(stamps) >= required && deadline.After(time.Now()) {
|
||||
bf.WriteUint16(2)
|
||||
} else {
|
||||
bf.WriteUint16(0)
|
||||
}
|
||||
|
||||
for _, v := range stamps {
|
||||
bf.WriteUint32(v)
|
||||
}
|
||||
|
||||
doAckBufSucceed(s, pkt.AckHandle, bf.Data())
|
||||
}
|
||||
|
||||
func handleMsgMhfApplyCampaign(s *Session, p mhfpacket.MHFPacket) {
|
||||
pkt := p.(*mhfpacket.MsgMhfApplyCampaign)
|
||||
bf := byteframe.NewByteFrame()
|
||||
bf.WriteUint32(1)
|
||||
doAckSimpleSucceed(s, pkt.AckHandle, bf.Data())
|
||||
if s.server.db == nil {
|
||||
doAckSimpleFail(s, pkt.AckHandle, make([]byte, 4))
|
||||
return
|
||||
}
|
||||
// Check if the code exists, belongs to this campaign, and check if it's a multi-code
|
||||
var multi bool
|
||||
err := s.server.db.QueryRow(`SELECT multi FROM public.campaign_codes WHERE code = $1 AND campaign_id = $2`, pkt.Code, pkt.CampaignID).Scan(&multi)
|
||||
if err != nil {
|
||||
doAckSimpleFail(s, pkt.AckHandle, make([]byte, 4))
|
||||
return
|
||||
}
|
||||
|
||||
// Check if the code is already used
|
||||
var exists bool
|
||||
if multi {
|
||||
err = s.server.db.QueryRow(`SELECT COUNT(*) > 0 FROM public.campaign_state WHERE code = $1 AND character_id = $2`, pkt.Code, s.charID).Scan(&exists)
|
||||
} else {
|
||||
err = s.server.db.QueryRow(`SELECT COUNT(*) > 0 FROM public.campaign_state WHERE code = $1`, pkt.Code).Scan(&exists)
|
||||
}
|
||||
if err != nil || exists {
|
||||
doAckSimpleFail(s, pkt.AckHandle, make([]byte, 4))
|
||||
return
|
||||
}
|
||||
|
||||
_, err = s.server.db.Exec(`INSERT INTO public.campaign_state (code, campaign_id, character_id) VALUES ($1, $2, $3)`, pkt.Code, pkt.CampaignID, s.charID)
|
||||
if err != nil {
|
||||
doAckSimpleFail(s, pkt.AckHandle, make([]byte, 4))
|
||||
return
|
||||
}
|
||||
doAckSimpleSucceed(s, pkt.AckHandle, make([]byte, 4))
|
||||
}
|
||||
|
||||
func handleMsgMhfEnumerateItem(s *Session, p mhfpacket.MHFPacket) {
|
||||
pkt := p.(*mhfpacket.MsgMhfEnumerateItem)
|
||||
items := []struct {
|
||||
Unk0 uint32
|
||||
Unk1 uint16
|
||||
Unk2 uint16
|
||||
Unk3 uint16
|
||||
Unk4 uint32
|
||||
Unk5 uint32
|
||||
}{}
|
||||
bf := byteframe.NewByteFrame()
|
||||
bf.WriteUint16(uint16(len(items)))
|
||||
for _, item := range items {
|
||||
bf.WriteUint32(item.Unk0)
|
||||
bf.WriteUint16(item.Unk1)
|
||||
bf.WriteUint16(item.Unk2)
|
||||
bf.WriteUint16(item.Unk3)
|
||||
bf.WriteUint32(item.Unk4)
|
||||
bf.WriteUint32(item.Unk5)
|
||||
if s.server.db == nil {
|
||||
doAckBufFail(s, pkt.AckHandle, make([]byte, 4))
|
||||
return
|
||||
}
|
||||
bf := byteframe.NewByteFrame()
|
||||
|
||||
var stamps, required, rewardType uint16
|
||||
err := s.server.db.QueryRow(`SELECT COUNT(*) FROM campaign_state WHERE campaign_id = $1 AND character_id = $2`, pkt.CampaignID, s.charID).Scan(&stamps)
|
||||
if err != nil {
|
||||
doAckBufFail(s, pkt.AckHandle, make([]byte, 4))
|
||||
return
|
||||
}
|
||||
err = s.server.db.QueryRow(`SELECT stamps, reward_type FROM campaigns WHERE id = $1`, pkt.CampaignID).Scan(&required, &rewardType)
|
||||
if err != nil {
|
||||
doAckBufFail(s, pkt.AckHandle, make([]byte, 4))
|
||||
return
|
||||
}
|
||||
required = uint16(campaignRequiredStamps(int(required)))
|
||||
|
||||
if stamps >= required {
|
||||
var items []CampaignReward
|
||||
if rewardType == 2 {
|
||||
var exists int
|
||||
err = s.server.db.QueryRow(`SELECT COUNT(*) FROM campaign_quest WHERE campaign_id = $1 AND character_id = $2`, pkt.CampaignID, s.charID).Scan(&exists)
|
||||
if err != nil {
|
||||
doAckBufFail(s, pkt.AckHandle, make([]byte, 4))
|
||||
return
|
||||
}
|
||||
if exists > 0 {
|
||||
err = s.server.db.Select(&items, `
|
||||
SELECT id, item_type, quantity, item_id, TO_TIMESTAMP(0) AS deadline FROM campaign_rewards
|
||||
WHERE campaign_id = $1 AND item_type != 9
|
||||
AND NOT EXISTS (SELECT 1 FROM campaign_rewards_claimed WHERE reward_id = campaign_rewards.id AND character_id = $2)
|
||||
`, pkt.CampaignID, s.charID)
|
||||
} else {
|
||||
err = s.server.db.Select(&items, `
|
||||
SELECT cr.id, cr.item_type, cr.quantity, cr.item_id, COALESCE(c.end_time, TO_TIMESTAMP(0)) AS deadline FROM campaign_rewards cr
|
||||
JOIN campaigns c ON cr.campaign_id = c.id
|
||||
WHERE campaign_id = $1 AND item_type = 9`, pkt.CampaignID)
|
||||
}
|
||||
} else {
|
||||
err = s.server.db.Select(&items, `
|
||||
SELECT id, item_type, quantity, item_id, TO_TIMESTAMP(0) AS deadline FROM campaign_rewards
|
||||
WHERE campaign_id = $1
|
||||
AND NOT EXISTS (SELECT 1 FROM campaign_rewards_claimed WHERE reward_id = campaign_rewards.id AND character_id = $2)
|
||||
`, pkt.CampaignID, s.charID)
|
||||
}
|
||||
if err != nil {
|
||||
doAckBufFail(s, pkt.AckHandle, make([]byte, 4))
|
||||
return
|
||||
}
|
||||
|
||||
bf.WriteUint16(uint16(len(items)))
|
||||
for _, item := range items {
|
||||
bf.WriteUint32(item.ID)
|
||||
bf.WriteUint16(item.ItemType)
|
||||
bf.WriteUint16(item.Quantity)
|
||||
bf.WriteUint16(item.ItemID) //HACK:placed quest id in this field to fit with Item No pattern. however it could be another field... possibly the other unks.
|
||||
bf.WriteUint16(0) //Unk4, gets cast to uint8
|
||||
bf.WriteUint32(0) //Unk5
|
||||
bf.WriteUint32(uint32(item.Deadline.Unix()))
|
||||
}
|
||||
if len(items) == 0 {
|
||||
doAckBufSucceed(s, pkt.AckHandle, make([]byte, 4))
|
||||
} else {
|
||||
doAckBufSucceed(s, pkt.AckHandle, bf.Data())
|
||||
}
|
||||
} else {
|
||||
doAckBufSucceed(s, pkt.AckHandle, make([]byte, 4))
|
||||
}
|
||||
doAckBufSucceed(s, pkt.AckHandle, bf.Data())
|
||||
}
|
||||
|
||||
func handleMsgMhfAcquireItem(s *Session, p mhfpacket.MHFPacket) {
|
||||
pkt := p.(*mhfpacket.MsgMhfAcquireItem)
|
||||
if s.server.db == nil {
|
||||
doAckSimpleFail(s, pkt.AckHandle, make([]byte, 4))
|
||||
return
|
||||
}
|
||||
for _, id := range pkt.RewardIDs {
|
||||
_, err := s.server.db.Exec(`INSERT INTO campaign_rewards_claimed (reward_id, character_id) VALUES ($1, $2)`, id, s.charID)
|
||||
if err != nil {
|
||||
doAckSimpleFail(s, pkt.AckHandle, make([]byte, 4))
|
||||
return
|
||||
}
|
||||
}
|
||||
doAckSimpleSucceed(s, pkt.AckHandle, make([]byte, 4))
|
||||
}
|
||||
|
||||
func handleMsgMhfTransferItem(s *Session, p mhfpacket.MHFPacket) {
|
||||
pkt := p.(*mhfpacket.MsgMhfTransferItem)
|
||||
if s.server.db == nil {
|
||||
doAckSimpleSucceed(s, pkt.AckHandle, make([]byte, 4))
|
||||
return
|
||||
}
|
||||
if pkt.ItemType == 9 {
|
||||
var campaignID uint32
|
||||
err := s.server.db.QueryRow(`
|
||||
SELECT ce.campaign_id FROM campaign_rewards ce
|
||||
JOIN event_quests eq ON ce.item_id = eq.quest_id
|
||||
WHERE eq.id = $1
|
||||
`, pkt.QuestID).Scan(&campaignID)
|
||||
if err == nil {
|
||||
_, err = s.server.db.Exec(`INSERT INTO campaign_quest (campaign_id, character_id) VALUES ($1, $2)`, campaignID, s.charID)
|
||||
if err != nil {
|
||||
doAckSimpleFail(s, pkt.AckHandle, make([]byte, 4))
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
doAckSimpleSucceed(s, pkt.AckHandle, make([]byte, 4))
|
||||
}
|
||||
|
||||
@@ -66,8 +66,7 @@ func GetCharacterSaveData(s *Session, charID uint32) (*CharacterSaveData, error)
|
||||
zap.Binary("stored_hash", storedHash),
|
||||
zap.Binary("computed_hash", computedHash[:]),
|
||||
)
|
||||
// TODO: attempt recovery from savedata_backups here
|
||||
return nil, errors.New("savedata integrity check failed")
|
||||
return recoverFromBackups(s, saveData, charID)
|
||||
}
|
||||
} else if storedHash != nil && s.server.erupeConfig.DisableSaveIntegrityCheck {
|
||||
s.logger.Warn("Savedata integrity check skipped (DisableSaveIntegrityCheck=true)",
|
||||
@@ -80,6 +79,77 @@ func GetCharacterSaveData(s *Session, charID uint32) (*CharacterSaveData, error)
|
||||
return saveData, nil
|
||||
}
|
||||
|
||||
// recoverFromBackups is called when the primary savedata fails its integrity check.
// It queries savedata_backups in recency order and returns the first slot whose
// compressed blob decompresses cleanly. It never writes to the database — the
// next successful Save() will overwrite the primary with fresh data and a new hash,
// self-healing the corruption without any extra recovery writes.
//
// base supplies the identity fields (CharID, IsNewCharacter, Name, Mode,
// Pointers) copied onto each candidate; only compSave is taken from the backup.
// NOTE(review): a backup is accepted on clean decompression plus a minimum-size
// check only — no per-slot hash is re-verified here; confirm that is acceptable.
func recoverFromBackups(s *Session, base *CharacterSaveData, charID uint32) (*CharacterSaveData, error) {
	backups, err := s.server.charRepo.LoadBackupsByRecency(charID)
	if err != nil {
		s.logger.Error("Failed to load savedata backups during recovery",
			zap.Uint32("charID", charID),
			zap.Error(err),
		)
		return nil, errors.New("savedata integrity check failed")
	}

	if len(backups) == 0 {
		s.logger.Error("Savedata corrupted and no backups available",
			zap.Uint32("charID", charID),
		)
		return nil, errors.New("savedata integrity check failed: no backups available")
	}

	// Backups are already ordered most-recent-first; take the first usable one.
	for _, backup := range backups {
		candidate := &CharacterSaveData{
			CharID:         base.CharID,
			IsNewCharacter: base.IsNewCharacter,
			Name:           base.Name,
			Mode:           base.Mode,
			Pointers:       base.Pointers,
			compSave:       backup.Data,
		}

		if err := candidate.Decompress(); err != nil {
			s.logger.Warn("Backup slot decompression failed during recovery, trying next",
				zap.Uint32("charID", charID),
				zap.Int("slot", backup.Slot),
				zap.Time("saved_at", backup.SavedAt),
				zap.Error(err),
			)
			continue
		}

		// nullcomp passes through data without a "cmp" header as-is (legitimate for
		// old uncompressed saves). Guard against garbage data that is too small to
		// contain the minimum save layout (name field at offset 88–100).
		const minSaveSize = saveFieldNameOffset + saveFieldNameLen
		if len(candidate.decompSave) < minSaveSize {
			s.logger.Warn("Backup slot data too small after decompression, skipping",
				zap.Uint32("charID", charID),
				zap.Int("slot", backup.Slot),
				zap.Int("size", len(candidate.decompSave)),
			)
			continue
		}

		// Warn (not info) so operators notice the primary save was corrupt.
		s.logger.Warn("Savedata recovered from backup — primary was corrupt",
			zap.Uint32("charID", charID),
			zap.Int("slot", backup.Slot),
			zap.Time("saved_at", backup.SavedAt),
		)
		candidate.updateStructWithSaveData()
		return candidate, nil
	}

	s.logger.Error("Savedata corrupted and all backup slots failed decompression",
		zap.Uint32("charID", charID),
		zap.Int("backups_tried", len(backups)),
	)
	return nil, errors.New("savedata integrity check failed: all backup slots exhausted")
}
|
||||
|
||||
func (save *CharacterSaveData) Save(s *Session) error {
|
||||
if save.decompSave == nil {
|
||||
s.logger.Warn("No decompressed save data, skipping save",
|
||||
|
||||
@@ -446,6 +446,144 @@ func TestGetCharacterSaveData_Integration(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
// TestGetCharacterSaveData_BackupRecovery tests that a character whose primary
// savedata has a hash mismatch is transparently recovered from the backup table.
func TestGetCharacterSaveData_BackupRecovery(t *testing.T) {
	db := SetupTestDB(t)
	defer TeardownTestDB(t, db)

	// Build valid compressed savedata (same layout as CreateTestCharacter).
	rawSave := make([]byte, 150000)
	copy(rawSave[88:], append([]byte("BackupChar"), 0x00))
	validCompressed, err := nullcomp.Compress(rawSave)
	if err != nil {
		t.Fatalf("compress valid savedata: %v", err)
	}

	// Build a compressed blob that will fail decompression (garbage bytes).
	invalidCompressed := []byte("this is not valid compressed data")

	corruptHash := make([]byte, 32) // zeroed except a 0xFF marker byte — never matches any real savedata hash
	corruptHash[0] = 0xFF

	repo := NewCharacterRepository(db)

	t.Run("recovers_from_most_recent_backup", func(t *testing.T) {
		userID := CreateTestUser(t, db, "recovery_user")
		charID := CreateTestCharacter(t, db, userID, "BackupChar")

		// Store a valid backup in slot 0.
		if err := repo.SaveBackup(charID, 0, validCompressed); err != nil {
			t.Fatalf("SaveBackup: %v", err)
		}

		// Set a wrong hash on the primary so the integrity check fails.
		if _, err := db.Exec("UPDATE characters SET savedata_hash = $1 WHERE id = $2", corruptHash, charID); err != nil {
			t.Fatalf("set corrupt hash: %v", err)
		}

		mock := &MockCryptConn{sentPackets: make([][]byte, 0)}
		s := createTestSession(mock)
		s.charID = charID
		SetTestDB(s.server, db)
		s.server.erupeConfig.RealClientMode = cfg.Z2

		got, err := GetCharacterSaveData(s, charID)
		if err != nil {
			t.Fatalf("GetCharacterSaveData() unexpected error: %v", err)
		}
		if got == nil {
			t.Fatal("GetCharacterSaveData() returned nil")
		}
		if got.CharID != charID {
			t.Errorf("CharID = %d, want %d", got.CharID, charID)
		}
	})

	t.Run("skips_corrupt_backup_and_uses_next", func(t *testing.T) {
		userID := CreateTestUser(t, db, "multibackup_user")
		charID := CreateTestCharacter(t, db, userID, "BackupChar")

		// Slot 1 is newer (saved second) but has invalid compressed data.
		// Slot 0 is older but valid. Recovery must skip slot 1 and use slot 0.
		if err := repo.SaveBackup(charID, 0, validCompressed); err != nil {
			t.Fatalf("SaveBackup slot 0: %v", err)
		}
		if err := repo.SaveBackup(charID, 1, invalidCompressed); err != nil {
			t.Fatalf("SaveBackup slot 1: %v", err)
		}
		// Update slot 1's saved_at to be newer than slot 0.
		if _, err := db.Exec(
			"UPDATE savedata_backups SET saved_at = now() + interval '1 minute' WHERE char_id = $1 AND slot = 1",
			charID,
		); err != nil {
			t.Fatalf("update saved_at: %v", err)
		}

		if _, err := db.Exec("UPDATE characters SET savedata_hash = $1 WHERE id = $2", corruptHash, charID); err != nil {
			t.Fatalf("set corrupt hash: %v", err)
		}

		mock := &MockCryptConn{sentPackets: make([][]byte, 0)}
		s := createTestSession(mock)
		s.charID = charID
		SetTestDB(s.server, db)
		s.server.erupeConfig.RealClientMode = cfg.Z2

		got, err := GetCharacterSaveData(s, charID)
		if err != nil {
			t.Fatalf("GetCharacterSaveData() unexpected error: %v", err)
		}
		if got == nil {
			t.Fatal("GetCharacterSaveData() returned nil")
		}
	})

	t.Run("returns_error_when_no_backups", func(t *testing.T) {
		userID := CreateTestUser(t, db, "nobackup_user")
		charID := CreateTestCharacter(t, db, userID, "NoBackupChar")

		if _, err := db.Exec("UPDATE characters SET savedata_hash = $1 WHERE id = $2", corruptHash, charID); err != nil {
			t.Fatalf("set corrupt hash: %v", err)
		}

		mock := &MockCryptConn{sentPackets: make([][]byte, 0)}
		s := createTestSession(mock)
		s.charID = charID
		SetTestDB(s.server, db)
		s.server.erupeConfig.RealClientMode = cfg.Z2

		_, err := GetCharacterSaveData(s, charID)
		if err == nil {
			t.Fatal("expected error when no backups available, got nil")
		}
	})

	t.Run("returns_error_when_all_backups_corrupt", func(t *testing.T) {
		userID := CreateTestUser(t, db, "allcorrupt_user")
		charID := CreateTestCharacter(t, db, userID, "AllCorruptChar")

		if err := repo.SaveBackup(charID, 0, invalidCompressed); err != nil {
			t.Fatalf("SaveBackup: %v", err)
		}

		if _, err := db.Exec("UPDATE characters SET savedata_hash = $1 WHERE id = $2", corruptHash, charID); err != nil {
			t.Fatalf("set corrupt hash: %v", err)
		}

		mock := &MockCryptConn{sentPackets: make([][]byte, 0)}
		s := createTestSession(mock)
		s.charID = charID
		SetTestDB(s.server, db)
		s.server.erupeConfig.RealClientMode = cfg.Z2

		_, err := GetCharacterSaveData(s, charID)
		if err == nil {
			t.Fatal("expected error when all backups corrupt, got nil")
		}
	})
}
|
||||
|
||||
// TestCharacterSaveData_Save_Integration tests saving character data to database
|
||||
func TestCharacterSaveData_Save_Integration(t *testing.T) {
|
||||
db := SetupTestDB(t)
|
||||
|
||||
@@ -123,6 +123,7 @@ func handleMsgMhfSavedata(s *Session, p mhfpacket.MHFPacket) {
|
||||
if characterSaveData.Name == s.Name || s.server.erupeConfig.RealClientMode <= cfg.S10 {
|
||||
if err := characterSaveData.Save(s); err != nil {
|
||||
s.logger.Error("Failed to save character data", zap.Error(err))
|
||||
doAckSimpleFail(s, pkt.AckHandle, make([]byte, 4))
|
||||
return
|
||||
}
|
||||
s.logger.Info("Wrote recompressed savedata back to DB.")
|
||||
|
||||
@@ -23,6 +23,9 @@ func cleanupDiva(s *Session) {
|
||||
if err := s.server.divaRepo.DeleteEvents(); err != nil {
|
||||
s.logger.Error("Failed to delete diva events", zap.Error(err))
|
||||
}
|
||||
if err := s.server.divaRepo.CleanupBeads(); err != nil {
|
||||
s.logger.Error("Failed to cleanup diva beads", zap.Error(err))
|
||||
}
|
||||
}
|
||||
|
||||
func generateDivaTimestamps(s *Session, start uint32, debug bool) []uint32 {
|
||||
@@ -137,16 +140,50 @@ func handleMsgMhfGetUdInfo(s *Session, p mhfpacket.MHFPacket) {
|
||||
doAckBufSucceed(s, pkt.AckHandle, resp.Data())
|
||||
}
|
||||
|
||||
// defaultBeadTypes are used when the database has no bead rows configured.
|
||||
var defaultBeadTypes = []int{1, 3, 4, 8}
|
||||
|
||||
func handleMsgMhfGetKijuInfo(s *Session, p mhfpacket.MHFPacket) {
|
||||
pkt := p.(*mhfpacket.MsgMhfGetKijuInfo)
|
||||
// Temporary canned response
|
||||
data, _ := hex.DecodeString("04965C959782CC8B468EEC00000000000000000000000000000000000000000000815C82A082E782B582DC82A982BA82CC82AB82B682E3815C0A965C959782C682CD96D282E98E7682A281420A95B782AD8ED282C997458B4382F0975E82A682E98142000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001018BAD8C8282CC8B468EEC00000000000000000000000000000000000000000000815C82AB82E582A482B082AB82CC82AB82B682E3815C0A8BAD8C8282C682CD8BAD82A290BA904681420A95B782AD8ED282CC97CD82F08CA482AC909F82DC82B78142200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003138C8B8F5782CC8B468EEC00000000000000000000000000000000000000000000815C82AF82C182B582E382A482CC82AB82B682E3815C0A8C8B8F5782C682CD8A6D8CC582BD82E9904D978A81420A8F5782DF82E982D982C782C98EEB906C82BD82BF82CC90B8905F97CD82C682C882E9814200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000041189CC8CEC82CC8B468EEC00000000000000000000000000000000000000000000815C82A482BD82DC82E082E882CC82AB82B682E3815C0A89CC8CEC82C682CD89CC955082CC8CEC82E881420A8F5782DF82E982D982C782C98EEB906C82BD82BF82CC8E7882A682C682C882E9814220000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000212")
|
||||
doAckBufSucceed(s, pkt.AckHandle, data)
|
||||
|
||||
// RE-confirmed entry layout (546 bytes each):
|
||||
// +0x000 char[32] name
|
||||
// +0x020 char[512] description
|
||||
// +0x220 u8 color_id (slot index, 1-based)
|
||||
// +0x221 u8 bead_type (effect ID)
|
||||
// Response: u8 count + count × 546 bytes.
|
||||
beadTypes, err := s.server.divaRepo.GetBeads()
|
||||
if err != nil || len(beadTypes) == 0 {
|
||||
beadTypes = defaultBeadTypes
|
||||
}
|
||||
|
||||
lang := getLangStrings(s.server)
|
||||
bf := byteframe.NewByteFrame()
|
||||
bf.WriteUint8(uint8(len(beadTypes)))
|
||||
for i, bt := range beadTypes {
|
||||
name, desc := lang.beadName(bt), lang.beadDescription(bt)
|
||||
bf.WriteBytes(stringsupport.PaddedString(name, 32, true))
|
||||
bf.WriteBytes(stringsupport.PaddedString(desc, 512, true))
|
||||
bf.WriteUint8(uint8(i + 1)) // color_id: slot 1..N
|
||||
bf.WriteUint8(uint8(bt)) // bead_type: effect ID
|
||||
}
|
||||
|
||||
doAckBufSucceed(s, pkt.AckHandle, bf.Data())
|
||||
}
|
||||
|
||||
func handleMsgMhfSetKiju(s *Session, p mhfpacket.MHFPacket) {
|
||||
pkt := p.(*mhfpacket.MsgMhfSetKiju)
|
||||
doAckSimpleSucceed(s, pkt.AckHandle, []byte{0x00, 0x00, 0x00, 0x00})
|
||||
beadIndex := int(pkt.Unk1)
|
||||
expiry := TimeAdjusted().Add(24 * time.Hour)
|
||||
if err := s.server.divaRepo.AssignBead(s.charID, beadIndex, expiry); err != nil {
|
||||
s.logger.Warn("Failed to assign bead",
|
||||
zap.Uint32("charID", s.charID),
|
||||
zap.Int("beadIndex", beadIndex),
|
||||
zap.Error(err))
|
||||
} else {
|
||||
s.currentBeadIndex = beadIndex
|
||||
}
|
||||
doAckSimpleSucceed(s, pkt.AckHandle, []byte{0x00})
|
||||
}
|
||||
|
||||
func handleMsgMhfAddUdPoint(s *Session, p mhfpacket.MHFPacket) {
|
||||
@@ -169,6 +206,17 @@ func handleMsgMhfAddUdPoint(s *Session, p mhfpacket.MHFPacket) {
|
||||
zap.Uint32("bonusPoints", pkt.BonusPoints),
|
||||
zap.Error(err))
|
||||
}
|
||||
if s.currentBeadIndex >= 0 {
|
||||
total := int(pkt.QuestPoints) + int(pkt.BonusPoints)
|
||||
if total > 0 {
|
||||
if err := s.server.divaRepo.AddBeadPoints(s.charID, s.currentBeadIndex, total); err != nil {
|
||||
s.logger.Warn("Failed to add bead points",
|
||||
zap.Uint32("charID", s.charID),
|
||||
zap.Int("beadIndex", s.currentBeadIndex),
|
||||
zap.Error(err))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
doAckSimpleSucceed(s, pkt.AckHandle, []byte{0x00, 0x00, 0x00, 0x00})
|
||||
@@ -176,23 +224,92 @@ func handleMsgMhfAddUdPoint(s *Session, p mhfpacket.MHFPacket) {
|
||||
|
||||
func handleMsgMhfGetUdMyPoint(s *Session, p mhfpacket.MHFPacket) {
|
||||
pkt := p.(*mhfpacket.MsgMhfGetUdMyPoint)
|
||||
// Temporary canned response
|
||||
data, _ := hex.DecodeString("00040000013C000000FA000000000000000000040000007E0000003C02000000000000000000000000000000000000000000000000000002000004CC00000438000000000000000000000000000000000000000000000000000000020000026E00000230000000000000000000020000007D0000007D000000000000000000000000000000000000000000000000000000")
|
||||
doAckBufSucceed(s, pkt.AckHandle, data)
|
||||
|
||||
// RE confirms: no count prefix. Client hardcodes exactly 8 loop iterations.
|
||||
// Per-entry stride is 18 bytes:
|
||||
// +0x00 u8 bead_index
|
||||
// +0x01 u32 points
|
||||
// +0x05 u32 points_dupe (same value as points)
|
||||
// +0x09 u8 unk1 (half-period: 0=first 12h, 1=second 12h)
|
||||
// +0x0A u32 unk2
|
||||
// +0x0E u32 unk3
|
||||
// Total: 8 × 18 = 144 bytes.
|
||||
beadPoints, err := s.server.divaRepo.GetCharacterBeadPoints(s.charID)
|
||||
if err != nil {
|
||||
s.logger.Warn("Failed to get bead points", zap.Uint32("charID", s.charID), zap.Error(err))
|
||||
beadPoints = map[int]int{}
|
||||
}
|
||||
activeBead := uint8(0)
|
||||
if s.currentBeadIndex >= 0 {
|
||||
activeBead = uint8(s.currentBeadIndex)
|
||||
}
|
||||
pts := uint32(0)
|
||||
if s.currentBeadIndex >= 0 {
|
||||
if p, ok := beadPoints[s.currentBeadIndex]; ok {
|
||||
pts = uint32(p)
|
||||
}
|
||||
}
|
||||
bf := byteframe.NewByteFrame()
|
||||
for i := 0; i < 8; i++ {
|
||||
bf.WriteUint8(activeBead)
|
||||
bf.WriteUint32(pts)
|
||||
bf.WriteUint32(pts) // points_dupe
|
||||
bf.WriteUint8(uint8(i % 2)) // unk1: 0=first half, 1=second half
|
||||
bf.WriteUint32(0) // unk2
|
||||
bf.WriteUint32(0) // unk3
|
||||
}
|
||||
doAckBufSucceed(s, pkt.AckHandle, bf.Data())
|
||||
}
|
||||
|
||||
// udMilestones are the global contribution milestones for Diva Defense.
// RE confirms the response layout these feed: 64 × u64 target_values +
// 64 × u8 target_types + u64 total (585 bytes including the leading
// error byte). Slots 0–12 are populated; slots 13–63 serialize as zero.
var udMilestones = []uint64{
	500000, 1000000, 2000000, 3000000, 5000000, 7000000, 10000000,
	15000000, 20000000, 30000000, 50000000, 70000000, 100000000,
}
|
||||
|
||||
func handleMsgMhfGetUdTotalPointInfo(s *Session, p mhfpacket.MHFPacket) {
|
||||
pkt := p.(*mhfpacket.MsgMhfGetUdTotalPointInfo)
|
||||
// Temporary canned response
|
||||
data, _ := hex.DecodeString("00000000000007A12000000000000F424000000000001E848000000000002DC6C000000000003D090000000000004C4B4000000000005B8D8000000000006ACFC000000000007A1200000000000089544000000000009896800000000000E4E1C00000000001312D0000000000017D78400000000001C9C3800000000002160EC00000000002625A000000000002AEA5400000000002FAF0800000000003473BC0000000000393870000000000042C1D800000000004C4B40000000000055D4A800000000005F5E10000000000008954400000000001C9C3800000000003473BC00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001020300000000000000000000000000000000000000000000000000000000000000000000000000000000101F1420")
|
||||
doAckBufSucceed(s, pkt.AckHandle, data)
|
||||
|
||||
total, err := s.server.divaRepo.GetTotalBeadPoints()
|
||||
if err != nil {
|
||||
s.logger.Warn("Failed to get total bead points", zap.Error(err))
|
||||
}
|
||||
|
||||
bf := byteframe.NewByteFrame()
|
||||
bf.WriteUint8(0) // error = success
|
||||
// 64 × u64 target_values (big-endian)
|
||||
for i := 0; i < 64; i++ {
|
||||
var v uint64
|
||||
if i < len(udMilestones) {
|
||||
v = udMilestones[i]
|
||||
}
|
||||
bf.WriteUint64(v)
|
||||
}
|
||||
// 64 × u8 target_types (0 = global)
|
||||
for i := 0; i < 64; i++ {
|
||||
bf.WriteUint8(0)
|
||||
}
|
||||
// u64 total_souls
|
||||
bf.WriteUint64(uint64(total))
|
||||
doAckBufSucceed(s, pkt.AckHandle, bf.Data())
|
||||
}
|
||||
|
||||
func handleMsgMhfGetUdSelectedColorInfo(s *Session, p mhfpacket.MHFPacket) {
|
||||
pkt := p.(*mhfpacket.MsgMhfGetUdSelectedColorInfo)
|
||||
|
||||
// Unk
|
||||
doAckBufSucceed(s, pkt.AckHandle, []byte{0x00, 0x01, 0x01, 0x01, 0x02, 0x03, 0x02, 0x00, 0x00})
|
||||
// RE confirms: exactly 9 bytes = u8 error + u8[8] winning colors.
|
||||
bf := byteframe.NewByteFrame()
|
||||
bf.WriteUint8(0) // error = success
|
||||
for day := 0; day < 8; day++ {
|
||||
topBead, err := s.server.divaRepo.GetTopBeadPerDay(day)
|
||||
if err != nil {
|
||||
topBead = 0
|
||||
}
|
||||
bf.WriteUint8(uint8(topBead))
|
||||
}
|
||||
doAckBufSucceed(s, pkt.AckHandle, bf.Data())
|
||||
}
|
||||
|
||||
func handleMsgMhfGetUdMonsterPoint(s *Session, p mhfpacket.MHFPacket) {
|
||||
@@ -329,16 +446,25 @@ func handleMsgMhfGetUdMonsterPoint(s *Session, p mhfpacket.MHFPacket) {
|
||||
|
||||
func handleMsgMhfGetUdDailyPresentList(s *Session, p mhfpacket.MHFPacket) {
|
||||
pkt := p.(*mhfpacket.MsgMhfGetUdDailyPresentList)
|
||||
// Temporary canned response
|
||||
data, _ := hex.DecodeString("0100001600000A5397DF00000000000000000000000000000000")
|
||||
doAckBufSucceed(s, pkt.AckHandle, data)
|
||||
// DailyPresentList: u16 count + count × 15-byte entries.
|
||||
// Entry: u8 rank_type, u16 rank_from, u16 rank_to, u8 item_type,
|
||||
// u16 _pad0(skip), u16 item_id, u16 _pad1(skip), u16 quantity, u8 unk.
|
||||
// Padding at +6 and +10 is NOT read by the client.
|
||||
bf := byteframe.NewByteFrame()
|
||||
bf.WriteUint16(0) // count = 0 (no entries configured)
|
||||
doAckBufSucceed(s, pkt.AckHandle, bf.Data())
|
||||
}
|
||||
|
||||
func handleMsgMhfGetUdNormaPresentList(s *Session, p mhfpacket.MHFPacket) {
|
||||
pkt := p.(*mhfpacket.MsgMhfGetUdNormaPresentList)
|
||||
// Temporary canned response
|
||||
data, _ := hex.DecodeString("0100001600000A5397DF00000000000000000000000000000000")
|
||||
doAckBufSucceed(s, pkt.AckHandle, data)
|
||||
// NormaPresentList: u16 count + count × 19-byte entries.
|
||||
// Same layout as DailyPresent (+0x00..+0x0D), plus:
|
||||
// +0x0E u32 points_required (norma threshold)
|
||||
// +0x12 u8 bead_type (BeadType that unlocks this tier)
|
||||
// Padding at +6 and +10 NOT read.
|
||||
bf := byteframe.NewByteFrame()
|
||||
bf.WriteUint16(0) // count = 0 (no entries configured)
|
||||
doAckBufSucceed(s, pkt.AckHandle, bf.Data())
|
||||
}
|
||||
|
||||
func handleMsgMhfAcquireUdItem(s *Session, p mhfpacket.MHFPacket) {
|
||||
|
||||
@@ -26,65 +26,6 @@ func handleMsgMhfLoadMezfesData(s *Session, p mhfpacket.MHFPacket) {
|
||||
[]byte{0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00})
|
||||
}
|
||||
|
||||
// handleMsgMhfEnumerateRanking reports hunting-tournament scheduling data.
// DebugOptions.TournamentOverride picks which phase window is emitted
// (1, 2, 3 shift the same four timestamps relative to today's midnight);
// any other value sends a zeroed "no tournament" layout and returns early.
func handleMsgMhfEnumerateRanking(s *Session, p mhfpacket.MHFPacket) {
	pkt := p.(*mhfpacket.MsgMhfEnumerateRanking)
	bf := byteframe.NewByteFrame()
	state := s.server.erupeConfig.DebugOptions.TournamentOverride
	// Four u32 timestamps follow. Based on the per-state offsets they look
	// like entry-start / entry-end / ranking-start / ranking-end, but the
	// field names are unconfirmed (original comments: Unk/Unk/Start?/End?).
	midnight := TimeMidnight()
	switch state {
	case 1:
		bf.WriteUint32(uint32(midnight.Unix()))
		bf.WriteUint32(uint32(midnight.Add(3 * 24 * time.Hour).Unix()))
		bf.WriteUint32(uint32(midnight.Add(13 * 24 * time.Hour).Unix()))
		bf.WriteUint32(uint32(midnight.Add(20 * 24 * time.Hour).Unix()))
	case 2:
		bf.WriteUint32(uint32(midnight.Add(-3 * 24 * time.Hour).Unix()))
		bf.WriteUint32(uint32(midnight.Unix()))
		bf.WriteUint32(uint32(midnight.Add(10 * 24 * time.Hour).Unix()))
		bf.WriteUint32(uint32(midnight.Add(17 * 24 * time.Hour).Unix()))
	case 3:
		bf.WriteUint32(uint32(midnight.Add(-13 * 24 * time.Hour).Unix()))
		bf.WriteUint32(uint32(midnight.Add(-10 * 24 * time.Hour).Unix()))
		bf.WriteUint32(uint32(midnight.Unix()))
		bf.WriteUint32(uint32(midnight.Add(7 * 24 * time.Hour).Unix()))
	default:
		// No tournament configured: zeroed window, then the common trailer.
		bf.WriteBytes(make([]byte, 16))
		bf.WriteUint32(uint32(TimeAdjusted().Unix())) // TS Current Time
		bf.WriteUint8(3)
		bf.WriteBytes(make([]byte, 4))
		doAckBufSucceed(s, pkt.AckHandle, bf.Data())
		return
	}
	bf.WriteUint32(uint32(TimeAdjusted().Unix())) // TS Current Time
	bf.WriteUint8(3)
	ps.Uint8(bf, "", false) // empty pascal string
	bf.WriteUint16(0)       // numEvents
	bf.WriteUint8(0)        // numCups

	/*
		struct event
		uint32 eventID
		uint16 unk
		uint16 unk
		uint32 unk
		psUint8 name

		struct cup
		uint32 cupID
		uint16 unk
		uint16 unk
		uint16 unk
		psUint8 name
		psUint16 desc
	*/

	doAckBufSucceed(s, pkt.AckHandle, bf.Data())
}
|
||||
|
||||
// Festa timing constants (all values in seconds)
|
||||
const (
|
||||
festaVotingDuration = 9000 // 150 min voting window
|
||||
|
||||
@@ -372,7 +372,39 @@ func handleMsgMhfReadGuildcard(s *Session, p mhfpacket.MHFPacket) {
|
||||
|
||||
func handleMsgMhfEntryRookieGuild(s *Session, p mhfpacket.MHFPacket) {
|
||||
pkt := p.(*mhfpacket.MsgMhfEntryRookieGuild)
|
||||
doAckSimpleFail(s, pkt.AckHandle, make([]byte, 4))
|
||||
|
||||
// pkt.Unk==0: fresh rookie entering a rookie guild (return_type=1).
|
||||
// pkt.Unk>=1: returning player entering a comeback/return guild (return_type=2).
|
||||
returnType := uint8(1)
|
||||
nameTemplate := s.server.i18n.guild.rookieGuildName
|
||||
if pkt.Unk >= 1 {
|
||||
returnType = 2
|
||||
nameTemplate = s.server.i18n.guild.returnGuildName
|
||||
}
|
||||
|
||||
guildID, err := s.server.guildRepo.FindOrCreateReturnGuild(returnType, nameTemplate)
|
||||
if err != nil {
|
||||
s.logger.Error("failed to find/create return guild",
|
||||
zap.Uint32("charID", s.charID),
|
||||
zap.Error(err),
|
||||
)
|
||||
doAckSimpleFail(s, pkt.AckHandle, make([]byte, 4))
|
||||
return
|
||||
}
|
||||
|
||||
if err := s.server.guildRepo.AddMember(guildID, s.charID); err != nil {
|
||||
s.logger.Error("failed to add character to return guild",
|
||||
zap.Uint32("charID", s.charID),
|
||||
zap.Uint32("guildID", guildID),
|
||||
zap.Error(err),
|
||||
)
|
||||
doAckSimpleFail(s, pkt.AckHandle, make([]byte, 4))
|
||||
return
|
||||
}
|
||||
|
||||
bf := byteframe.NewByteFrame()
|
||||
bf.WriteUint32(guildID)
|
||||
doAckSimpleSucceed(s, pkt.AckHandle, bf.Data())
|
||||
}
|
||||
|
||||
func handleMsgMhfUpdateForceGuildRank(s *Session, p mhfpacket.MHFPacket) {} // stub: unimplemented
|
||||
|
||||
@@ -98,9 +98,9 @@ func handleMsgMhfInfoGuild(s *Session, p mhfpacket.MHFPacket) {
|
||||
bf.WriteInt8(int8(FestivalColorCodes[guild.FestivalColor]))
|
||||
bf.WriteUint32(guild.RankRP)
|
||||
bf.WriteBytes(guildLeaderName)
|
||||
bf.WriteUint32(0) // Unk
|
||||
bf.WriteBool(false) // isReturnGuild
|
||||
bf.WriteBool(false) // earnedSpecialHall
|
||||
bf.WriteUint32(0) // Unk
|
||||
bf.WriteBool(guild.ReturnType > 0) // isReturnGuild
|
||||
bf.WriteBool(false) // earnedSpecialHall
|
||||
bf.WriteUint8(2)
|
||||
bf.WriteUint8(2)
|
||||
bf.WriteUint32(guild.EventRP) // Skipped if last byte is <2?
|
||||
|
||||
@@ -125,6 +125,13 @@ func handleMsgMhfOperateGuild(s *Session, p mhfpacket.MHFPacket) {
|
||||
s.logger.Error("Failed to exchange guild event RP", zap.Error(err))
|
||||
}
|
||||
bf.WriteUint32(balance)
|
||||
case mhfpacket.OperateGuildGraduateRookie, mhfpacket.OperateGuildGraduateReturn:
|
||||
// Player graduates (leaves) a temporary return/rookie guild.
|
||||
// No extra packet data — just remove and succeed.
|
||||
isApplicant := characterGuildInfo != nil && characterGuildInfo.IsApplicant
|
||||
if _, err := s.server.guildService.Leave(s.charID, guild.ID, isApplicant, guild.Name); err != nil {
|
||||
s.logger.Error("Failed to graduate from return guild", zap.Error(err))
|
||||
}
|
||||
default:
|
||||
s.logger.Error("unhandled operate guild action", zap.Uint8("action", uint8(pkt.Action)))
|
||||
}
|
||||
|
||||
@@ -53,7 +53,7 @@ func handleMsgMhfCancelGuildScout(s *Session, p mhfpacket.MHFPacket) {
|
||||
return
|
||||
}
|
||||
|
||||
err = s.server.guildRepo.CancelInvitation(guild.ID, pkt.InvitationID)
|
||||
err = s.server.guildRepo.CancelInvite(pkt.InvitationID)
|
||||
|
||||
if err != nil {
|
||||
doAckBufFail(s, pkt.AckHandle, make([]byte, 4))
|
||||
@@ -123,28 +123,25 @@ func handleMsgMhfGetGuildScoutList(s *Session, p mhfpacket.MHFPacket) {
|
||||
}
|
||||
}
|
||||
|
||||
chars, err := s.server.guildRepo.ListInvitedCharacters(guildInfo.ID)
|
||||
invites, err := s.server.guildRepo.ListInvites(guildInfo.ID)
|
||||
if err != nil {
|
||||
s.logger.Error("failed to retrieve scouted characters", zap.Error(err))
|
||||
s.logger.Error("failed to retrieve scout invites", zap.Error(err))
|
||||
doAckSimpleSucceed(s, pkt.AckHandle, make([]byte, 4))
|
||||
return
|
||||
}
|
||||
|
||||
bf := byteframe.NewByteFrame()
|
||||
bf.SetBE()
|
||||
bf.WriteUint32(uint32(len(chars)))
|
||||
bf.WriteUint32(uint32(len(invites)))
|
||||
|
||||
for _, sc := range chars {
|
||||
// This seems to be used as a unique ID for the invitation sent
|
||||
// we can just use the charID and then filter on guild_id+charID when performing operations
|
||||
// this might be a problem later with mails sent referencing IDs but we'll see.
|
||||
bf.WriteUint32(sc.CharID)
|
||||
bf.WriteUint32(sc.ActorID)
|
||||
bf.WriteUint32(sc.CharID)
|
||||
bf.WriteUint32(uint32(TimeAdjusted().Unix()))
|
||||
bf.WriteUint16(sc.HR)
|
||||
bf.WriteUint16(sc.GR)
|
||||
bf.WriteBytes(stringsupport.PaddedString(sc.Name, 32, true))
|
||||
for _, inv := range invites {
|
||||
bf.WriteUint32(inv.ID)
|
||||
bf.WriteUint32(inv.ActorID)
|
||||
bf.WriteUint32(inv.CharID)
|
||||
bf.WriteUint32(uint32(inv.InvitedAt.Unix()))
|
||||
bf.WriteUint16(inv.HR)
|
||||
bf.WriteUint16(inv.GR)
|
||||
bf.WriteBytes(stringsupport.PaddedString(inv.Name, 32, true))
|
||||
}
|
||||
|
||||
doAckBufSucceed(s, pkt.AckHandle, bf.Data())
|
||||
|
||||
@@ -12,7 +12,7 @@ func TestAnswerGuildScout_Accept(t *testing.T) {
|
||||
server := createMockServer()
|
||||
mailMock := &mockMailRepo{}
|
||||
guildMock := &mockGuildRepo{
|
||||
application: &GuildApplication{GuildID: 10, CharID: 1},
|
||||
hasInviteResult: true,
|
||||
}
|
||||
guildMock.guild = &Guild{ID: 10, Name: "TestGuild"}
|
||||
guildMock.guild.LeaderCharID = 50
|
||||
@@ -29,8 +29,8 @@ func TestAnswerGuildScout_Accept(t *testing.T) {
|
||||
|
||||
handleMsgMhfAnswerGuildScout(session, pkt)
|
||||
|
||||
if guildMock.acceptedCharID != 1 {
|
||||
t.Errorf("AcceptApplication charID = %d, want 1", guildMock.acceptedCharID)
|
||||
if guildMock.acceptInviteCharID != 1 {
|
||||
t.Errorf("AcceptInvite charID = %d, want 1", guildMock.acceptInviteCharID)
|
||||
}
|
||||
if len(mailMock.sentMails) != 2 {
|
||||
t.Fatalf("Expected 2 mails (self + leader), got %d", len(mailMock.sentMails))
|
||||
@@ -47,7 +47,7 @@ func TestAnswerGuildScout_Decline(t *testing.T) {
|
||||
server := createMockServer()
|
||||
mailMock := &mockMailRepo{}
|
||||
guildMock := &mockGuildRepo{
|
||||
application: &GuildApplication{GuildID: 10, CharID: 1},
|
||||
hasInviteResult: true,
|
||||
}
|
||||
guildMock.guild = &Guild{ID: 10, Name: "TestGuild"}
|
||||
guildMock.guild.LeaderCharID = 50
|
||||
@@ -64,8 +64,8 @@ func TestAnswerGuildScout_Decline(t *testing.T) {
|
||||
|
||||
handleMsgMhfAnswerGuildScout(session, pkt)
|
||||
|
||||
if guildMock.rejectedCharID != 1 {
|
||||
t.Errorf("RejectApplication charID = %d, want 1", guildMock.rejectedCharID)
|
||||
if guildMock.declineInviteCharID != 1 {
|
||||
t.Errorf("DeclineInvite charID = %d, want 1", guildMock.declineInviteCharID)
|
||||
}
|
||||
if len(mailMock.sentMails) != 2 {
|
||||
t.Fatalf("Expected 2 mails (self + leader), got %d", len(mailMock.sentMails))
|
||||
@@ -101,7 +101,7 @@ func TestAnswerGuildScout_ApplicationMissing(t *testing.T) {
|
||||
server := createMockServer()
|
||||
mailMock := &mockMailRepo{}
|
||||
guildMock := &mockGuildRepo{
|
||||
application: nil, // no application found
|
||||
hasInviteResult: false, // no invite found
|
||||
}
|
||||
guildMock.guild = &Guild{ID: 10, Name: "TestGuild"}
|
||||
guildMock.guild.LeaderCharID = 50
|
||||
@@ -134,7 +134,7 @@ func TestAnswerGuildScout_MailError(t *testing.T) {
|
||||
server := createMockServer()
|
||||
mailMock := &mockMailRepo{sendErr: errNotFound}
|
||||
guildMock := &mockGuildRepo{
|
||||
application: &GuildApplication{GuildID: 10, CharID: 1},
|
||||
hasInviteResult: true,
|
||||
}
|
||||
guildMock.guild = &Guild{ID: 10, Name: "TestGuild"}
|
||||
guildMock.guild.LeaderCharID = 50
|
||||
|
||||
@@ -923,23 +923,36 @@ func TestCheckMonthlyItem_UnknownType(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestHandleMsgMhfEntryRookieGuild(t *testing.T) {
|
||||
server := createMockServer()
|
||||
session := createMockSession(1, server)
|
||||
|
||||
pkt := &mhfpacket.MsgMhfEntryRookieGuild{
|
||||
AckHandle: 12345,
|
||||
Unk: 42,
|
||||
tests := []struct {
|
||||
name string
|
||||
unk uint32
|
||||
}{
|
||||
{"rookie (Unk=0)", 0},
|
||||
{"comeback (Unk=1)", 1},
|
||||
{"comeback with hr (Unk=2)", 2},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
server := createMockServer()
|
||||
server.guildRepo = &mockGuildRepo{}
|
||||
session := createMockSession(1, server)
|
||||
|
||||
handleMsgMhfEntryRookieGuild(session, pkt)
|
||||
pkt := &mhfpacket.MsgMhfEntryRookieGuild{
|
||||
AckHandle: 12345,
|
||||
Unk: tt.unk,
|
||||
}
|
||||
|
||||
select {
|
||||
case p := <-session.sendPackets:
|
||||
if len(p.data) == 0 {
|
||||
t.Error("Response packet should have data")
|
||||
}
|
||||
default:
|
||||
t.Error("No response packet queued")
|
||||
handleMsgMhfEntryRookieGuild(session, pkt)
|
||||
|
||||
select {
|
||||
case p := <-session.sendPackets:
|
||||
if len(p.data) == 0 {
|
||||
t.Error("Response packet should have data")
|
||||
}
|
||||
default:
|
||||
t.Error("No response packet queued")
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -9,11 +9,6 @@ import (
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
func handleMsgMhfTransferItem(s *Session, p mhfpacket.MHFPacket) {
|
||||
pkt := p.(*mhfpacket.MsgMhfTransferItem)
|
||||
doAckSimpleSucceed(s, pkt.AckHandle, []byte{0x00, 0x00, 0x00, 0x00})
|
||||
}
|
||||
|
||||
func handleMsgMhfEnumeratePrice(s *Session, p mhfpacket.MHFPacket) {
|
||||
pkt := p.(*mhfpacket.MsgMhfEnumeratePrice)
|
||||
bf := byteframe.NewByteFrame()
|
||||
@@ -50,10 +45,6 @@ func handleMsgMhfEnumeratePrice(s *Session, p mhfpacket.MHFPacket) {
|
||||
doAckBufSucceed(s, pkt.AckHandle, bf.Data())
|
||||
}
|
||||
|
||||
func handleMsgMhfEnumerateOrder(s *Session, p mhfpacket.MHFPacket) {
|
||||
pkt := p.(*mhfpacket.MsgMhfEnumerateOrder)
|
||||
stubEnumerateNoResults(s, pkt.AckHandle)
|
||||
}
|
||||
|
||||
func handleMsgMhfGetExtraInfo(s *Session, p mhfpacket.MHFPacket) {
|
||||
pkt := p.(*mhfpacket.MsgMhfGetExtraInfo)
|
||||
|
||||
@@ -147,11 +147,39 @@ func handleMsgMhfGetSenyuDailyCount(s *Session, p mhfpacket.MHFPacket) {
|
||||
doAckBufSucceed(s, pkt.AckHandle, bf.Data())
|
||||
}
|
||||
|
||||
func handleMsgMhfGetDailyMissionMaster(s *Session, p mhfpacket.MHFPacket) {} // stub: unimplemented
|
||||
// handleMsgMhfGetDailyMissionMaster returns an empty daily mission master list.
|
||||
// The full response format is not yet reverse-engineered; count=0 is safe.
|
||||
func handleMsgMhfGetDailyMissionMaster(s *Session, p mhfpacket.MHFPacket) {
|
||||
if p == nil {
|
||||
return
|
||||
}
|
||||
pkt := p.(*mhfpacket.MsgMhfGetDailyMissionMaster)
|
||||
bf := byteframe.NewByteFrame()
|
||||
bf.WriteUint32(0) // entry count = 0
|
||||
doAckBufSucceed(s, pkt.AckHandle, bf.Data())
|
||||
}
|
||||
|
||||
func handleMsgMhfGetDailyMissionPersonal(s *Session, p mhfpacket.MHFPacket) {} // stub: unimplemented
|
||||
// handleMsgMhfGetDailyMissionPersonal returns an empty personal daily mission progress list.
|
||||
// The full response format is not yet reverse-engineered; count=0 is safe.
|
||||
func handleMsgMhfGetDailyMissionPersonal(s *Session, p mhfpacket.MHFPacket) {
|
||||
if p == nil {
|
||||
return
|
||||
}
|
||||
pkt := p.(*mhfpacket.MsgMhfGetDailyMissionPersonal)
|
||||
bf := byteframe.NewByteFrame()
|
||||
bf.WriteUint32(0) // entry count = 0
|
||||
doAckBufSucceed(s, pkt.AckHandle, bf.Data())
|
||||
}
|
||||
|
||||
func handleMsgMhfSetDailyMissionPersonal(s *Session, p mhfpacket.MHFPacket) {} // stub: unimplemented
|
||||
// handleMsgMhfSetDailyMissionPersonal acknowledges a personal daily mission progress write.
|
||||
// The full request/response format is not yet reverse-engineered.
|
||||
func handleMsgMhfSetDailyMissionPersonal(s *Session, p mhfpacket.MHFPacket) {
|
||||
if p == nil {
|
||||
return
|
||||
}
|
||||
pkt := p.(*mhfpacket.MsgMhfSetDailyMissionPersonal)
|
||||
doAckSimpleSucceed(s, pkt.AckHandle, make([]byte, 4))
|
||||
}
|
||||
|
||||
// Equip skin history buffer sizes per game version
|
||||
const (
|
||||
|
||||
@@ -107,10 +107,9 @@ func handleMsgSysGetFile(s *Session, p mhfpacket.MHFPacket) {
|
||||
)
|
||||
}
|
||||
filename := fmt.Sprintf("%d_0_0_0_S%d_T%d_C%d", pkt.ScenarioIdentifer.CategoryID, pkt.ScenarioIdentifer.MainID, pkt.ScenarioIdentifer.Flags, pkt.ScenarioIdentifer.ChapterID)
|
||||
// Read the scenario file.
|
||||
data, err := os.ReadFile(filepath.Join(s.server.erupeConfig.BinPath, fmt.Sprintf("scenarios/%s.bin", filename)))
|
||||
data, err := loadScenarioBinary(s, filename)
|
||||
if err != nil {
|
||||
s.logger.Error("Failed to open scenario file", zap.String("binPath", s.server.erupeConfig.BinPath), zap.String("filename", filename))
|
||||
s.logger.Error("Failed to open scenario file", zap.String("binPath", s.server.erupeConfig.BinPath), zap.String("filename", filename), zap.Error(err))
|
||||
doAckBufFail(s, pkt.AckHandle, nil)
|
||||
return
|
||||
}
|
||||
@@ -127,9 +126,9 @@ func handleMsgSysGetFile(s *Session, p mhfpacket.MHFPacket) {
|
||||
pkt.Filename = seasonConversion(s, pkt.Filename)
|
||||
}
|
||||
|
||||
data, err := os.ReadFile(filepath.Join(s.server.erupeConfig.BinPath, fmt.Sprintf("quests/%s.bin", pkt.Filename)))
|
||||
data, err := loadQuestBinary(s, pkt.Filename)
|
||||
if err != nil {
|
||||
s.logger.Error("Failed to open quest file", zap.String("binPath", s.server.erupeConfig.BinPath), zap.String("filename", pkt.Filename))
|
||||
s.logger.Error("Failed to open quest file", zap.String("binPath", s.server.erupeConfig.BinPath), zap.String("filename", pkt.Filename), zap.Error(err))
|
||||
doAckBufFail(s, pkt.AckHandle, nil)
|
||||
return
|
||||
}
|
||||
@@ -141,10 +140,54 @@ func handleMsgSysGetFile(s *Session, p mhfpacket.MHFPacket) {
|
||||
}
|
||||
|
||||
func questFileExists(s *Session, filename string) bool {
|
||||
_, err := os.Stat(filepath.Join(s.server.erupeConfig.BinPath, fmt.Sprintf("quests/%s.bin", filename)))
|
||||
base := filepath.Join(s.server.erupeConfig.BinPath, "quests", filename)
|
||||
if _, err := os.Stat(base + ".bin"); err == nil {
|
||||
return true
|
||||
}
|
||||
_, err := os.Stat(base + ".json")
|
||||
return err == nil
|
||||
}
|
||||
|
||||
// loadQuestBinary loads a quest file by name, trying .bin first then .json.
|
||||
// For .json files it compiles the JSON to the MHF binary wire format.
|
||||
func loadQuestBinary(s *Session, filename string) ([]byte, error) {
|
||||
base := filepath.Join(s.server.erupeConfig.BinPath, "quests", filename)
|
||||
|
||||
if data, err := os.ReadFile(base + ".bin"); err == nil {
|
||||
return data, nil
|
||||
}
|
||||
|
||||
jsonData, err := os.ReadFile(base + ".json")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
compiled, err := CompileQuestJSON(jsonData)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("compile quest JSON %s: %w", filename, err)
|
||||
}
|
||||
return compiled, nil
|
||||
}
|
||||
|
||||
// loadScenarioBinary loads a scenario file by name, trying .bin first then .json.
|
||||
// For .json files it compiles the JSON to the MHF binary wire format.
|
||||
func loadScenarioBinary(s *Session, filename string) ([]byte, error) {
|
||||
base := filepath.Join(s.server.erupeConfig.BinPath, "scenarios", filename)
|
||||
|
||||
if data, err := os.ReadFile(base + ".bin"); err == nil {
|
||||
return data, nil
|
||||
}
|
||||
|
||||
jsonData, err := os.ReadFile(base + ".json")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
compiled, err := CompileScenarioJSON(jsonData)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("compile scenario JSON %s: %w", filename, err)
|
||||
}
|
||||
return compiled, nil
|
||||
}
|
||||
|
||||
func seasonConversion(s *Session, questFile string) string {
|
||||
// Try the seasonal override file (e.g., 00001d2 for season 2)
|
||||
filename := fmt.Sprintf("%s%d", questFile[:6], s.server.Season())
|
||||
@@ -209,12 +252,22 @@ func loadQuestFile(s *Session, questId int) []byte {
|
||||
return cached
|
||||
}
|
||||
|
||||
file, err := os.ReadFile(filepath.Join(s.server.erupeConfig.BinPath, fmt.Sprintf("quests/%05dd0.bin", questId)))
|
||||
if err != nil {
|
||||
base := filepath.Join(s.server.erupeConfig.BinPath, fmt.Sprintf("quests/%05dd0", questId))
|
||||
var decrypted []byte
|
||||
if data, err := os.ReadFile(base + ".bin"); err == nil {
|
||||
decrypted = decryption.UnpackSimple(data)
|
||||
} else if jsonData, err := os.ReadFile(base + ".json"); err == nil {
|
||||
compiled, err := CompileQuestJSON(jsonData)
|
||||
if err != nil {
|
||||
s.logger.Error("loadQuestFile: failed to compile quest JSON",
|
||||
zap.Int("questId", questId), zap.Error(err))
|
||||
return nil
|
||||
}
|
||||
decrypted = compiled
|
||||
} else {
|
||||
return nil
|
||||
}
|
||||
|
||||
decrypted := decryption.UnpackSimple(file)
|
||||
if s.server.erupeConfig.RealClientMode <= cfg.Z1 && s.server.erupeConfig.DebugOptions.AutoQuestBackport {
|
||||
decrypted = BackportQuest(decrypted, s.server.erupeConfig.RealClientMode)
|
||||
}
|
||||
@@ -290,7 +343,34 @@ func makeEventQuest(s *Session, eq EventQuest) ([]byte, error) {
|
||||
}
|
||||
bf.WriteUint8(eq.QuestType)
|
||||
if eq.QuestType == QuestTypeSpecialTool {
|
||||
bf.WriteBool(false)
|
||||
var stamps, required int
|
||||
var deadline time.Time
|
||||
err := s.server.db.QueryRow(`SELECT COUNT(*) FROM campaign_state WHERE campaign_id = (
|
||||
SELECT campaign_id
|
||||
FROM campaign_rewards
|
||||
WHERE item_type = 9
|
||||
AND item_id = $1
|
||||
LIMIT 1
|
||||
) AND character_id = $2`, eq.QuestID, s.charID).Scan(&stamps)
|
||||
if err != nil {
|
||||
bf.WriteBool(false)
|
||||
} else {
|
||||
err = s.server.db.QueryRow(`SELECT stamps, end_time
|
||||
FROM campaigns
|
||||
WHERE id = (
|
||||
SELECT campaign_id
|
||||
FROM campaign_rewards
|
||||
WHERE item_type = 9
|
||||
AND item_id = $1
|
||||
LIMIT 1
|
||||
)`, eq.QuestID).Scan(&required, &deadline)
|
||||
required = campaignRequiredStamps(required)
|
||||
if err == nil && stamps >= required && deadline.After(time.Now()) {
|
||||
bf.WriteBool(true)
|
||||
} else {
|
||||
bf.WriteBool(false)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
bf.WriteBool(true)
|
||||
}
|
||||
@@ -654,7 +734,6 @@ func getTuneValueRange(start uint16, value uint16) []tuneValue {
|
||||
return tv
|
||||
}
|
||||
|
||||
func handleMsgMhfEnterTournamentQuest(s *Session, p mhfpacket.MHFPacket) {} // stub: unimplemented
|
||||
|
||||
func handleMsgMhfGetUdBonusQuestInfo(s *Session, p mhfpacket.MHFPacket) {
|
||||
pkt := p.(*mhfpacket.MsgMhfGetUdBonusQuestInfo)
|
||||
|
||||
@@ -604,17 +604,16 @@ func TestQuestFileLoadingErrors(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
// TestTournamentQuestEntryStub tests the stub tournament quest handler
|
||||
func TestTournamentQuestEntryStub(t *testing.T) {
|
||||
// TestTournamentQuestEntryHandler tests the tournament quest entry handler.
|
||||
func TestTournamentQuestEntryHandler(t *testing.T) {
|
||||
mockConn := &MockCryptConn{sentPackets: make([][]byte, 0)}
|
||||
s := createTestSession(mockConn)
|
||||
s.server.tournamentRepo = &mockTournamentRepo{}
|
||||
|
||||
pkt := &mhfpacket.MsgMhfEnterTournamentQuest{}
|
||||
pkt := &mhfpacket.MsgMhfEnterTournamentQuest{AckHandle: 1}
|
||||
|
||||
// This tests that the stub function doesn't panic
|
||||
handleMsgMhfEnterTournamentQuest(s, pkt)
|
||||
|
||||
// Verify no crash occurred (pass if we reach here)
|
||||
if s.logger == nil {
|
||||
t.Errorf("Session corrupted")
|
||||
}
|
||||
|
||||
@@ -1,8 +1,6 @@
|
||||
package channelserver
|
||||
|
||||
import (
|
||||
"encoding/hex"
|
||||
|
||||
"erupe-ce/common/byteframe"
|
||||
"erupe-ce/network/mhfpacket"
|
||||
)
|
||||
@@ -18,21 +16,44 @@ func handleMsgMhfGetAdditionalBeatReward(s *Session, p mhfpacket.MHFPacket) {
|
||||
|
||||
func handleMsgMhfGetUdRankingRewardList(s *Session, p mhfpacket.MHFPacket) {
|
||||
pkt := p.(*mhfpacket.MsgMhfGetUdRankingRewardList)
|
||||
// Temporary canned response
|
||||
data, _ := hex.DecodeString("0100001600000A5397DF00000000000000000000000000000000")
|
||||
doAckBufSucceed(s, pkt.AckHandle, data)
|
||||
// RankingRewardList: u16 count + count × 14-byte entries.
|
||||
// Entry: u8 rank_type, u16 rank_from, u16 rank_to, u8 item_type,
|
||||
// u32 item_id, u32 quantity. No padding gaps.
|
||||
bf := byteframe.NewByteFrame()
|
||||
bf.WriteUint16(0) // count = 0 (no entries configured)
|
||||
doAckBufSucceed(s, pkt.AckHandle, bf.Data())
|
||||
}
|
||||
|
||||
func handleMsgMhfGetRewardSong(s *Session, p mhfpacket.MHFPacket) {
|
||||
pkt := p.(*mhfpacket.MsgMhfGetRewardSong)
|
||||
// Temporary canned response
|
||||
data, _ := hex.DecodeString("0100001600000A5397DF00000000000000000000000000000000")
|
||||
doAckBufSucceed(s, pkt.AckHandle, data)
|
||||
// RE-confirmed layout (22 bytes):
|
||||
// +0x00 u8 error
|
||||
// +0x01 u8 usage_count
|
||||
// +0x02 u32 prayer_id
|
||||
// +0x06 u32 prayer_end (0xFFFFFFFF = no active prayer)
|
||||
// then 4 × (u8 color_error, u8 color_id, u8 color_usage_count)
|
||||
bf := byteframe.NewByteFrame()
|
||||
bf.WriteUint8(0) // error
|
||||
bf.WriteUint8(0) // usage_count
|
||||
bf.WriteUint32(0) // prayer_id
|
||||
bf.WriteUint32(0xFFFFFFFF) // prayer_end: no active prayer
|
||||
for colorID := uint8(1); colorID <= 4; colorID++ {
|
||||
bf.WriteUint8(0) // color_error
|
||||
bf.WriteUint8(colorID) // color_id
|
||||
bf.WriteUint8(0) // color_usage_count
|
||||
}
|
||||
doAckBufSucceed(s, pkt.AckHandle, bf.Data())
|
||||
}
|
||||
|
||||
func handleMsgMhfUseRewardSong(s *Session, p mhfpacket.MHFPacket) {} // stub: unimplemented
|
||||
func handleMsgMhfUseRewardSong(s *Session, p mhfpacket.MHFPacket) {
|
||||
pkt := p.(*mhfpacket.MsgMhfUseRewardSong)
|
||||
doAckSimpleSucceed(s, pkt.AckHandle, []byte{0x00})
|
||||
}
|
||||
|
||||
func handleMsgMhfAddRewardSongCount(s *Session, p mhfpacket.MHFPacket) {} // stub: unimplemented
|
||||
func handleMsgMhfAddRewardSongCount(s *Session, p mhfpacket.MHFPacket) {
|
||||
pkt := p.(*mhfpacket.MsgMhfAddRewardSongCount)
|
||||
doAckSimpleSucceed(s, pkt.AckHandle, []byte{0x00})
|
||||
}
|
||||
|
||||
func handleMsgMhfAcquireMonthlyReward(s *Session, p mhfpacket.MHFPacket) {
|
||||
pkt := p.(*mhfpacket.MsgMhfAcquireMonthlyReward)
|
||||
|
||||
@@ -70,26 +70,34 @@ func TestHandleMsgMhfUseRewardSong(t *testing.T) {
|
||||
server := createMockServer()
|
||||
session := createMockSession(1, server)
|
||||
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
t.Errorf("handleMsgMhfUseRewardSong panicked: %v", r)
|
||||
}
|
||||
}()
|
||||
pkt := &mhfpacket.MsgMhfUseRewardSong{AckHandle: 12345}
|
||||
handleMsgMhfUseRewardSong(session, pkt)
|
||||
|
||||
handleMsgMhfUseRewardSong(session, nil)
|
||||
select {
|
||||
case p := <-session.sendPackets:
|
||||
if len(p.data) == 0 {
|
||||
t.Error("Response packet should have data")
|
||||
}
|
||||
default:
|
||||
t.Error("No response packet queued")
|
||||
}
|
||||
}
|
||||
|
||||
func TestHandleMsgMhfAddRewardSongCount(t *testing.T) {
|
||||
server := createMockServer()
|
||||
session := createMockSession(1, server)
|
||||
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
t.Errorf("handleMsgMhfAddRewardSongCount panicked: %v", r)
|
||||
}
|
||||
}()
|
||||
pkt := &mhfpacket.MsgMhfAddRewardSongCount{AckHandle: 42}
|
||||
handleMsgMhfAddRewardSongCount(session, pkt)
|
||||
|
||||
handleMsgMhfAddRewardSongCount(session, nil)
|
||||
select {
|
||||
case p := <-session.sendPackets:
|
||||
if len(p.data) == 0 {
|
||||
t.Error("Response packet should have data")
|
||||
}
|
||||
default:
|
||||
t.Error("No response packet queued")
|
||||
}
|
||||
}
|
||||
|
||||
func TestHandleMsgMhfAcquireMonthlyReward(t *testing.T) {
|
||||
@@ -193,16 +201,15 @@ func TestEmptyHandlers_MiscFiles_Reward(t *testing.T) {
|
||||
server := createMockServer()
|
||||
session := createMockSession(1, server)
|
||||
|
||||
tests := []struct {
|
||||
// Handlers that accept nil and take no action (no AckHandle).
|
||||
nilSafeTests := []struct {
|
||||
name string
|
||||
fn func()
|
||||
}{
|
||||
{"handleMsgMhfUseRewardSong", func() { handleMsgMhfUseRewardSong(session, nil) }},
|
||||
{"handleMsgMhfAddRewardSongCount", func() { handleMsgMhfAddRewardSongCount(session, nil) }},
|
||||
{"handleMsgMhfAcceptReadReward", func() { handleMsgMhfAcceptReadReward(session, nil) }},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
for _, tt := range nilSafeTests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
@@ -212,4 +219,18 @@ func TestEmptyHandlers_MiscFiles_Reward(t *testing.T) {
|
||||
tt.fn()
|
||||
})
|
||||
}
|
||||
|
||||
// handleMsgMhfUseRewardSong is a real handler (requires a typed packet).
|
||||
t.Run("handleMsgMhfUseRewardSong", func(t *testing.T) {
|
||||
pkt := &mhfpacket.MsgMhfUseRewardSong{AckHandle: 1}
|
||||
handleMsgMhfUseRewardSong(session, pkt)
|
||||
select {
|
||||
case p := <-session.sendPackets:
|
||||
if len(p.data) == 0 {
|
||||
t.Error("handleMsgMhfUseRewardSong: response should have data")
|
||||
}
|
||||
default:
|
||||
t.Error("handleMsgMhfUseRewardSong: no response queued")
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
@@ -33,16 +33,18 @@ func TestHandlerMsgMhfSexChanger(t *testing.T) {
|
||||
|
||||
func TestHandlerMsgMhfEnterTournamentQuest(t *testing.T) {
|
||||
server := createMockServer()
|
||||
server.tournamentRepo = &mockTournamentRepo{}
|
||||
session := createMockSession(1, server)
|
||||
|
||||
// Should not panic with nil packet (empty handler)
|
||||
pkt := &mhfpacket.MsgMhfEnterTournamentQuest{AckHandle: 1}
|
||||
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
t.Errorf("handleMsgMhfEnterTournamentQuest panicked: %v", r)
|
||||
}
|
||||
}()
|
||||
|
||||
handleMsgMhfEnterTournamentQuest(session, nil)
|
||||
handleMsgMhfEnterTournamentQuest(session, pkt)
|
||||
}
|
||||
|
||||
func TestHandlerMsgMhfGetUdBonusQuestInfo(t *testing.T) {
|
||||
@@ -293,7 +295,6 @@ func TestEmptyHandlers_NoDb(t *testing.T) {
|
||||
{"handleMsgMhfKickExportForce", handleMsgMhfKickExportForce},
|
||||
{"handleMsgSysSetStatus", handleMsgSysSetStatus},
|
||||
{"handleMsgSysEcho", handleMsgSysEcho},
|
||||
{"handleMsgMhfEnterTournamentQuest", handleMsgMhfEnterTournamentQuest},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
|
||||
File diff suppressed because one or more lines are too long
@@ -3,8 +3,11 @@ package channelserver
|
||||
import (
|
||||
"erupe-ce/common/byteframe"
|
||||
ps "erupe-ce/common/pascalstring"
|
||||
cfg "erupe-ce/config"
|
||||
"erupe-ce/network/mhfpacket"
|
||||
"time"
|
||||
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
// TournamentInfo0 represents tournament information (type 0).
|
||||
@@ -46,73 +49,6 @@ type TournamentInfo22 struct {
|
||||
Unk4 string
|
||||
}
|
||||
|
||||
func handleMsgMhfInfoTournament(s *Session, p mhfpacket.MHFPacket) {
|
||||
pkt := p.(*mhfpacket.MsgMhfInfoTournament)
|
||||
bf := byteframe.NewByteFrame()
|
||||
|
||||
tournamentInfo0 := []TournamentInfo0{}
|
||||
tournamentInfo21 := []TournamentInfo21{}
|
||||
tournamentInfo22 := []TournamentInfo22{}
|
||||
|
||||
switch pkt.QueryType {
|
||||
case 0:
|
||||
bf.WriteUint32(0)
|
||||
bf.WriteUint32(uint32(len(tournamentInfo0)))
|
||||
for _, tinfo := range tournamentInfo0 {
|
||||
bf.WriteUint32(tinfo.ID)
|
||||
bf.WriteUint32(tinfo.MaxPlayers)
|
||||
bf.WriteUint32(tinfo.CurrentPlayers)
|
||||
bf.WriteUint16(tinfo.Unk1)
|
||||
bf.WriteUint16(tinfo.TextColor)
|
||||
bf.WriteUint32(tinfo.Unk2)
|
||||
bf.WriteUint32(uint32(tinfo.Time1.Unix()))
|
||||
bf.WriteUint32(uint32(tinfo.Time2.Unix()))
|
||||
bf.WriteUint32(uint32(tinfo.Time3.Unix()))
|
||||
bf.WriteUint32(uint32(tinfo.Time4.Unix()))
|
||||
bf.WriteUint32(uint32(tinfo.Time5.Unix()))
|
||||
bf.WriteUint32(uint32(tinfo.Time6.Unix()))
|
||||
bf.WriteUint8(tinfo.Unk3)
|
||||
bf.WriteUint8(tinfo.Unk4)
|
||||
bf.WriteUint32(tinfo.MinHR)
|
||||
bf.WriteUint32(tinfo.MaxHR)
|
||||
ps.Uint8(bf, tinfo.Unk5, true)
|
||||
ps.Uint16(bf, tinfo.Unk6, true)
|
||||
}
|
||||
case 1:
|
||||
bf.WriteUint32(uint32(TimeAdjusted().Unix()))
|
||||
bf.WriteUint32(0) // Registered ID
|
||||
bf.WriteUint32(0)
|
||||
bf.WriteUint32(0)
|
||||
bf.WriteUint8(0)
|
||||
bf.WriteUint32(0)
|
||||
ps.Uint8(bf, "", true)
|
||||
case 2:
|
||||
bf.WriteUint32(0)
|
||||
bf.WriteUint32(uint32(len(tournamentInfo21)))
|
||||
for _, info := range tournamentInfo21 {
|
||||
bf.WriteUint32(info.Unk0)
|
||||
bf.WriteUint32(info.Unk1)
|
||||
bf.WriteUint32(info.Unk2)
|
||||
bf.WriteUint8(info.Unk3)
|
||||
}
|
||||
bf.WriteUint32(uint32(len(tournamentInfo22)))
|
||||
for _, info := range tournamentInfo22 {
|
||||
bf.WriteUint32(info.Unk0)
|
||||
bf.WriteUint32(info.Unk1)
|
||||
bf.WriteUint32(info.Unk2)
|
||||
bf.WriteUint8(info.Unk3)
|
||||
ps.Uint8(bf, info.Unk4, true)
|
||||
}
|
||||
}
|
||||
|
||||
doAckBufSucceed(s, pkt.AckHandle, bf.Data())
|
||||
}
|
||||
|
||||
func handleMsgMhfEntryTournament(s *Session, p mhfpacket.MHFPacket) {
|
||||
pkt := p.(*mhfpacket.MsgMhfEntryTournament)
|
||||
doAckSimpleSucceed(s, pkt.AckHandle, make([]byte, 4))
|
||||
}
|
||||
|
||||
// TournamentReward represents a tournament reward entry.
|
||||
type TournamentReward struct {
|
||||
Unk0 uint16
|
||||
@@ -120,8 +56,254 @@ type TournamentReward struct {
|
||||
Unk2 uint16
|
||||
}
|
||||
|
||||
// tournamentState returns the state byte for the EnumerateRanking response.
|
||||
// 0 = no tournament / before start, 1 = registration open, 2 = hunting active,
|
||||
// 3 = ranking/reward period.
|
||||
func tournamentState(now int64, t *Tournament) uint8 {
|
||||
if t == nil || now < t.StartTime {
|
||||
return 0
|
||||
}
|
||||
if now <= t.EntryEnd {
|
||||
return 1
|
||||
}
|
||||
if now <= t.RankingEnd {
|
||||
return 2
|
||||
}
|
||||
return 3
|
||||
}
|
||||
|
||||
func handleMsgMhfEnumerateRanking(s *Session, p mhfpacket.MHFPacket) {
|
||||
pkt := p.(*mhfpacket.MsgMhfEnumerateRanking)
|
||||
bf := byteframe.NewByteFrame()
|
||||
|
||||
now := TimeAdjusted().Unix()
|
||||
tournament, err := s.server.tournamentRepo.GetActive(now)
|
||||
if err != nil {
|
||||
s.logger.Error("Failed to get active tournament for EnumerateRanking", zap.Error(err))
|
||||
}
|
||||
|
||||
if tournament == nil {
|
||||
// No active tournament: write zeroed timestamps, current time, state 0, empty data.
|
||||
bf.WriteBytes(make([]byte, 16))
|
||||
bf.WriteUint32(uint32(now))
|
||||
bf.WriteUint8(0)
|
||||
ps.Uint8(bf, "", false)
|
||||
bf.WriteUint16(0) // numEvents
|
||||
bf.WriteUint8(0) // numCups
|
||||
doAckBufSucceed(s, pkt.AckHandle, bf.Data())
|
||||
return
|
||||
}
|
||||
|
||||
state := tournamentState(now, tournament)
|
||||
|
||||
bf.WriteUint32(uint32(tournament.StartTime))
|
||||
bf.WriteUint32(uint32(tournament.EntryEnd))
|
||||
bf.WriteUint32(uint32(tournament.RankingEnd))
|
||||
bf.WriteUint32(uint32(tournament.RewardEnd))
|
||||
bf.WriteUint32(uint32(now))
|
||||
bf.WriteUint8(state)
|
||||
ps.Uint8(bf, tournament.Name, true)
|
||||
|
||||
subEvents, err := s.server.tournamentRepo.GetSubEvents()
|
||||
if err != nil {
|
||||
s.logger.Error("Failed to get tournament sub-events", zap.Error(err))
|
||||
subEvents = nil
|
||||
}
|
||||
bf.WriteUint16(uint16(len(subEvents)))
|
||||
for _, se := range subEvents {
|
||||
bf.WriteUint32(se.ID)
|
||||
bf.WriteUint16(uint16(se.CupGroup))
|
||||
bf.WriteInt16(se.EventSubType)
|
||||
bf.WriteUint32(se.QuestFileID)
|
||||
ps.Uint8(bf, se.Name, true)
|
||||
}
|
||||
|
||||
cups, err := s.server.tournamentRepo.GetCups(tournament.ID)
|
||||
if err != nil {
|
||||
s.logger.Error("Failed to get tournament cups", zap.Error(err))
|
||||
cups = nil
|
||||
}
|
||||
bf.WriteUint8(uint8(len(cups)))
|
||||
for _, cup := range cups {
|
||||
bf.WriteUint32(cup.ID)
|
||||
bf.WriteUint16(uint16(cup.CupGroup))
|
||||
bf.WriteUint16(uint16(cup.CupType))
|
||||
bf.WriteUint16(uint16(cup.Unk))
|
||||
ps.Uint8(bf, cup.Name, true)
|
||||
ps.Uint16(bf, cup.Description, true)
|
||||
}
|
||||
|
||||
doAckBufSucceed(s, pkt.AckHandle, bf.Data())
|
||||
}
|
||||
|
||||
// handleMsgMhfEnumerateOrder returns the leaderboard for the requested
// tournament event. Wire layout: eventID, current (adjusted) time, entry
// count, pad, then per entry: char ID, rank, grade, pad, HR, GR (G10+
// clients only), pad, the two length bytes (each including the null
// terminator), and the null-terminated name and guild-name strings.
func handleMsgMhfEnumerateOrder(s *Session, p mhfpacket.MHFPacket) {
	pkt := p.(*mhfpacket.MsgMhfEnumerateOrder)
	bf := byteframe.NewByteFrame()

	now := uint32(TimeAdjusted().Unix())
	bf.WriteUint32(pkt.EventID)
	bf.WriteUint32(now)

	entries, err := s.server.tournamentRepo.GetLeaderboard(pkt.EventID)
	if err != nil {
		s.logger.Error("Failed to get tournament leaderboard", zap.Error(err), zap.Uint32("eventID", pkt.EventID))
		// Degrade to an empty leaderboard rather than dropping the ack.
		entries = nil
	}

	bf.WriteUint16(uint16(len(entries)))
	bf.WriteUint16(0) // unk

	for _, e := range entries {
		bf.WriteUint32(e.CharID)
		bf.WriteUint32(e.Rank)
		bf.WriteUint16(e.Grade)
		bf.WriteUint16(0) // pad
		bf.WriteUint16(e.HR)
		// The GR field only exists in the G10+ packet layout.
		if s.server.erupeConfig.RealClientMode >= cfg.G10 {
			bf.WriteUint16(e.GR)
		}
		bf.WriteUint16(0) // pad
		charNameBytes := []byte(e.CharName)
		guildNameBytes := []byte(e.GuildName)
		// Length prefixes count the trailing null terminator (+1).
		bf.WriteUint8(uint8(len(charNameBytes) + 1))
		bf.WriteUint8(uint8(len(guildNameBytes) + 1))
		bf.WriteNullTerminatedBytes(charNameBytes)
		bf.WriteNullTerminatedBytes(guildNameBytes)
	}

	doAckBufSucceed(s, pkt.AckHandle, bf.Data())
}
|
||||
|
||||
// handleMsgMhfInfoTournament answers tournament info queries, keyed on
// pkt.QueryType:
//
//	0 — overview of the active tournament (or a zero count when none),
//	1 — the requesting character's registration status,
//	2 — reward lists, returned empty (reward structures unknown).
//
// All branches fall through to a single buffered ack at the end.
func handleMsgMhfInfoTournament(s *Session, p mhfpacket.MHFPacket) {
	pkt := p.(*mhfpacket.MsgMhfInfoTournament)
	bf := byteframe.NewByteFrame()

	now := TimeAdjusted().Unix()

	switch pkt.QueryType {
	case 0:
		tournament, err := s.server.tournamentRepo.GetActive(now)
		if err != nil {
			// Log and continue; a nil tournament yields the empty response below.
			s.logger.Error("Failed to get active tournament for InfoTournament type 0", zap.Error(err))
		}
		bf.WriteUint32(0) // unk header
		if tournament == nil {
			bf.WriteUint32(0) // count = 0
			break
		}
		bf.WriteUint32(1) // count
		bf.WriteUint32(tournament.ID)
		bf.WriteUint32(0) // MaxPlayers
		bf.WriteUint32(0) // CurrentPlayers
		bf.WriteUint16(0) // Unk1
		bf.WriteUint16(0) // TextColor
		bf.WriteUint32(0) // Unk2
		bf.WriteUint32(uint32(tournament.StartTime))
		bf.WriteUint32(uint32(tournament.EntryEnd))
		bf.WriteUint32(uint32(tournament.RankingEnd))
		bf.WriteUint32(uint32(tournament.RewardEnd))
		// RewardEnd is deliberately repeated for the next two timestamp
		// slots; their distinct meanings are unknown (TODO confirm from RE).
		bf.WriteUint32(uint32(tournament.RewardEnd))
		bf.WriteUint32(uint32(tournament.RewardEnd))
		bf.WriteUint8(0)  // Unk3
		bf.WriteUint8(0)  // Unk4
		bf.WriteUint32(0) // MinHR
		bf.WriteUint32(0) // MaxHR
		ps.Uint8(bf, tournament.Name, true)
		ps.Uint16(bf, "", false)
	case 1:
		// Return player registration status.
		bf.WriteUint32(uint32(now))
		tournament, err := s.server.tournamentRepo.GetActive(now)
		if err != nil {
			s.logger.Error("Failed to get active tournament for InfoTournament type 1", zap.Error(err))
		}
		if tournament == nil {
			bf.WriteUint32(0) // tournamentID
			bf.WriteUint32(0) // entryID
			bf.WriteUint32(0)
			bf.WriteUint8(0) // not registered
			bf.WriteUint32(0)
			ps.Uint8(bf, "", true)
			break
		}
		entry, err := s.server.tournamentRepo.GetEntry(s.charID, tournament.ID)
		if err != nil {
			// A lookup failure is reported as "not registered" below.
			s.logger.Error("Failed to get tournament entry for InfoTournament type 1", zap.Error(err))
		}
		bf.WriteUint32(tournament.ID)
		if entry != nil {
			bf.WriteUint32(entry.ID)
			bf.WriteUint32(0)
			bf.WriteUint8(1) // registered
		} else {
			bf.WriteUint32(0)
			bf.WriteUint32(0)
			bf.WriteUint8(0) // not registered
		}
		bf.WriteUint32(0)
		ps.Uint8(bf, tournament.Name, true)
	case 2:
		// Return empty lists (reward structures unknown).
		bf.WriteUint32(0)
		bf.WriteUint32(0) // count type 21
		bf.WriteUint32(0) // count type 22
	}

	doAckBufSucceed(s, pkt.AckHandle, bf.Data())
}
|
||||
|
||||
func handleMsgMhfEntryTournament(s *Session, p mhfpacket.MHFPacket) {
|
||||
pkt := p.(*mhfpacket.MsgMhfEntryTournament)
|
||||
now := TimeAdjusted().Unix()
|
||||
|
||||
tournament, err := s.server.tournamentRepo.GetActive(now)
|
||||
if err != nil {
|
||||
s.logger.Error("Failed to get active tournament for EntryTournament", zap.Error(err))
|
||||
doAckSimpleSucceed(s, pkt.AckHandle, make([]byte, 4))
|
||||
return
|
||||
}
|
||||
if tournament == nil {
|
||||
doAckSimpleSucceed(s, pkt.AckHandle, make([]byte, 4))
|
||||
return
|
||||
}
|
||||
|
||||
entryID, err := s.server.tournamentRepo.Register(s.charID, tournament.ID)
|
||||
if err != nil {
|
||||
s.logger.Error("Failed to register for tournament", zap.Error(err),
|
||||
zap.Uint32("charID", s.charID), zap.Uint32("tournamentID", tournament.ID))
|
||||
doAckSimpleSucceed(s, pkt.AckHandle, make([]byte, 4))
|
||||
return
|
||||
}
|
||||
|
||||
bf := byteframe.NewByteFrame()
|
||||
bf.WriteUint32(entryID)
|
||||
doAckBufSucceed(s, pkt.AckHandle, bf.Data())
|
||||
}
|
||||
|
||||
func handleMsgMhfEnterTournamentQuest(s *Session, p mhfpacket.MHFPacket) {
|
||||
pkt := p.(*mhfpacket.MsgMhfEnterTournamentQuest)
|
||||
s.logger.Debug("EnterTournamentQuest",
|
||||
zap.Uint32("tournamentID", pkt.TournamentID),
|
||||
zap.Uint32("entryHandle", pkt.EntryHandle),
|
||||
zap.Uint32("unk2", pkt.Unk2),
|
||||
zap.Uint32("questSlot", pkt.QuestSlot),
|
||||
zap.Uint32("stageHandle", pkt.StageHandle),
|
||||
)
|
||||
if err := s.server.tournamentRepo.SubmitResult(
|
||||
s.charID,
|
||||
pkt.TournamentID,
|
||||
pkt.Unk2,
|
||||
pkt.QuestSlot,
|
||||
pkt.StageHandle,
|
||||
); err != nil {
|
||||
s.logger.Error("Failed to submit tournament result", zap.Error(err))
|
||||
}
|
||||
doAckSimpleSucceed(s, pkt.AckHandle, make([]byte, 4))
|
||||
}
|
||||
|
||||
func handleMsgMhfAcquireTournament(s *Session, p mhfpacket.MHFPacket) {
|
||||
pkt := p.(*mhfpacket.MsgMhfAcquireTournament)
|
||||
// Reward item IDs are unknown. Return an empty reward list.
|
||||
rewards := []TournamentReward{}
|
||||
bf := byteframe.NewByteFrame()
|
||||
bf.WriteUint8(uint8(len(rewards)))
|
||||
|
||||
@@ -1,10 +1,41 @@
|
||||
package channelserver
|
||||
|
||||
import (
|
||||
"encoding/binary"
|
||||
"erupe-ce/common/bfutil"
|
||||
"erupe-ce/common/stringsupport"
|
||||
"erupe-ce/network/mhfpacket"
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
// User binary expected sizes and offsets (from mhfo-hd.dll RE).
// Types 4-5 are accepted by the server but never sent by the ZZ client.
// All offsets index into the raw payload of the corresponding binary type.
const (
	userBinaryNameMaxSize = 17  // Type 1: SJIS null-terminated name
	userBinaryProfileSize = 208 // Type 2: 0xD0 — player profile
	userBinaryEquipSize   = 384 // Type 3: 0x180 — equipment/appearance

	// Type 2 profile offsets
	profileNameOff    = 0x0C // 25-byte SJIS name
	profileNameLen    = 25
	profileIntroOff   = 0x25 // 35-byte SJIS self-introduction
	profileIntroLen   = 35
	profileGuildIDOff = 0x48 // u32 guild ID

	// Type 3 equipment offsets
	equipHROff        = 0x00 // u16 HR (XOR'd with session key)
	equipWeaponOff    = 0x08 // 12-byte weapon entry
	equipHeadOff      = 0x18 // 12-byte head armor entry
	equipChestOff     = 0x24 // 12-byte chest armor entry
	equipArmsOff      = 0x30 // 12-byte arms armor entry
	equipWaistOff     = 0x3C // 12-byte waist armor entry
	equipLegsOff      = 0x48 // 12-byte legs armor entry
	equipGuildIDOff   = 0x64 // u32 guild ID
	equipGenderOff    = 0x68 // u8 gender flag
	equipSharpnessOff = 0x69 // u8 sharpness level
	equipEntrySize    = 12   // Each equipment entry: 3x u32
)
|
||||
|
||||
// handleMsgSysInsertUser is intentionally a no-op.
func handleMsgSysInsertUser(s *Session, p mhfpacket.MHFPacket) {} // stub: unimplemented

// handleMsgSysDeleteUser is intentionally a no-op.
func handleMsgSysDeleteUser(s *Session, p mhfpacket.MHFPacket) {} // stub: unimplemented
|
||||
@@ -15,6 +46,9 @@ func handleMsgSysSetUserBinary(s *Session, p mhfpacket.MHFPacket) {
|
||||
s.logger.Warn("Invalid BinaryType", zap.Uint8("type", pkt.BinaryType))
|
||||
return
|
||||
}
|
||||
|
||||
logUserBinaryFields(s, pkt.BinaryType, pkt.RawDataPayload)
|
||||
|
||||
s.server.userBinary.Set(s.charID, pkt.BinaryType, pkt.RawDataPayload)
|
||||
|
||||
s.server.BroadcastMHF(&mhfpacket.MsgSysNotifyUserBinary{
|
||||
@@ -23,6 +57,101 @@ func handleMsgSysSetUserBinary(s *Session, p mhfpacket.MHFPacket) {
|
||||
}, s)
|
||||
}
|
||||
|
||||
// logUserBinaryFields parses and logs the structured fields of a user binary
// payload based on its type (1 = name, 2 = profile, 3 = equipment). Unknown
// types get a generic size log; per-type size validation happens in the
// type-specific loggers.
func logUserBinaryFields(s *Session, binaryType uint8, data []byte) {
	switch binaryType {
	case 1:
		logUserBinaryName(s, data)
	case 2:
		logUserBinaryProfile(s, data)
	case 3:
		logUserBinaryEquipment(s, data)
	default:
		s.logger.Info("User binary received (unknown type)",
			zap.Uint8("type", binaryType),
			zap.Int("size", len(data)),
			zap.Uint32("charID", s.charID),
		)
	}
}
|
||||
|
||||
// logUserBinaryName parses type 1: character name (SJIS, null-terminated).
|
||||
func logUserBinaryName(s *Session, data []byte) {
|
||||
if len(data) == 0 {
|
||||
s.logger.Warn("User binary type 1 (name): empty payload",
|
||||
zap.Uint32("charID", s.charID),
|
||||
)
|
||||
return
|
||||
}
|
||||
if len(data) > userBinaryNameMaxSize {
|
||||
s.logger.Warn("User binary type 1 (name): payload exceeds expected max",
|
||||
zap.Int("size", len(data)),
|
||||
zap.Int("expected_max", userBinaryNameMaxSize),
|
||||
zap.Uint32("charID", s.charID),
|
||||
)
|
||||
}
|
||||
name := stringsupport.SJISToUTF8Lossy(bfutil.UpToNull(data))
|
||||
s.logger.Info("User binary type 1 (name)",
|
||||
zap.String("name", name),
|
||||
zap.Int("size", len(data)),
|
||||
zap.Uint32("charID", s.charID),
|
||||
)
|
||||
}
|
||||
|
||||
// logUserBinaryProfile parses type 2: player profile (208 bytes).
|
||||
func logUserBinaryProfile(s *Session, data []byte) {
|
||||
if len(data) != userBinaryProfileSize {
|
||||
s.logger.Warn("User binary type 2 (profile): unexpected size",
|
||||
zap.Int("size", len(data)),
|
||||
zap.Int("expected", userBinaryProfileSize),
|
||||
zap.Uint32("charID", s.charID),
|
||||
)
|
||||
return
|
||||
}
|
||||
nameBytes := bfutil.UpToNull(data[profileNameOff : profileNameOff+profileNameLen])
|
||||
name := stringsupport.SJISToUTF8Lossy(nameBytes)
|
||||
|
||||
introBytes := bfutil.UpToNull(data[profileIntroOff : profileIntroOff+profileIntroLen])
|
||||
intro := stringsupport.SJISToUTF8Lossy(introBytes)
|
||||
|
||||
guildID := binary.BigEndian.Uint32(data[profileGuildIDOff : profileGuildIDOff+4])
|
||||
|
||||
s.logger.Info("User binary type 2 (profile)",
|
||||
zap.String("name", name),
|
||||
zap.String("self_intro", intro),
|
||||
zap.Uint32("guild_id", guildID),
|
||||
zap.Int("size", len(data)),
|
||||
zap.Uint32("charID", s.charID),
|
||||
)
|
||||
}
|
||||
|
||||
// logUserBinaryEquipment parses type 3: equipment/appearance (384 bytes).
|
||||
func logUserBinaryEquipment(s *Session, data []byte) {
|
||||
if len(data) != userBinaryEquipSize {
|
||||
s.logger.Warn("User binary type 3 (equipment): unexpected size",
|
||||
zap.Int("size", len(data)),
|
||||
zap.Int("expected", userBinaryEquipSize),
|
||||
zap.Uint32("charID", s.charID),
|
||||
)
|
||||
return
|
||||
}
|
||||
hr := binary.BigEndian.Uint16(data[equipHROff : equipHROff+2])
|
||||
guildID := binary.BigEndian.Uint32(data[equipGuildIDOff : equipGuildIDOff+4])
|
||||
gender := data[equipGenderOff]
|
||||
sharpness := data[equipSharpnessOff]
|
||||
|
||||
s.logger.Info("User binary type 3 (equipment)",
|
||||
zap.Uint16("hr_xored", hr),
|
||||
zap.Uint32("guild_id", guildID),
|
||||
zap.Uint8("gender", gender),
|
||||
zap.Uint8("sharpness", sharpness),
|
||||
zap.Int("size", len(data)),
|
||||
zap.Uint32("charID", s.charID),
|
||||
)
|
||||
}
|
||||
|
||||
func handleMsgSysGetUserBinary(s *Session, p mhfpacket.MHFPacket) {
|
||||
pkt := p.(*mhfpacket.MsgSysGetUserBinary)
|
||||
|
||||
|
||||
99
server/channelserver/lang_en.go
Normal file
99
server/channelserver/lang_en.go
Normal file
@@ -0,0 +1,99 @@
|
||||
package channelserver
|
||||
|
||||
// langEnglish builds the English i18n string table: chat command feedback,
// Raviente (Great Slaying) announcements, guild names and invite dialogs,
// and prayer bead names with flavor text. All values are fmt format strings
// consumed elsewhere; their verbs and order are part of the contract.
func langEnglish() i18n {
	var i i18n

	i.language = "English"
	i.cafe.reset = "Resets on %d/%d"
	i.timer = "Time: %02d:%02d:%02d.%03d (%df)"

	// Chat command feedback.
	i.commands.noOp = "You don't have permission to use this command"
	i.commands.disabled = "%s command is disabled"
	i.commands.reload = "Reloading players..."
	i.commands.playtime = "Playtime: %d hours %d minutes %d seconds"

	i.commands.kqf.get = "KQF: %x"
	i.commands.kqf.set.error = "Error in command. Format: %s set xxxxxxxxxxxxxxxx"
	i.commands.kqf.set.success = "KQF set, please switch Land/World"
	i.commands.kqf.version = "This command is disabled prior to MHFG10"
	i.commands.rights.error = "Error in command. Format: %s x"
	i.commands.rights.success = "Set rights integer: %d"
	i.commands.course.error = "Error in command. Format: %s <name>"
	i.commands.course.disabled = "%s Course disabled"
	i.commands.course.enabled = "%s Course enabled"
	i.commands.course.locked = "%s Course is locked"
	i.commands.teleport.error = "Error in command. Format: %s x y"
	i.commands.teleport.success = "Teleporting to %d %d"
	i.commands.psn.error = "Error in command. Format: %s <psn id>"
	i.commands.psn.success = "Connected PSN ID: %s"
	i.commands.psn.exists = "PSN ID is connected to another account!"

	i.commands.discord.success = "Your Discord token: %s"

	i.commands.ban.noUser = "Could not find user"
	i.commands.ban.success = "Successfully banned %s"
	i.commands.ban.invalid = "Invalid Character ID"
	i.commands.ban.error = "Error in command. Format: %s <id> [length]"
	// Appended to ban.success for temporary bans.
	i.commands.ban.length = " until %s"

	i.commands.timer.enabled = "Quest timer enabled"
	i.commands.timer.disabled = "Quest timer disabled"

	// Raviente command feedback.
	i.commands.ravi.noCommand = "No Raviente command specified!"
	i.commands.ravi.start.success = "The Great Slaying will begin in a moment"
	i.commands.ravi.start.error = "The Great Slaying has already begun!"
	i.commands.ravi.multiplier = "Raviente multiplier is currently %.2fx"
	i.commands.ravi.res.success = "Sending resurrection support!"
	i.commands.ravi.res.error = "Resurrection support has not been requested!"
	i.commands.ravi.sed.success = "Sending sedation support if requested!"
	i.commands.ravi.request = "Requesting sedation support!"
	i.commands.ravi.error = "Raviente command not recognised!"
	i.commands.ravi.noPlayers = "No one has joined the Great Slaying!"
	i.commands.ravi.version = "This command is disabled outside of MHFZZ"

	// Raviente event announcements.
	i.raviente.berserk = "<Great Slaying: Berserk> is being held!"
	i.raviente.extreme = "<Great Slaying: Extreme> is being held!"
	i.raviente.extremeLimited = "<Great Slaying: Extreme (Limited)> is being held!"
	i.raviente.berserkSmall = "<Great Slaying: Berserk (Small)> is being held!"

	// Auto-generated guild names (%d is a sequence number).
	i.guild.rookieGuildName = "Rookie Clan %d"
	i.guild.returnGuildName = "Return Clan %d"

	// Guild invite dialog text (%s is the guild name).
	i.guild.invite.title = "Invitation!"
	i.guild.invite.body = "You have been invited to join\n「%s」\nDo you want to accept?"

	i.guild.invite.success.title = "Success!"
	i.guild.invite.success.body = "You have successfully joined\n「%s」."

	i.guild.invite.accepted.title = "Accepted"
	i.guild.invite.accepted.body = "The recipient accepted your invitation to join\n「%s」."

	i.guild.invite.rejected.title = "Rejected"
	i.guild.invite.rejected.body = "You rejected the invitation to join\n「%s」."

	i.guild.invite.declined.title = "Declined"
	i.guild.invite.declined.body = "The recipient declined your invitation to join\n「%s」."

	// Prayer bead names and flavor text; the first field is the bead ID
	// (IDs are sparse — gaps are presumably unused entries, TODO confirm).
	i.beads = []Bead{
		{1, "Bead of Storms", "A prayer bead imbued with the power of storms.\nSummons raging winds to bolster allies."},
		{3, "Bead of Severing", "A prayer bead imbued with severing power.\nGrants allies increased cutting strength."},
		{4, "Bead of Vitality", "A prayer bead imbued with vitality.\nBoosts the health of those around it."},
		{8, "Bead of Healing", "A prayer bead imbued with healing power.\nProtects allies with restorative energy."},
		{9, "Bead of Fury", "A prayer bead imbued with furious energy.\nFuels allies with battle rage."},
		{10, "Bead of Blight", "A prayer bead imbued with miasma.\nInfuses allies with poisonous power."},
		{11, "Bead of Power", "A prayer bead imbued with raw might.\nGrants allies overwhelming strength."},
		{14, "Bead of Thunder", "A prayer bead imbued with lightning.\nCharges allies with electric force."},
		{15, "Bead of Ice", "A prayer bead imbued with freezing cold.\nGrants allies chilling elemental power."},
		{17, "Bead of Fire", "A prayer bead imbued with searing heat.\nIgnites allies with fiery elemental power."},
		{18, "Bead of Water", "A prayer bead imbued with flowing water.\nGrants allies water elemental power."},
		{19, "Bead of Dragon", "A prayer bead imbued with dragon energy.\nGrants allies dragon elemental power."},
		{20, "Bead of Earth", "A prayer bead imbued with earth power.\nGrounds allies with elemental earth force."},
		{21, "Bead of Wind", "A prayer bead imbued with swift wind.\nGrants allies increased agility."},
		{22, "Bead of Light", "A prayer bead imbued with radiant light.\nInspires allies with luminous energy."},
		{23, "Bead of Shadow", "A prayer bead imbued with darkness.\nInfuses allies with shadowy power."},
		{24, "Bead of Iron", "A prayer bead imbued with iron strength.\nGrants allies fortified defence."},
		{25, "Bead of Immunity", "A prayer bead imbued with sealing power.\nNullifies elemental weaknesses for allies."},
	}

	return i
}
|
||||
99
server/channelserver/lang_es.go
Normal file
99
server/channelserver/lang_es.go
Normal file
@@ -0,0 +1,99 @@
|
||||
package channelserver
|
||||
|
||||
// langSpanish builds the Spanish i18n string table. Mirrors langEnglish
// field-for-field; runtime strings are Spanish translations and must keep
// the same fmt verbs in the same order.
func langSpanish() i18n {
	var i i18n

	i.language = "Español"
	i.cafe.reset = "Se reinicia el %d/%d"
	i.timer = "Tiempo: %02d:%02d:%02d.%03d (%df)"

	// Chat command feedback.
	i.commands.noOp = "No tienes permiso para usar este comando"
	i.commands.disabled = "El comando %s está desactivado"
	i.commands.reload = "Recargando jugadores..."
	i.commands.playtime = "Tiempo de juego: %d hora(s) %d minuto(s) %d segundo(s)"

	i.commands.kqf.get = "KQF: %x"
	i.commands.kqf.set.error = "Error en el comando. Formato: %s set xxxxxxxxxxxxxxxx"
	i.commands.kqf.set.success = "KQF establecido, por favor cambia de Zona/Mundo"
	i.commands.kqf.version = "Este comando está desactivado antes de MHFG10"
	i.commands.rights.error = "Error en el comando. Formato: %s x"
	i.commands.rights.success = "Establecer entero de derechos: %d"
	i.commands.course.error = "Error en el comando. Formato: %s <nombre>"
	i.commands.course.disabled = "Curso %s desactivado"
	i.commands.course.enabled = "Curso %s activado"
	i.commands.course.locked = "El curso %s está bloqueado"
	i.commands.teleport.error = "Error en el comando. Formato: %s x y"
	i.commands.teleport.success = "Teletransportando a %d %d"
	i.commands.psn.error = "Error en el comando. Formato: %s <psn id>"
	i.commands.psn.success = "ID de PSN conectado: %s"
	i.commands.psn.exists = "Este ID de PSN ya está asociado a otra cuenta"

	i.commands.discord.success = "Tu token de Discord: %s"

	i.commands.ban.noUser = "No se encontró al usuario"
	i.commands.ban.success = "%s ha sido baneado con éxito"
	i.commands.ban.invalid = "ID de personaje inválido"
	i.commands.ban.error = "Error en el comando. Formato: %s <id> [duración]"
	// Appended to ban.success for temporary bans.
	i.commands.ban.length = " hasta el %s"

	i.commands.timer.enabled = "Temporizador de misión activado"
	i.commands.timer.disabled = "Temporizador de misión desactivado"

	// Raviente command feedback.
	i.commands.ravi.noCommand = "No se especificó ningún comando de Raviente"
	i.commands.ravi.start.success = "La Gran Cacería comenzará en un momento"
	i.commands.ravi.start.error = "¡La Gran Cacería ya ha comenzado!"
	i.commands.ravi.multiplier = "El multiplicador de Raviente es actualmente %.2fx"
	i.commands.ravi.res.success = "¡Enviando apoyo de resurrección!"
	i.commands.ravi.res.error = "¡El apoyo de resurrección no ha sido solicitado!"
	i.commands.ravi.sed.success = "¡Enviando apoyo de sedación si fue solicitado!"
	i.commands.ravi.request = "¡Solicitando apoyo de sedación!"
	i.commands.ravi.error = "¡Comando de Raviente no reconocido!"
	i.commands.ravi.noPlayers = "¡Nadie se ha unido a la Gran Cacería!"
	i.commands.ravi.version = "Este comando está desactivado fuera de MHFZZ"

	// Raviente event announcements.
	i.raviente.berserk = "¡<Gran Cacería: Frenesí> está en curso!"
	i.raviente.extreme = "¡<Gran Cacería: Extremo> está en curso!"
	i.raviente.extremeLimited = "¡<Gran Cacería: Extremo (Limitado)> está en curso!"
	i.raviente.berserkSmall = "¡<Gran Cacería: Frenesí (Reducida)> está en curso!"

	// Auto-generated guild names (%d is a sequence number).
	i.guild.rookieGuildName = "Clan Novato %d"
	i.guild.returnGuildName = "Clan Regreso %d"

	// Guild invite dialog text (%s is the guild name).
	i.guild.invite.title = "¡Invitación!"
	i.guild.invite.body = "Has sido invitado a unirte a\n「%s」\n¿Deseas aceptar?"

	i.guild.invite.success.title = "¡Éxito!"
	i.guild.invite.success.body = "Te has unido a\n「%s」 con éxito."

	i.guild.invite.accepted.title = "Aceptada"
	i.guild.invite.accepted.body = "El destinatario aceptó tu invitación para unirse a\n「%s」."

	i.guild.invite.rejected.title = "Rechazada"
	i.guild.invite.rejected.body = "Rechazaste la invitación para unirte a\n「%s」."

	i.guild.invite.declined.title = "Declinada"
	i.guild.invite.declined.body = "El destinatario declinó tu invitación para unirse a\n「%s」."

	// Prayer bead names and flavor text; IDs match the other language tables.
	i.beads = []Bead{
		{1, "Perla de Tormentas", "Una perla de oración imbuida con el poder de las tormentas.\nInvoca vientos furiosos para fortalecer a los aliados."},
		{3, "Perla de Corte", "Una perla de oración imbuida con poder cortante.\nOtorga a los aliados mayor fuerza de corte."},
		{4, "Perla de Vitalidad", "Una perla de oración imbuida con vitalidad.\nAumenta los puntos de vida de los aliados cercanos."},
		{8, "Perla de Curación", "Una perla de oración imbuida con poder curativo.\nProtege a los aliados con energía restauradora."},
		{9, "Perla de Furia", "Una perla de oración imbuida con energía furiosa.\nImbuye a los aliados con rabia de combate."},
		{10, "Perla de Plaga", "Una perla de oración imbuida con miasma.\nInfunde a los aliados con poder venenoso."},
		{11, "Perla de Poder", "Una perla de oración imbuida con fuerza bruta.\nOtorga a los aliados una fuerza abrumadora."},
		{14, "Perla del Trueno", "Una perla de oración imbuida con rayos.\nCarga a los aliados con fuerza eléctrica."},
		{15, "Perla de Hielo", "Una perla de oración imbuida con frío glacial.\nOtorga a los aliados poder elemental helado."},
		{17, "Perla de Fuego", "Una perla de oración imbuida con calor abrasador.\nEnciende a los aliados con poder elemental ígneo."},
		{18, "Perla de Agua", "Una perla de oración imbuida con agua fluyente.\nOtorga a los aliados poder elemental acuático."},
		{19, "Perla del Dragón", "Una perla de oración imbuida con energía dracónica.\nOtorga a los aliados poder elemental dracónico."},
		{20, "Perla de Tierra", "Una perla de oración imbuida con el poder de la tierra.\nAfianza a los aliados con fuerza elemental telúrica."},
		{21, "Perla del Viento", "Una perla de oración imbuida con viento veloz.\nOtorga a los aliados mayor agilidad."},
		{22, "Perla de Luz", "Una perla de oración imbuida con luz radiante.\nInspira a los aliados con energía luminosa."},
		{23, "Perla de Sombra", "Una perla de oración imbuida con oscuridad.\nInfunde a los aliados con poder sombrío."},
		{24, "Perla de Hierro", "Una perla de oración imbuida con la resistencia del hierro.\nOtorga a los aliados una defensa reforzada."},
		{25, "Perla de Inmunidad", "Una perla de oración imbuida con poder de sellado.\nAnula las debilidades elementales de los aliados."},
	}

	return i
}
|
||||
99
server/channelserver/lang_fr.go
Normal file
99
server/channelserver/lang_fr.go
Normal file
@@ -0,0 +1,99 @@
|
||||
package channelserver
|
||||
|
||||
// langFrench builds the French i18n string table. Mirrors langEnglish
// field-for-field; runtime strings are French translations and must keep
// the same fmt verbs in the same order.
func langFrench() i18n {
	var i i18n

	i.language = "Français"
	i.cafe.reset = "Réinitialisation le %d/%d"
	i.timer = "Temps : %02d:%02d:%02d.%03d (%df)"

	// Chat command feedback.
	i.commands.noOp = "Vous n'avez pas la permission d'utiliser cette commande"
	i.commands.disabled = "La commande %s est désactivée"
	i.commands.reload = "Rechargement des joueurs..."
	i.commands.playtime = "Temps de jeu : %d heure(s) %d minute(s) %d seconde(s)"

	i.commands.kqf.get = "KQF : %x"
	i.commands.kqf.set.error = "Erreur de commande. Format : %s set xxxxxxxxxxxxxxxx"
	i.commands.kqf.set.success = "KQF défini, veuillez changer de Zone/Monde"
	i.commands.kqf.version = "Cette commande est désactivée avant MHFG10"
	i.commands.rights.error = "Erreur de commande. Format : %s x"
	i.commands.rights.success = "Définir entier de droits : %d"
	i.commands.course.error = "Erreur de commande. Format : %s <nom>"
	i.commands.course.disabled = "Cours %s désactivé"
	i.commands.course.enabled = "Cours %s activé"
	i.commands.course.locked = "Le cours %s est verrouillé"
	i.commands.teleport.error = "Erreur de commande. Format : %s x y"
	i.commands.teleport.success = "Téléportation vers %d %d"
	i.commands.psn.error = "Erreur de commande. Format : %s <psn id>"
	i.commands.psn.success = "ID PSN connecté : %s"
	i.commands.psn.exists = "Cet ID PSN est déjà associé à un autre compte !"

	i.commands.discord.success = "Votre jeton Discord : %s"

	i.commands.ban.noUser = "Utilisateur introuvable"
	i.commands.ban.success = "%s a été banni avec succès"
	i.commands.ban.invalid = "ID de personnage invalide"
	i.commands.ban.error = "Erreur de commande. Format : %s <id> [durée]"
	// Appended to ban.success for temporary bans.
	i.commands.ban.length = " jusqu'au %s"

	i.commands.timer.enabled = "Minuteur de quête activé"
	i.commands.timer.disabled = "Minuteur de quête désactivé"

	// Raviente command feedback.
	i.commands.ravi.noCommand = "Aucune commande Raviente spécifiée !"
	i.commands.ravi.start.success = "La Grande Chasse va commencer dans un instant"
	i.commands.ravi.start.error = "La Grande Chasse a déjà commencé !"
	i.commands.ravi.multiplier = "Le multiplicateur Raviente est actuellement de %.2fx"
	i.commands.ravi.res.success = "Envoi du soutien de résurrection !"
	i.commands.ravi.res.error = "Le soutien de résurrection n'a pas été demandé !"
	i.commands.ravi.sed.success = "Envoi du soutien de sédation si demandé !"
	i.commands.ravi.request = "Demande de soutien de sédation !"
	i.commands.ravi.error = "Commande Raviente non reconnue !"
	i.commands.ravi.noPlayers = "Personne n'a rejoint la Grande Chasse !"
	i.commands.ravi.version = "Cette commande est désactivée en dehors de MHFZZ"

	// Raviente event announcements.
	i.raviente.berserk = "<Grande Chasse : Frénésie> est en cours !"
	i.raviente.extreme = "<Grande Chasse : Extrême> est en cours !"
	i.raviente.extremeLimited = "<Grande Chasse : Extrême (Limitée)> est en cours !"
	i.raviente.berserkSmall = "<Grande Chasse : Frénésie (Réduite)> est en cours !"

	// Auto-generated guild names (%d is a sequence number).
	i.guild.rookieGuildName = "Clan Novice %d"
	i.guild.returnGuildName = "Clan Retour %d"

	// Guild invite dialog text (%s is the guild name).
	i.guild.invite.title = "Invitation !"
	i.guild.invite.body = "Vous avez été invité à rejoindre\n「%s」\nSouhaitez-vous accepter ?"

	i.guild.invite.success.title = "Succès !"
	i.guild.invite.success.body = "Vous avez rejoint\n「%s」 avec succès."

	i.guild.invite.accepted.title = "Acceptée"
	i.guild.invite.accepted.body = "Le destinataire a accepté votre invitation à rejoindre\n「%s」."

	i.guild.invite.rejected.title = "Refusée"
	i.guild.invite.rejected.body = "Vous avez refusé l'invitation à rejoindre\n「%s」."

	i.guild.invite.declined.title = "Déclinée"
	i.guild.invite.declined.body = "Le destinataire a décliné votre invitation à rejoindre\n「%s」."

	// Prayer bead names and flavor text; IDs match the other language tables.
	i.beads = []Bead{
		{1, "Perle des Tempêtes", "Une perle de prière imprégnée du pouvoir des tempêtes.\nInvoque des vents déchaînés pour soutenir les alliés."},
		{3, "Perle de Tranchant", "Une perle de prière imprégnée du pouvoir tranchant.\nAccorde aux alliés une force de coupe accrue."},
		{4, "Perle de Vitalité", "Une perle de prière imprégnée de vitalité.\nAugmente les points de vie des alliés proches."},
		{8, "Perle de Guérison", "Une perle de prière imprégnée du pouvoir de guérison.\nProtège les alliés avec une énergie restauratrice."},
		{9, "Perle de Fureur", "Une perle de prière imprégnée d'énergie furieuse.\nEmbrasse les alliés d'une rage au combat."},
		{10, "Perle de Fléau", "Une perle de prière imprégnée de miasmes.\nInfuse les alliés d'un pouvoir venimeux."},
		{11, "Perle de Puissance", "Une perle de prière imprégnée d'une force brute.\nAccorde aux alliés une force accablante."},
		{14, "Perle du Tonnerre", "Une perle de prière imprégnée de foudre.\nCharge les alliés d'une force électrique."},
		{15, "Perle de Glace", "Une perle de prière imprégnée d'un froid glacial.\nAccorde aux alliés un pouvoir élémentaire glacé."},
		{17, "Perle de Feu", "Une perle de prière imprégnée d'une chaleur brûlante.\nEnflamme les alliés d'un pouvoir élémentaire ardent."},
		{18, "Perle d'Eau", "Une perle de prière imprégnée d'eau courante.\nAccorde aux alliés un pouvoir élémentaire aquatique."},
		{19, "Perle du Dragon", "Une perle de prière imprégnée d'énergie draconique.\nAccorde aux alliés un pouvoir élémentaire draconique."},
		{20, "Perle de Terre", "Une perle de prière imprégnée du pouvoir de la terre.\nAncre les alliés avec une force élémentaire tellurique."},
		{21, "Perle du Vent", "Une perle de prière imprégnée d'un vent rapide.\nAccorde aux alliés une agilité accrue."},
		{22, "Perle de Lumière", "Une perle de prière imprégnée d'une lumière radieuse.\nInspire les alliés avec une énergie lumineuse."},
		{23, "Perle d'Ombre", "Une perle de prière imprégnée d'obscurité.\nInfuse les alliés d'un pouvoir ténébreux."},
		{24, "Perle de Fer", "Une perle de prière imprégnée de la résistance du fer.\nAccorde aux alliés une défense renforcée."},
		{25, "Perle d'Immunité", "Une perle de prière imprégnée d'un pouvoir de scellement.\nAnnule les faiblesses élémentaires des alliés."},
	}

	return i
}
|
||||
99
server/channelserver/lang_jp.go
Normal file
99
server/channelserver/lang_jp.go
Normal file
@@ -0,0 +1,99 @@
|
||||
package channelserver
|
||||
|
||||
func langJapanese() i18n {
|
||||
var i i18n
|
||||
|
||||
i.language = "日本語"
|
||||
i.cafe.reset = "%d/%dにリセット"
|
||||
i.timer = "タイマー:%02d'%02d\"%02d.%03d (%df)"
|
||||
|
||||
i.commands.noOp = "このコマンドを使用する権限がありません"
|
||||
i.commands.disabled = "%sのコマンドは無効です"
|
||||
i.commands.reload = "リロードします"
|
||||
i.commands.kqf.get = "現在のキークエストフラグ:%x"
|
||||
i.commands.kqf.set.error = "キークエコマンドエラー 例:%s set xxxxxxxxxxxxxxxx"
|
||||
i.commands.kqf.set.success = "キークエストのフラグが更新されました。ワールド/ランドを移動してください"
|
||||
i.commands.kqf.version = "このコマンドはMHFG10以前では無効です"
|
||||
i.commands.rights.error = "コース更新コマンドエラー 例:%s x"
|
||||
i.commands.rights.success = "コース情報を更新しました:%d"
|
||||
i.commands.course.error = "コース確認コマンドエラー 例:%s <name>"
|
||||
i.commands.course.disabled = "%sコースは無効です"
|
||||
i.commands.course.enabled = "%sコースは有効です"
|
||||
i.commands.course.locked = "%sコースはロックされています"
|
||||
i.commands.teleport.error = "テレポートコマンドエラー 構文:%s x y"
|
||||
i.commands.teleport.success = "%d %dにテレポート"
|
||||
i.commands.psn.error = "PSN連携コマンドエラー 例:%s <psn id>"
|
||||
i.commands.psn.success = "PSN「%s」が連携されています"
|
||||
i.commands.psn.exists = "PSNは既存のユーザに接続されています"
|
||||
|
||||
i.commands.discord.success = "あなたのDiscordトークン:%s"
|
||||
|
||||
i.commands.ban.noUser = "ユーザーが見つかりません"
|
||||
i.commands.ban.success = "%sをBANしました"
|
||||
i.commands.ban.invalid = "無効なキャラクターIDです"
|
||||
i.commands.ban.error = "コマンドエラー 例:%s <id> [期間]"
|
||||
i.commands.ban.length = " ~%sまで"
|
||||
|
||||
i.commands.playtime = "プレイ時間:%d時間%d分%d秒"
|
||||
|
||||
i.commands.timer.enabled = "クエストタイマーが有効になりました"
|
||||
i.commands.timer.disabled = "クエストタイマーが無効になりました"
|
||||
|
||||
i.commands.ravi.noCommand = "ラヴィコマンドが指定されていません"
|
||||
i.commands.ravi.start.success = "大討伐を開始します"
|
||||
i.commands.ravi.start.error = "大討伐は既に開催されています"
|
||||
i.commands.ravi.multiplier = "ラヴィダメージ倍率:x%.2f"
|
||||
i.commands.ravi.res.success = "復活支援を実行します"
|
||||
i.commands.ravi.res.error = "復活支援は実行されませんでした"
|
||||
i.commands.ravi.sed.success = "鎮静支援を実行します"
|
||||
i.commands.ravi.request = "鎮静支援を要請します"
|
||||
i.commands.ravi.error = "ラヴィコマンドが認識されません"
|
||||
i.commands.ravi.noPlayers = "誰も大討伐に参加していません"
|
||||
i.commands.ravi.version = "このコマンドはMHFZZ以外では無効です"
|
||||
|
||||
i.raviente.berserk = "<大討伐:猛狂期>が開催されました!"
|
||||
i.raviente.extreme = "<大討伐:猛狂期【極】>が開催されました!"
|
||||
i.raviente.extremeLimited = "<大討伐:猛狂期【極】(制限付)>が開催されました!"
|
||||
i.raviente.berserkSmall = "<大討伐:猛狂期(小数)>が開催されました!"
|
||||
|
||||
i.guild.rookieGuildName = "新米猟団%d"
|
||||
i.guild.returnGuildName = "復帰猟団%d"
|
||||
|
||||
i.guild.invite.title = "猟団勧誘のご案内"
|
||||
i.guild.invite.body = "猟団「%s」からの勧誘通知です。\n「勧誘に返答」より、返答を行ってください。"
|
||||
|
||||
i.guild.invite.success.title = "成功"
|
||||
i.guild.invite.success.body = "あなたは「%s」に参加できました。"
|
||||
|
||||
i.guild.invite.accepted.title = "承諾されました"
|
||||
i.guild.invite.accepted.body = "招待した狩人が「%s」への招待を承諾しました。"
|
||||
|
||||
i.guild.invite.rejected.title = "却下しました"
|
||||
i.guild.invite.rejected.body = "あなたは「%s」への参加を却下しました。"
|
||||
|
||||
i.guild.invite.declined.title = "辞退しました"
|
||||
i.guild.invite.declined.body = "招待した狩人が「%s」への招待を辞退しました。"
|
||||
|
||||
i.beads = []Bead{
|
||||
{1, "暴風の祈珠", "暴風の力を宿した祈珠。\n嵐を呼ぶ力で仲間を鼓舞する。"},
|
||||
{3, "断力の祈珠", "断力の力を宿した祈珠。\n斬撃の力を仲間に授ける。"},
|
||||
{4, "活力の祈珠", "活力の力を宿した祈珠。\n体力を高める力で仲間を鼓舞する。"},
|
||||
{8, "癒しの祈珠", "癒しの力を宿した祈珠。\n回復の力で仲間を守る。"},
|
||||
{9, "激昂の祈珠", "激昂の力を宿した祈珠。\n怒りの力を仲間に与える。"},
|
||||
{10, "瘴気の祈珠", "瘴気の力を宿した祈珠。\n毒霧の力を仲間に与える。"},
|
||||
{11, "剛力の祈珠", "剛力の力を宿した祈珠。\n強大な力を仲間に授ける。"},
|
||||
{14, "雷光の祈珠", "雷光の力を宿した祈珠。\n稲妻の力を仲間に与える。"},
|
||||
{15, "氷結の祈珠", "氷結の力を宿した祈珠。\n冷気の力を仲間に与える。"},
|
||||
{17, "炎熱の祈珠", "炎熱の力を宿した祈珠。\n炎の力を仲間に与える。"},
|
||||
{18, "水流の祈珠", "水流の力を宿した祈珠。\n水の力を仲間に与える。"},
|
||||
{19, "龍気の祈珠", "龍気の力を宿した祈珠。\n龍属性の力を仲間に与える。"},
|
||||
{20, "大地の祈珠", "大地の力を宿した祈珠。\n大地の力を仲間に与える。"},
|
||||
{21, "疾風の祈珠", "疾風の力を宿した祈珠。\n素早さを高める力を仲間に与える。"},
|
||||
{22, "光輝の祈珠", "光輝の力を宿した祈珠。\n光の力で仲間を鼓舞する。"},
|
||||
{23, "暗影の祈珠", "暗影の力を宿した祈珠。\n闇の力を仲間に与える。"},
|
||||
{24, "鋼鉄の祈珠", "鋼鉄の力を宿した祈珠。\n防御力を高める力を仲間に与える。"},
|
||||
{25, "封属の祈珠", "封属の力を宿した祈珠。\n属性を封じる力を仲間に与える。"},
|
||||
}
|
||||
|
||||
return i
|
||||
}
|
||||
1101
server/channelserver/quest_json.go
Normal file
1101
server/channelserver/quest_json.go
Normal file
File diff suppressed because it is too large
Load Diff
844
server/channelserver/quest_json_parser.go
Normal file
844
server/channelserver/quest_json_parser.go
Normal file
@@ -0,0 +1,844 @@
|
||||
package channelserver
|
||||
|
||||
import (
|
||||
"encoding/binary"
|
||||
"fmt"
|
||||
"math"
|
||||
|
||||
"golang.org/x/text/encoding/japanese"
|
||||
"golang.org/x/text/transform"
|
||||
)
|
||||
|
||||
// ParseQuestBinary reads a MHF quest binary (ZZ/G10 layout, little-endian)
|
||||
// and returns a QuestJSON ready for re-compilation with CompileQuestJSON.
|
||||
//
|
||||
// The binary layout is described in quest_json.go (CompileQuestJSON).
|
||||
// Sections guarded by null pointers in the header are skipped; the
|
||||
// corresponding QuestJSON slices will be nil/empty.
|
||||
func ParseQuestBinary(data []byte) (*QuestJSON, error) {
|
||||
if len(data) < 0x86 {
|
||||
return nil, fmt.Errorf("quest binary too short: %d bytes (minimum 0x86)", len(data))
|
||||
}
|
||||
|
||||
// ── Helper closures ──────────────────────────────────────────────────
|
||||
u8 := func(off int) uint8 {
|
||||
return data[off]
|
||||
}
|
||||
u16 := func(off int) uint16 {
|
||||
return binary.LittleEndian.Uint16(data[off:])
|
||||
}
|
||||
i16 := func(off int) int16 {
|
||||
return int16(binary.LittleEndian.Uint16(data[off:]))
|
||||
}
|
||||
u32 := func(off int) uint32 {
|
||||
return binary.LittleEndian.Uint32(data[off:])
|
||||
}
|
||||
f32 := func(off int) float32 {
|
||||
return math.Float32frombits(binary.LittleEndian.Uint32(data[off:]))
|
||||
}
|
||||
|
||||
// check bounds-checks a read of n bytes at off.
|
||||
check := func(off, n int, ctx string) error {
|
||||
if off < 0 || off+n > len(data) {
|
||||
return fmt.Errorf("%s: offset 0x%X len %d out of bounds (file len %d)", ctx, off, n, len(data))
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// readSJIS reads a null-terminated Shift-JIS string starting at off.
|
||||
readSJIS := func(off int) (string, error) {
|
||||
if off < 0 || off >= len(data) {
|
||||
return "", fmt.Errorf("string offset 0x%X out of bounds", off)
|
||||
}
|
||||
end := off
|
||||
for end < len(data) && data[end] != 0 {
|
||||
end++
|
||||
}
|
||||
sjis := data[off:end]
|
||||
if len(sjis) == 0 {
|
||||
return "", nil
|
||||
}
|
||||
dec := japanese.ShiftJIS.NewDecoder()
|
||||
utf8, _, err := transform.Bytes(dec, sjis)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("shift-jis decode at 0x%X: %w", off, err)
|
||||
}
|
||||
return string(utf8), nil
|
||||
}
|
||||
|
||||
q := &QuestJSON{}
|
||||
|
||||
// ── Header (0x00–0x43) ───────────────────────────────────────────────
|
||||
questTypeFlagsPtr := int(u32(0x00))
|
||||
loadedStagesPtr := int(u32(0x04))
|
||||
supplyBoxPtr := int(u32(0x08))
|
||||
rewardPtr := int(u32(0x0C))
|
||||
questAreaPtr := int(u32(0x14))
|
||||
largeMonsterPtr := int(u32(0x18))
|
||||
areaTransitionsPtr := int(u32(0x1C))
|
||||
areaMappingPtr := int(u32(0x20))
|
||||
mapInfoPtr := int(u32(0x24))
|
||||
gatheringPointsPtr := int(u32(0x28))
|
||||
areaFacilitiesPtr := int(u32(0x2C))
|
||||
someStringsPtr := int(u32(0x30))
|
||||
unk34Ptr := int(u32(0x34)) // stages-end sentinel
|
||||
gatheringTablesPtr := int(u32(0x38))
|
||||
|
||||
// ── General Quest Properties (0x44–0x85) ────────────────────────────
|
||||
q.MonsterSizeMulti = u16(0x44)
|
||||
q.SizeRange = u16(0x46)
|
||||
q.StatTable1 = u32(0x48)
|
||||
q.MainRankPoints = u32(0x4C)
|
||||
// 0x50 unknown u32 — skipped
|
||||
q.SubARankPoints = u32(0x54)
|
||||
q.SubBRankPoints = u32(0x58)
|
||||
// 0x5C questTypeID/unknown — skipped
|
||||
// 0x60 padding
|
||||
q.StatTable2 = u8(0x61)
|
||||
// 0x62–0x72 padding
|
||||
// 0x73 questKn1, 0x74 questKn2, 0x76 questKn3 — skipped
|
||||
gatheringTablesQty := int(u16(0x78))
|
||||
// 0x7A unknown
|
||||
area1Zones := int(u8(0x7C))
|
||||
// 0x7D–0x7F area2–4Zones (not needed for parsing)
|
||||
|
||||
// ── Main Quest Properties (at questTypeFlagsPtr, 320 bytes) ─────────
|
||||
if questTypeFlagsPtr == 0 {
|
||||
return nil, fmt.Errorf("questTypeFlagsPtr is null; cannot read main quest properties")
|
||||
}
|
||||
if err := check(questTypeFlagsPtr, questBodyLenZZ, "mainQuestProperties"); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
mp := questTypeFlagsPtr // shorthand
|
||||
|
||||
q.RankBand = u16(mp + 0x08)
|
||||
q.Fee = u32(mp + 0x0C)
|
||||
q.RewardMain = u32(mp + 0x10)
|
||||
q.RewardSubA = u16(mp + 0x18)
|
||||
q.RewardSubB = u16(mp + 0x1C)
|
||||
q.HardHRReq = u16(mp + 0x1E)
|
||||
questFrames := u32(mp + 0x20)
|
||||
q.TimeLimitMinutes = questFrames / (60 * 30)
|
||||
q.Map = u32(mp + 0x24)
|
||||
questStringsPtr := int(u32(mp + 0x28))
|
||||
q.QuestID = u16(mp + 0x2E)
|
||||
|
||||
// +0x30 objectives[3] (8 bytes each)
|
||||
objectives, err := parseObjectives(data, mp+0x30)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
q.ObjectiveMain = objectives[0]
|
||||
q.ObjectiveSubA = objectives[1]
|
||||
q.ObjectiveSubB = objectives[2]
|
||||
|
||||
// +0x4C joinRankMin/Max, postRankMin/Max
|
||||
q.JoinRankMin = u16(mp + 0x4C)
|
||||
q.JoinRankMax = u16(mp + 0x4E)
|
||||
q.PostRankMin = u16(mp + 0x50)
|
||||
q.PostRankMax = u16(mp + 0x52)
|
||||
|
||||
// +0x5C forced equipment (6 slots × 4 × u16 = 48 bytes)
|
||||
eq, hasEquip := parseForcedEquip(data, mp+0x5C)
|
||||
if hasEquip {
|
||||
q.ForcedEquipment = eq
|
||||
}
|
||||
|
||||
// +0x97 questVariants
|
||||
q.QuestVariant1 = u8(mp + 0x97)
|
||||
q.QuestVariant2 = u8(mp + 0x98)
|
||||
q.QuestVariant3 = u8(mp + 0x99)
|
||||
q.QuestVariant4 = u8(mp + 0x9A)
|
||||
|
||||
// ── QuestText strings ────────────────────────────────────────────────
|
||||
if questStringsPtr != 0 {
|
||||
if err := check(questStringsPtr, 32, "questTextTable"); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
strPtrs := make([]int, 8)
|
||||
for i := range strPtrs {
|
||||
strPtrs[i] = int(u32(questStringsPtr + i*4))
|
||||
}
|
||||
texts := make([]string, 8)
|
||||
for i, ptr := range strPtrs {
|
||||
if ptr == 0 {
|
||||
continue
|
||||
}
|
||||
s, err := readSJIS(ptr)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("string[%d]: %w", i, err)
|
||||
}
|
||||
texts[i] = s
|
||||
}
|
||||
q.Title = texts[0]
|
||||
q.TextMain = texts[1]
|
||||
q.TextSubA = texts[2]
|
||||
q.TextSubB = texts[3]
|
||||
q.SuccessCond = texts[4]
|
||||
q.FailCond = texts[5]
|
||||
q.Contractor = texts[6]
|
||||
q.Description = texts[7]
|
||||
}
|
||||
|
||||
// ── Stages ───────────────────────────────────────────────────────────
|
||||
if loadedStagesPtr != 0 && unk34Ptr > loadedStagesPtr {
|
||||
off := loadedStagesPtr
|
||||
for off+16 <= unk34Ptr {
|
||||
if err := check(off, 16, "stage"); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
stageID := u32(off)
|
||||
q.Stages = append(q.Stages, QuestStageJSON{StageID: stageID})
|
||||
off += 16
|
||||
}
|
||||
}
|
||||
|
||||
// ── Supply Box ───────────────────────────────────────────────────────
|
||||
if supplyBoxPtr != 0 {
|
||||
const supplyBoxSize = (24 + 8 + 8) * 4
|
||||
if err := check(supplyBoxPtr, supplyBoxSize, "supplyBox"); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
q.SupplyMain = readSupplySlots(data, supplyBoxPtr, 24)
|
||||
q.SupplySubA = readSupplySlots(data, supplyBoxPtr+24*4, 8)
|
||||
q.SupplySubB = readSupplySlots(data, supplyBoxPtr+24*4+8*4, 8)
|
||||
}
|
||||
|
||||
// ── Reward Tables ────────────────────────────────────────────────────
|
||||
if rewardPtr != 0 {
|
||||
tables, err := parseRewardTables(data, rewardPtr)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
q.Rewards = tables
|
||||
}
|
||||
|
||||
// ── Large Monster Spawns ─────────────────────────────────────────────
|
||||
if largeMonsterPtr != 0 {
|
||||
monsters, err := parseMonsterSpawns(data, largeMonsterPtr, f32)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
q.LargeMonsters = monsters
|
||||
}
|
||||
|
||||
// ── Map Sections (questAreaPtr) ──────────────────────────────────────
|
||||
// Layout: u32 ptr[] terminated by u32(0), then each mapSection:
|
||||
// u32 loadedStage, u32 unk, u32 spawnTypesPtr, u32 spawnStatsPtr,
|
||||
// u32(0) gap, u16 unk — then spawnTypes and spawnStats data.
|
||||
if questAreaPtr != 0 {
|
||||
sections, err := parseMapSections(data, questAreaPtr, u32, u16, f32)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
q.MapSections = sections
|
||||
}
|
||||
|
||||
// ── Area Mappings (areaMappingPtr) ────────────────────────────────────
|
||||
// Read AreaMappings until reaching areaTransitionsPtr (or end of file
|
||||
// if areaTransitionsPtr is null). Each entry is 32 bytes.
|
||||
if areaMappingPtr != 0 {
|
||||
endOff := len(data)
|
||||
if areaTransitionsPtr != 0 {
|
||||
endOff = areaTransitionsPtr
|
||||
}
|
||||
mappings, err := parseAreaMappings(data, areaMappingPtr, endOff, f32)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
q.AreaMappings = mappings
|
||||
}
|
||||
|
||||
// ── Area Transitions (areaTransitionsPtr) ─────────────────────────────
|
||||
// playerAreaChange[area1Zones]: one u32 ptr per zone.
|
||||
if areaTransitionsPtr != 0 && area1Zones > 0 {
|
||||
transitions, err := parseAreaTransitions(data, areaTransitionsPtr, area1Zones, u32, i16, f32)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
q.AreaTransitions = transitions
|
||||
}
|
||||
|
||||
// ── Map Info (mapInfoPtr) ─────────────────────────────────────────────
|
||||
if mapInfoPtr != 0 {
|
||||
if err := check(mapInfoPtr, 8, "mapInfo"); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
q.MapInfo = &QuestMapInfoJSON{
|
||||
MapID: u32(mapInfoPtr),
|
||||
ReturnBCID: u32(mapInfoPtr + 4),
|
||||
}
|
||||
}
|
||||
|
||||
// ── Gathering Points (gatheringPointsPtr) ─────────────────────────────
|
||||
// ptGatheringPoint[area1Zones]: one u32 ptr per zone.
|
||||
if gatheringPointsPtr != 0 && area1Zones > 0 {
|
||||
gatherPts, err := parseGatheringPoints(data, gatheringPointsPtr, area1Zones, u32, u16, f32)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
q.GatheringPoints = gatherPts
|
||||
}
|
||||
|
||||
// ── Area Facilities (areaFacilitiesPtr) ───────────────────────────────
|
||||
// ptVar<facPointBlock>[area1Zones]: one u32 ptr per zone.
|
||||
if areaFacilitiesPtr != 0 && area1Zones > 0 {
|
||||
facilities, err := parseAreaFacilities(data, areaFacilitiesPtr, area1Zones, u32, u16, f32)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
q.AreaFacilities = facilities
|
||||
}
|
||||
|
||||
// ── Some Strings (someStringsPtr / unk30) ─────────────────────────────
|
||||
// Layout: ptr someStringPtr, ptr questTypePtr (8 bytes at someStringsPtr).
|
||||
if someStringsPtr != 0 {
|
||||
if err := check(someStringsPtr, 8, "someStrings"); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
someStrP := int(u32(someStringsPtr))
|
||||
questTypeP := int(u32(someStringsPtr + 4))
|
||||
if someStrP != 0 {
|
||||
s, err := readSJIS(someStrP)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("someString: %w", err)
|
||||
}
|
||||
q.SomeString = s
|
||||
}
|
||||
if questTypeP != 0 {
|
||||
s, err := readSJIS(questTypeP)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("questTypeString: %w", err)
|
||||
}
|
||||
q.QuestType = s
|
||||
}
|
||||
}
|
||||
|
||||
// ── Gathering Tables (gatheringTablesPtr) ─────────────────────────────
|
||||
// ptVar<gatheringTable>[gatheringTablesQty]: one u32 ptr per table.
|
||||
// GatherItem: u16 rate + u16 item, terminated by u16(0xFFFF).
|
||||
if gatheringTablesPtr != 0 && gatheringTablesQty > 0 {
|
||||
tables, err := parseGatheringTables(data, gatheringTablesPtr, gatheringTablesQty, u32, u16)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
q.GatheringTables = tables
|
||||
}
|
||||
|
||||
return q, nil
|
||||
}
|
||||
|
||||
// ── Section parsers ──────────────────────────────────────────────────────────
|
||||
|
||||
// parseObjectives reads the three 8-byte objective entries at off.
|
||||
func parseObjectives(data []byte, off int) ([3]QuestObjectiveJSON, error) {
|
||||
var objs [3]QuestObjectiveJSON
|
||||
for i := range objs {
|
||||
base := off + i*8
|
||||
if base+8 > len(data) {
|
||||
return objs, fmt.Errorf("objective[%d] at 0x%X out of bounds", i, base)
|
||||
}
|
||||
goalType := binary.LittleEndian.Uint32(data[base:])
|
||||
typeName, ok := objTypeToString(goalType)
|
||||
if !ok {
|
||||
typeName = "none"
|
||||
}
|
||||
obj := QuestObjectiveJSON{Type: typeName}
|
||||
|
||||
if goalType != questObjNone {
|
||||
switch goalType {
|
||||
case questObjHunt, questObjCapture, questObjSlay, questObjDamage,
|
||||
questObjSlayOrDamage, questObjBreakPart:
|
||||
obj.Target = uint16(data[base+4])
|
||||
// data[base+5] is padding
|
||||
default:
|
||||
obj.Target = binary.LittleEndian.Uint16(data[base+4:])
|
||||
}
|
||||
|
||||
secondary := binary.LittleEndian.Uint16(data[base+6:])
|
||||
if goalType == questObjBreakPart {
|
||||
obj.Part = secondary
|
||||
} else {
|
||||
obj.Count = secondary
|
||||
}
|
||||
}
|
||||
objs[i] = obj
|
||||
}
|
||||
return objs, nil
|
||||
}
|
||||
|
||||
// parseForcedEquip reads 6 slots × 4 uint16 at off.
|
||||
// Returns nil, false if all values are zero (no forced equipment).
|
||||
func parseForcedEquip(data []byte, off int) (*QuestForcedEquipJSON, bool) {
|
||||
eq := &QuestForcedEquipJSON{}
|
||||
slots := []*[4]uint16{&eq.Legs, &eq.Weapon, &eq.Head, &eq.Chest, &eq.Arms, &eq.Waist}
|
||||
anyNonZero := false
|
||||
for _, slot := range slots {
|
||||
for j := range slot {
|
||||
v := binary.LittleEndian.Uint16(data[off:])
|
||||
slot[j] = v
|
||||
if v != 0 {
|
||||
anyNonZero = true
|
||||
}
|
||||
off += 2
|
||||
}
|
||||
}
|
||||
if !anyNonZero {
|
||||
return nil, false
|
||||
}
|
||||
return eq, true
|
||||
}
|
||||
|
||||
// readSupplySlots reads n supply item slots (each 4 bytes: u16 item + u16 qty)
|
||||
// starting at off and returns only non-empty entries (item != 0).
|
||||
func readSupplySlots(data []byte, off, n int) []QuestSupplyItemJSON {
|
||||
var out []QuestSupplyItemJSON
|
||||
for i := 0; i < n; i++ {
|
||||
base := off + i*4
|
||||
item := binary.LittleEndian.Uint16(data[base:])
|
||||
qty := binary.LittleEndian.Uint16(data[base+2:])
|
||||
if item == 0 {
|
||||
continue
|
||||
}
|
||||
out = append(out, QuestSupplyItemJSON{Item: item, Quantity: qty})
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
// parseRewardTables reads the reward table array starting at baseOff.
|
||||
// Header array: {u8 tableId, u8 pad, u16 pad, u32 tableOffset} per entry,
|
||||
// terminated by int16(-1). tableOffset is relative to baseOff.
|
||||
// Each item list: {u16 rate, u16 item, u16 quantity} terminated by int16(-1).
|
||||
func parseRewardTables(data []byte, baseOff int) ([]QuestRewardTableJSON, error) {
|
||||
var tables []QuestRewardTableJSON
|
||||
off := baseOff
|
||||
for {
|
||||
if off+2 > len(data) {
|
||||
return nil, fmt.Errorf("reward table header truncated at 0x%X", off)
|
||||
}
|
||||
if binary.LittleEndian.Uint16(data[off:]) == 0xFFFF {
|
||||
break
|
||||
}
|
||||
if off+8 > len(data) {
|
||||
return nil, fmt.Errorf("reward table header entry truncated at 0x%X", off)
|
||||
}
|
||||
tableID := data[off]
|
||||
tableOff := int(binary.LittleEndian.Uint32(data[off+4:])) + baseOff
|
||||
off += 8
|
||||
|
||||
items, err := parseRewardItems(data, tableOff)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("reward table %d items: %w", tableID, err)
|
||||
}
|
||||
tables = append(tables, QuestRewardTableJSON{TableID: tableID, Items: items})
|
||||
}
|
||||
return tables, nil
|
||||
}
|
||||
|
||||
// parseRewardItems reads a null-terminated reward item list at off.
|
||||
func parseRewardItems(data []byte, off int) ([]QuestRewardItemJSON, error) {
|
||||
var items []QuestRewardItemJSON
|
||||
for {
|
||||
if off+2 > len(data) {
|
||||
return nil, fmt.Errorf("reward item list truncated at 0x%X", off)
|
||||
}
|
||||
if binary.LittleEndian.Uint16(data[off:]) == 0xFFFF {
|
||||
break
|
||||
}
|
||||
if off+6 > len(data) {
|
||||
return nil, fmt.Errorf("reward item entry truncated at 0x%X", off)
|
||||
}
|
||||
rate := binary.LittleEndian.Uint16(data[off:])
|
||||
item := binary.LittleEndian.Uint16(data[off+2:])
|
||||
qty := binary.LittleEndian.Uint16(data[off+4:])
|
||||
items = append(items, QuestRewardItemJSON{Rate: rate, Item: item, Quantity: qty})
|
||||
off += 6
|
||||
}
|
||||
return items, nil
|
||||
}
|
||||
|
||||
// parseMonsterSpawns reads large monster spawn entries at baseOff.
|
||||
// Each entry is 60 bytes; the list is terminated by a 0xFF byte.
|
||||
func parseMonsterSpawns(data []byte, baseOff int, f32fn func(int) float32) ([]QuestMonsterJSON, error) {
|
||||
var monsters []QuestMonsterJSON
|
||||
off := baseOff
|
||||
const entrySize = 60
|
||||
for {
|
||||
if off >= len(data) {
|
||||
return nil, fmt.Errorf("monster spawn list unterminated at end of file")
|
||||
}
|
||||
if data[off] == 0xFF {
|
||||
break
|
||||
}
|
||||
if off+entrySize > len(data) {
|
||||
return nil, fmt.Errorf("monster spawn entry at 0x%X truncated", off)
|
||||
}
|
||||
m := QuestMonsterJSON{
|
||||
ID: data[off],
|
||||
SpawnAmount: binary.LittleEndian.Uint32(data[off+4:]),
|
||||
SpawnStage: binary.LittleEndian.Uint32(data[off+8:]),
|
||||
// +0x0C padding[16]
|
||||
Orientation: binary.LittleEndian.Uint32(data[off+0x1C:]),
|
||||
X: f32fn(off + 0x20),
|
||||
Y: f32fn(off + 0x24),
|
||||
Z: f32fn(off + 0x28),
|
||||
// +0x2C padding[16]
|
||||
}
|
||||
monsters = append(monsters, m)
|
||||
off += entrySize
|
||||
}
|
||||
return monsters, nil
|
||||
}
|
||||
|
||||
// parseMapSections reads the MapZones structure at baseOff.
|
||||
// Layout: u32 ptr[] terminated by u32(0); each ptr points to a mapSection:
|
||||
//
|
||||
// u32 loadedStage, u32 unk, u32 spawnTypesPtr, u32 spawnStatsPtr.
|
||||
//
|
||||
// After the 16-byte mapSection: u32(0) gap + u16 unk (2 bytes).
|
||||
// spawnTypes: varPaddT<MonsterID,3> = u8+pad[3] per entry, terminated by 0xFFFF.
|
||||
// spawnStats: MinionSpawn (60 bytes) per entry, terminated by 0xFFFF in first 2 bytes.
|
||||
func parseMapSections(data []byte, baseOff int,
|
||||
u32fn func(int) uint32,
|
||||
u16fn func(int) uint16,
|
||||
f32fn func(int) float32,
|
||||
) ([]QuestMapSectionJSON, error) {
|
||||
var sections []QuestMapSectionJSON
|
||||
|
||||
// Read pointer array (terminated by u32(0)).
|
||||
off := baseOff
|
||||
for {
|
||||
if off+4 > len(data) {
|
||||
return nil, fmt.Errorf("mapSection pointer array truncated at 0x%X", off)
|
||||
}
|
||||
ptr := int(u32fn(off))
|
||||
off += 4
|
||||
if ptr == 0 {
|
||||
break
|
||||
}
|
||||
|
||||
// Read mapSection at ptr.
|
||||
if ptr+16 > len(data) {
|
||||
return nil, fmt.Errorf("mapSection at 0x%X truncated", ptr)
|
||||
}
|
||||
loadedStage := u32fn(ptr)
|
||||
// ptr+4 is unk u32 — skip
|
||||
spawnTypesPtr := int(u32fn(ptr + 8))
|
||||
spawnStatsPtr := int(u32fn(ptr + 12))
|
||||
|
||||
ms := QuestMapSectionJSON{LoadedStage: loadedStage}
|
||||
|
||||
// Read spawnTypes: varPaddT<MonsterID,3> terminated by 0xFFFF.
|
||||
if spawnTypesPtr != 0 {
|
||||
stOff := spawnTypesPtr
|
||||
for {
|
||||
if stOff+2 > len(data) {
|
||||
return nil, fmt.Errorf("spawnTypes at 0x%X truncated", stOff)
|
||||
}
|
||||
if u16fn(stOff) == 0xFFFF {
|
||||
break
|
||||
}
|
||||
if stOff+4 > len(data) {
|
||||
return nil, fmt.Errorf("spawnType entry at 0x%X truncated", stOff)
|
||||
}
|
||||
monID := data[stOff]
|
||||
ms.SpawnMonsters = append(ms.SpawnMonsters, monID)
|
||||
stOff += 4 // u8 + pad[3]
|
||||
}
|
||||
}
|
||||
|
||||
// Read spawnStats: MinionSpawn terminated by 0xFFFF in first 2 bytes.
|
||||
if spawnStatsPtr != 0 {
|
||||
const minionSize = 60
|
||||
ssOff := spawnStatsPtr
|
||||
for {
|
||||
if ssOff+2 > len(data) {
|
||||
return nil, fmt.Errorf("spawnStats at 0x%X truncated", ssOff)
|
||||
}
|
||||
// Terminator: first 2 bytes == 0xFFFF.
|
||||
if u16fn(ssOff) == 0xFFFF {
|
||||
break
|
||||
}
|
||||
if ssOff+minionSize > len(data) {
|
||||
return nil, fmt.Errorf("minionSpawn at 0x%X truncated", ssOff)
|
||||
}
|
||||
spawn := QuestMinionSpawnJSON{
|
||||
Monster: data[ssOff],
|
||||
// ssOff+1 padding
|
||||
SpawnToggle: u16fn(ssOff + 2),
|
||||
SpawnAmount: u32fn(ssOff + 4),
|
||||
// +8 unk u32, +0xC pad[16], +0x1C unk u32
|
||||
X: f32fn(ssOff + 0x20),
|
||||
Y: f32fn(ssOff + 0x24),
|
||||
Z: f32fn(ssOff + 0x28),
|
||||
}
|
||||
ms.MinionSpawns = append(ms.MinionSpawns, spawn)
|
||||
ssOff += minionSize
|
||||
}
|
||||
}
|
||||
|
||||
sections = append(sections, ms)
|
||||
}
|
||||
|
||||
return sections, nil
|
||||
}
|
||||
|
||||
// parseAreaMappings reads AreaMappings entries at baseOff until endOff.
|
||||
// Each entry is 32 bytes: float areaX, float areaZ, pad[8],
|
||||
// float baseX, float baseZ, float knPos, pad[4].
|
||||
func parseAreaMappings(data []byte, baseOff, endOff int, f32fn func(int) float32) ([]QuestAreaMappingJSON, error) {
|
||||
var mappings []QuestAreaMappingJSON
|
||||
const entrySize = 32
|
||||
off := baseOff
|
||||
for off+entrySize <= endOff {
|
||||
if off+entrySize > len(data) {
|
||||
return nil, fmt.Errorf("areaMapping at 0x%X truncated", off)
|
||||
}
|
||||
am := QuestAreaMappingJSON{
|
||||
AreaX: f32fn(off),
|
||||
AreaZ: f32fn(off + 4),
|
||||
// off+8: pad[8]
|
||||
BaseX: f32fn(off + 16),
|
||||
BaseZ: f32fn(off + 20),
|
||||
KnPos: f32fn(off + 24),
|
||||
// off+28: pad[4]
|
||||
}
|
||||
mappings = append(mappings, am)
|
||||
off += entrySize
|
||||
}
|
||||
return mappings, nil
|
||||
}
|
||||
|
||||
// parseAreaTransitions reads playerAreaChange[numZones] at baseOff.
|
||||
// Each entry is a u32 pointer to a floatSet array terminated by s16(-1).
|
||||
// floatSet: s16 targetStageId + s16 stageVariant + float[3] current + float[5] box +
|
||||
// float[3] target + s16[2] rotation = 52 bytes.
|
||||
func parseAreaTransitions(data []byte, baseOff, numZones int,
|
||||
u32fn func(int) uint32,
|
||||
i16fn func(int) int16,
|
||||
f32fn func(int) float32,
|
||||
) ([]QuestAreaTransitionsJSON, error) {
|
||||
result := make([]QuestAreaTransitionsJSON, numZones)
|
||||
|
||||
if baseOff+numZones*4 > len(data) {
|
||||
return nil, fmt.Errorf("areaTransitions pointer array at 0x%X truncated", baseOff)
|
||||
}
|
||||
|
||||
for i := 0; i < numZones; i++ {
|
||||
ptr := int(u32fn(baseOff + i*4))
|
||||
if ptr == 0 {
|
||||
// Null pointer — no transitions for this zone.
|
||||
continue
|
||||
}
|
||||
|
||||
// Read floatSet entries until targetStageId1 == -1.
|
||||
var transitions []QuestAreaTransitionJSON
|
||||
off := ptr
|
||||
for {
|
||||
if off+2 > len(data) {
|
||||
return nil, fmt.Errorf("floatSet at 0x%X truncated", off)
|
||||
}
|
||||
targetStageID := i16fn(off)
|
||||
if targetStageID == -1 {
|
||||
break
|
||||
}
|
||||
// Each floatSet is 52 bytes:
|
||||
// s16 targetStageId1 + s16 stageVariant = 4
|
||||
// float[3] current = 12
|
||||
// float[5] transitionBox = 20
|
||||
// float[3] target = 12
|
||||
// s16[2] rotation = 4
|
||||
// Total = 52
|
||||
const floatSetSize = 52
|
||||
if off+floatSetSize > len(data) {
|
||||
return nil, fmt.Errorf("floatSet at 0x%X truncated (need %d bytes)", off, floatSetSize)
|
||||
}
|
||||
tr := QuestAreaTransitionJSON{
|
||||
TargetStageID1: targetStageID,
|
||||
StageVariant: i16fn(off + 2),
|
||||
CurrentX: f32fn(off + 4),
|
||||
CurrentY: f32fn(off + 8),
|
||||
CurrentZ: f32fn(off + 12),
|
||||
TargetX: f32fn(off + 36),
|
||||
TargetY: f32fn(off + 40),
|
||||
TargetZ: f32fn(off + 44),
|
||||
}
|
||||
for j := 0; j < 5; j++ {
|
||||
tr.TransitionBox[j] = f32fn(off + 16 + j*4)
|
||||
}
|
||||
tr.TargetRotation[0] = i16fn(off + 48)
|
||||
tr.TargetRotation[1] = i16fn(off + 50)
|
||||
transitions = append(transitions, tr)
|
||||
off += floatSetSize
|
||||
}
|
||||
result[i] = QuestAreaTransitionsJSON{Transitions: transitions}
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// parseGatheringPoints reads ptGatheringPoint[numZones] at baseOff.
|
||||
// Each entry is a u32 pointer to gatheringPoint[4] terminated by xPos==-1.0.
|
||||
// gatheringPoint: float xPos, yPos, zPos, range, u16 gatheringID, u16 maxCount, pad[2], u16 minCount = 24 bytes.
|
||||
func parseGatheringPoints(data []byte, baseOff, numZones int,
|
||||
u32fn func(int) uint32,
|
||||
u16fn func(int) uint16,
|
||||
f32fn func(int) float32,
|
||||
) ([]QuestAreaGatheringJSON, error) {
|
||||
result := make([]QuestAreaGatheringJSON, numZones)
|
||||
|
||||
if baseOff+numZones*4 > len(data) {
|
||||
return nil, fmt.Errorf("gatheringPoints pointer array at 0x%X truncated", baseOff)
|
||||
}
|
||||
|
||||
const sentinel = uint32(0xBF800000) // float32(-1.0)
|
||||
const pointSize = 24
|
||||
|
||||
for i := 0; i < numZones; i++ {
|
||||
ptr := int(u32fn(baseOff + i*4))
|
||||
if ptr == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
var points []QuestGatheringPointJSON
|
||||
off := ptr
|
||||
for {
|
||||
if off+4 > len(data) {
|
||||
return nil, fmt.Errorf("gatheringPoint at 0x%X truncated", off)
|
||||
}
|
||||
// Terminator: xPos bit pattern == 0xBF800000 (-1.0f).
|
||||
if binary.LittleEndian.Uint32(data[off:]) == sentinel {
|
||||
break
|
||||
}
|
||||
if off+pointSize > len(data) {
|
||||
return nil, fmt.Errorf("gatheringPoint entry at 0x%X truncated", off)
|
||||
}
|
||||
gp := QuestGatheringPointJSON{
|
||||
X: f32fn(off),
|
||||
Y: f32fn(off + 4),
|
||||
Z: f32fn(off + 8),
|
||||
Range: f32fn(off + 12),
|
||||
GatheringID: u16fn(off + 16),
|
||||
MaxCount: u16fn(off + 18),
|
||||
// off+20 pad[2]
|
||||
MinCount: u16fn(off + 22),
|
||||
}
|
||||
points = append(points, gp)
|
||||
off += pointSize
|
||||
}
|
||||
result[i] = QuestAreaGatheringJSON{Points: points}
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// parseAreaFacilities reads ptVar<facPointBlock>[numZones] at baseOff.
|
||||
// Each entry is a u32 pointer to a facPointBlock.
|
||||
// facPoint: pad[2] + SpecAc(u16) + xPos + yPos + zPos + range + id(u16) + pad[2] = 24 bytes.
|
||||
// Termination: the loop condition checks read_unsigned($+4,4) != 0xBF800000.
|
||||
// So a facPoint whose xPos (at offset +4 from start of that potential entry) == -1.0 terminates.
|
||||
// After all facPoints: padding[0xC] + float + float = 20 bytes (block footer, not parsed into JSON).
|
||||
func parseAreaFacilities(data []byte, baseOff, numZones int,
|
||||
u32fn func(int) uint32,
|
||||
u16fn func(int) uint16,
|
||||
f32fn func(int) float32,
|
||||
) ([]QuestAreaFacilitiesJSON, error) {
|
||||
result := make([]QuestAreaFacilitiesJSON, numZones)
|
||||
|
||||
if baseOff+numZones*4 > len(data) {
|
||||
return nil, fmt.Errorf("areaFacilities pointer array at 0x%X truncated", baseOff)
|
||||
}
|
||||
|
||||
const sentinel = uint32(0xBF800000)
|
||||
const pointSize = 24
|
||||
|
||||
for i := 0; i < numZones; i++ {
|
||||
ptr := int(u32fn(baseOff + i*4))
|
||||
if ptr == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
var points []QuestFacilityPointJSON
|
||||
off := ptr
|
||||
for off+8 <= len(data) {
|
||||
// Check: read_unsigned($+4, 4) == sentinel means terminate.
|
||||
// $+4 is the xPos field of the potential next facPoint.
|
||||
if binary.LittleEndian.Uint32(data[off+4:]) == sentinel {
|
||||
break
|
||||
}
|
||||
if off+pointSize > len(data) {
|
||||
return nil, fmt.Errorf("facPoint at 0x%X truncated", off)
|
||||
}
|
||||
fp := QuestFacilityPointJSON{
|
||||
// off+0: pad[2]
|
||||
Type: u16fn(off + 2),
|
||||
X: f32fn(off + 4),
|
||||
Y: f32fn(off + 8),
|
||||
Z: f32fn(off + 12),
|
||||
Range: f32fn(off + 16),
|
||||
ID: u16fn(off + 20),
|
||||
// off+22: pad[2]
|
||||
}
|
||||
points = append(points, fp)
|
||||
off += pointSize
|
||||
}
|
||||
result[i] = QuestAreaFacilitiesJSON{Points: points}
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// parseGatheringTables reads ptVar<gatheringTable>[count] at baseOff.
|
||||
// Each entry is a u32 pointer to GatherItem[] terminated by u16(0xFFFF).
|
||||
// GatherItem: u16 rate + u16 item = 4 bytes.
|
||||
func parseGatheringTables(data []byte, baseOff, count int,
|
||||
u32fn func(int) uint32,
|
||||
u16fn func(int) uint16,
|
||||
) ([]QuestGatheringTableJSON, error) {
|
||||
result := make([]QuestGatheringTableJSON, count)
|
||||
|
||||
if baseOff+count*4 > len(data) {
|
||||
return nil, fmt.Errorf("gatheringTables pointer array at 0x%X truncated", baseOff)
|
||||
}
|
||||
|
||||
for i := 0; i < count; i++ {
|
||||
ptr := int(u32fn(baseOff + i*4))
|
||||
if ptr == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
var items []QuestGatherItemJSON
|
||||
off := ptr
|
||||
for {
|
||||
if off+2 > len(data) {
|
||||
return nil, fmt.Errorf("gatheringTable at 0x%X truncated", off)
|
||||
}
|
||||
if u16fn(off) == 0xFFFF {
|
||||
break
|
||||
}
|
||||
if off+4 > len(data) {
|
||||
return nil, fmt.Errorf("gatherItem at 0x%X truncated", off)
|
||||
}
|
||||
items = append(items, QuestGatherItemJSON{
|
||||
Rate: u16fn(off),
|
||||
Item: u16fn(off + 2),
|
||||
})
|
||||
off += 4
|
||||
}
|
||||
result[i] = QuestGatheringTableJSON{Items: items}
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// objTypeToString maps a uint32 goal type to its JSON string name.
|
||||
// Returns "", false for unknown types.
|
||||
func objTypeToString(t uint32) (string, bool) {
|
||||
for name, v := range questObjTypeMap {
|
||||
if v == t {
|
||||
return name, true
|
||||
}
|
||||
}
|
||||
return "", false
|
||||
}
|
||||
1265
server/channelserver/quest_json_test.go
Normal file
1265
server/channelserver/quest_json_test.go
Normal file
File diff suppressed because it is too large
Load Diff
181
server/channelserver/rengoku_binary.go
Normal file
181
server/channelserver/rengoku_binary.go
Normal file
@@ -0,0 +1,181 @@
|
||||
package channelserver
|
||||
|
||||
import (
|
||||
"encoding/binary"
|
||||
"fmt"
|
||||
)
|
||||
|
||||
// rengoku binary layout (after ECD decryption + JKR decompression):
//
// @0x00: magic bytes 'r','e','f',0x1A
// @0x04: version (u8, expected 1)
// @0x05: 15 bytes of header offsets (unused by this parser)
// @0x14: RoadMode multiDef (24 bytes)
// @0x2C: RoadMode soloDef (24 bytes)
const (
	rengokuMinSize     = 0x44 // header (0x14) + two RoadModes (2×24)
	rengokuMultiOffset = 0x14 // file offset of the multiplayer RoadMode struct
	rengokuSoloOffset  = 0x2C // file offset of the solo RoadMode struct
	floorStatsByteSize = 24   // size of one FloorStats record in the binary
	spawnTableByteSize = 32   // size of one SpawnTable record in the binary
	spawnPtrEntrySize  = 4    // each spawn-table pointer is a u32
)
|
||||
|
||||
// rengokuRoadMode holds a parsed RoadMode struct. All pointer fields are file
// offsets into the raw (decrypted + decompressed) byte slice.
type rengokuRoadMode struct {
	FloorStatsCount    uint32 // number of 24-byte FloorStats records
	SpawnCountCount    uint32 // number of entries in the spawn-count pointer array
	SpawnTablePtrCount uint32 // number of entries in the spawn-table pointer array
	FloorStatsPtr      uint32 // → FloorStats[FloorStatsCount]
	SpawnTablePtrsPtr  uint32 // → u32[SpawnTablePtrCount] → SpawnTable[]
	SpawnCountPtrsPtr  uint32 // → u32[SpawnCountCount]
}
|
||||
|
||||
// RengokuBinaryInfo summarises the validated rengoku_data.bin contents for
// structured logging. It is populated by parseRengokuBinary.
type RengokuBinaryInfo struct {
	MultiFloors      int // multiplayer road: FloorStats record count
	MultiSpawnTables int // multiplayer road: spawn-table count
	SoloFloors       int // solo road: FloorStats record count
	SoloSpawnTables  int // solo road: spawn-table count
	UniqueMonsters   int // distinct non-zero monster IDs across both roads
}
|
||||
|
||||
// parseRengokuBinary validates the structural integrity of a decrypted and
|
||||
// decompressed rengoku_data.bin and returns a summary of its contents.
|
||||
//
|
||||
// It checks:
|
||||
// - magic bytes and version
|
||||
// - all pointer-derived ranges lie within the file
|
||||
// - individual spawn-table pointers fall within the file
|
||||
func parseRengokuBinary(data []byte) (*RengokuBinaryInfo, error) {
|
||||
if len(data) < rengokuMinSize {
|
||||
return nil, fmt.Errorf("rengoku: file too small (%d bytes, need %d)", len(data), rengokuMinSize)
|
||||
}
|
||||
|
||||
// Magic: 'r','e','f',0x1A
|
||||
if data[0] != 'r' || data[1] != 'e' || data[2] != 'f' || data[3] != 0x1A {
|
||||
return nil, fmt.Errorf("rengoku: invalid magic %02x %02x %02x %02x",
|
||||
data[0], data[1], data[2], data[3])
|
||||
}
|
||||
|
||||
if data[4] != 1 {
|
||||
return nil, fmt.Errorf("rengoku: unexpected version %d (want 1)", data[4])
|
||||
}
|
||||
|
||||
multi, err := readRoadMode(data, rengokuMultiOffset)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("rengoku: multiDef: %w", err)
|
||||
}
|
||||
solo, err := readRoadMode(data, rengokuSoloOffset)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("rengoku: soloDef: %w", err)
|
||||
}
|
||||
|
||||
if err := validateRoadMode(data, multi, "multiDef"); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err := validateRoadMode(data, solo, "soloDef"); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
uniqueMonsters := countUniqueMonsters(data, multi)
|
||||
for id := range countUniqueMonsters(data, solo) {
|
||||
uniqueMonsters[id] = struct{}{}
|
||||
}
|
||||
|
||||
return &RengokuBinaryInfo{
|
||||
MultiFloors: int(multi.FloorStatsCount),
|
||||
MultiSpawnTables: int(multi.SpawnTablePtrCount),
|
||||
SoloFloors: int(solo.FloorStatsCount),
|
||||
SoloSpawnTables: int(solo.SpawnTablePtrCount),
|
||||
UniqueMonsters: len(uniqueMonsters),
|
||||
}, nil
|
||||
}
|
||||
|
||||
// readRoadMode reads a 24-byte RoadMode struct from data at offset.
|
||||
func readRoadMode(data []byte, offset int) (rengokuRoadMode, error) {
|
||||
end := offset + 24
|
||||
if len(data) < end {
|
||||
return rengokuRoadMode{}, fmt.Errorf("RoadMode at 0x%X extends beyond file", offset)
|
||||
}
|
||||
d := data[offset:]
|
||||
return rengokuRoadMode{
|
||||
FloorStatsCount: binary.LittleEndian.Uint32(d[0:]),
|
||||
SpawnCountCount: binary.LittleEndian.Uint32(d[4:]),
|
||||
SpawnTablePtrCount: binary.LittleEndian.Uint32(d[8:]),
|
||||
FloorStatsPtr: binary.LittleEndian.Uint32(d[12:]),
|
||||
SpawnTablePtrsPtr: binary.LittleEndian.Uint32(d[16:]),
|
||||
SpawnCountPtrsPtr: binary.LittleEndian.Uint32(d[20:]),
|
||||
}, nil
|
||||
}
|
||||
|
||||
// ptrInBounds returns true if the region [ptr, ptr+size) fits within data.
// It guards against overflow when ptr+size wraps uint32.
func ptrInBounds(data []byte, ptr, size uint32) bool {
	// A wrapped uint32 sum is strictly smaller than either operand, so
	// end >= ptr proves the addition did not overflow.
	if end := ptr + size; end >= ptr {
		return int(end) <= len(data)
	}
	return false
}
|
||||
|
||||
// validateRoadMode checks that all pointer-derived byte ranges for a RoadMode
|
||||
// lie within data.
|
||||
func validateRoadMode(data []byte, rm rengokuRoadMode, label string) error {
|
||||
fileLen := uint32(len(data))
|
||||
|
||||
// Floor-stats array bounds.
|
||||
if !ptrInBounds(data, rm.FloorStatsPtr, rm.FloorStatsCount*floorStatsByteSize) {
|
||||
return fmt.Errorf("rengoku: %s: floorStats array [0x%X, +%d×%d] out of bounds (file %d B)",
|
||||
label, rm.FloorStatsPtr, rm.FloorStatsCount, floorStatsByteSize, fileLen)
|
||||
}
|
||||
|
||||
// Spawn-table pointer array bounds.
|
||||
if !ptrInBounds(data, rm.SpawnTablePtrsPtr, rm.SpawnTablePtrCount*spawnPtrEntrySize) {
|
||||
return fmt.Errorf("rengoku: %s: spawnTablePtrs array [0x%X, +%d×4] out of bounds (file %d B)",
|
||||
label, rm.SpawnTablePtrsPtr, rm.SpawnTablePtrCount, fileLen)
|
||||
}
|
||||
|
||||
// Spawn-count pointer array bounds.
|
||||
if !ptrInBounds(data, rm.SpawnCountPtrsPtr, rm.SpawnCountCount*spawnPtrEntrySize) {
|
||||
return fmt.Errorf("rengoku: %s: spawnCountPtrs array [0x%X, +%d×4] out of bounds (file %d B)",
|
||||
label, rm.SpawnCountPtrsPtr, rm.SpawnCountCount, fileLen)
|
||||
}
|
||||
|
||||
// Individual spawn-table pointer targets.
|
||||
ptrBase := rm.SpawnTablePtrsPtr
|
||||
for i := uint32(0); i < rm.SpawnTablePtrCount; i++ {
|
||||
tablePtr := binary.LittleEndian.Uint32(data[ptrBase+i*4:])
|
||||
if !ptrInBounds(data, tablePtr, spawnTableByteSize) {
|
||||
return fmt.Errorf("rengoku: %s: spawnTable[%d] at 0x%X is out of bounds (file %d B)",
|
||||
label, i, tablePtr, fileLen)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// countUniqueMonsters iterates all SpawnTables for a RoadMode and returns a
|
||||
// set of unique non-zero monster IDs (from both monsterID1 and monsterID2).
|
||||
func countUniqueMonsters(data []byte, rm rengokuRoadMode) map[uint32]struct{} {
|
||||
ids := make(map[uint32]struct{})
|
||||
ptrBase := rm.SpawnTablePtrsPtr
|
||||
for i := uint32(0); i < rm.SpawnTablePtrCount; i++ {
|
||||
tablePtr := binary.LittleEndian.Uint32(data[ptrBase+i*4:])
|
||||
if !ptrInBounds(data, tablePtr, spawnTableByteSize) {
|
||||
continue
|
||||
}
|
||||
t := data[tablePtr:]
|
||||
id1 := binary.LittleEndian.Uint32(t[0:])
|
||||
id2 := binary.LittleEndian.Uint32(t[8:])
|
||||
if id1 != 0 {
|
||||
ids[id1] = struct{}{}
|
||||
}
|
||||
if id2 != 0 {
|
||||
ids[id2] = struct{}{}
|
||||
}
|
||||
}
|
||||
return ids
|
||||
}
|
||||
182
server/channelserver/rengoku_binary_test.go
Normal file
182
server/channelserver/rengoku_binary_test.go
Normal file
@@ -0,0 +1,182 @@
|
||||
package channelserver
|
||||
|
||||
import (
|
||||
"encoding/binary"
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
// buildRengokuData constructs a minimal but structurally valid rengoku binary
|
||||
// for testing. It contains one floor and one spawn table per road mode.
|
||||
//
|
||||
// Layout:
|
||||
//
|
||||
// 0x00–0x13 header (magic + version + padding)
|
||||
// 0x14–0x2B multiDef RoadMode
|
||||
// 0x2C–0x43 soloDef RoadMode
|
||||
// 0x44–0x5B multiDef FloorStats (24 bytes)
|
||||
// 0x5C–0x63 multiDef spawnTablePtrs (1×u32 = 4 bytes)
|
||||
// 0x64–0x67 multiDef spawnCountPtrs (1×u32 = 4 bytes)
|
||||
// 0x68–0x87 multiDef SpawnTable (32 bytes)
|
||||
// 0x88–0x9F soloDef FloorStats (24 bytes)
|
||||
// 0xA0–0xA3 soloDef spawnTablePtrs (1×u32)
|
||||
// 0xA4–0xA7 soloDef spawnCountPtrs (1×u32)
|
||||
// 0xA8–0xC7 soloDef SpawnTable (32 bytes)
|
||||
func buildRengokuData(multiMonster1, multiMonster2, soloMonster1, soloMonster2 uint32) []byte {
|
||||
buf := make([]byte, 0xC8)
|
||||
|
||||
// Header
|
||||
buf[0] = 'r'
|
||||
buf[1] = 'e'
|
||||
buf[2] = 'f'
|
||||
buf[3] = 0x1A
|
||||
buf[4] = 1 // version
|
||||
|
||||
le := binary.LittleEndian
|
||||
|
||||
// multiDef RoadMode at 0x14
|
||||
le.PutUint32(buf[0x14:], 1) // floorStatsCount
|
||||
le.PutUint32(buf[0x18:], 1) // spawnCountCount
|
||||
le.PutUint32(buf[0x1C:], 1) // spawnTablePtrCount
|
||||
le.PutUint32(buf[0x20:], 0x44) // floorStatsPtr
|
||||
le.PutUint32(buf[0x24:], 0x5C) // spawnTablePtrsPtr
|
||||
le.PutUint32(buf[0x28:], 0x64) // spawnCountPtrsPtr
|
||||
|
||||
// soloDef RoadMode at 0x2C
|
||||
le.PutUint32(buf[0x2C:], 1) // floorStatsCount
|
||||
le.PutUint32(buf[0x30:], 1) // spawnCountCount
|
||||
le.PutUint32(buf[0x34:], 1) // spawnTablePtrCount
|
||||
le.PutUint32(buf[0x38:], 0x88) // floorStatsPtr
|
||||
le.PutUint32(buf[0x3C:], 0xA0) // spawnTablePtrsPtr
|
||||
le.PutUint32(buf[0x40:], 0xA4) // spawnCountPtrsPtr
|
||||
|
||||
// multiDef FloorStats at 0x44 (24 bytes)
|
||||
le.PutUint32(buf[0x44:], 1) // floorNumber
|
||||
|
||||
// multiDef spawnTablePtrs at 0x5C: points to SpawnTable at 0x68
|
||||
le.PutUint32(buf[0x5C:], 0x68)
|
||||
|
||||
// multiDef SpawnTable at 0x68 (32 bytes)
|
||||
le.PutUint32(buf[0x68:], multiMonster1)
|
||||
le.PutUint32(buf[0x70:], multiMonster2)
|
||||
|
||||
// soloDef FloorStats at 0x88 (24 bytes)
|
||||
le.PutUint32(buf[0x88:], 1) // floorNumber
|
||||
|
||||
// soloDef spawnTablePtrs at 0xA0: points to SpawnTable at 0xA8
|
||||
le.PutUint32(buf[0xA0:], 0xA8)
|
||||
|
||||
// soloDef SpawnTable at 0xA8 (32 bytes)
|
||||
le.PutUint32(buf[0xA8:], soloMonster1)
|
||||
le.PutUint32(buf[0xB0:], soloMonster2)
|
||||
|
||||
return buf
|
||||
}
|
||||
|
||||
func TestParseRengokuBinary_ValidMinimal(t *testing.T) {
|
||||
data := buildRengokuData(101, 102, 103, 101) // monster 101 appears in both roads
|
||||
|
||||
info, err := parseRengokuBinary(data)
|
||||
if err != nil {
|
||||
t.Fatalf("parseRengokuBinary: %v", err)
|
||||
}
|
||||
if info.MultiFloors != 1 {
|
||||
t.Errorf("MultiFloors = %d, want 1", info.MultiFloors)
|
||||
}
|
||||
if info.MultiSpawnTables != 1 {
|
||||
t.Errorf("MultiSpawnTables = %d, want 1", info.MultiSpawnTables)
|
||||
}
|
||||
if info.SoloFloors != 1 {
|
||||
t.Errorf("SoloFloors = %d, want 1", info.SoloFloors)
|
||||
}
|
||||
if info.SoloSpawnTables != 1 {
|
||||
t.Errorf("SoloSpawnTables = %d, want 1", info.SoloSpawnTables)
|
||||
}
|
||||
// IDs present: 101, 102, 103 → 3 unique (101 shared between roads)
|
||||
if info.UniqueMonsters != 3 {
|
||||
t.Errorf("UniqueMonsters = %d, want 3", info.UniqueMonsters)
|
||||
}
|
||||
}
|
||||
|
||||
func TestParseRengokuBinary_ZeroMonsterIDsExcluded(t *testing.T) {
|
||||
data := buildRengokuData(0, 55, 0, 0) // only monster 55 is non-zero
|
||||
|
||||
info, err := parseRengokuBinary(data)
|
||||
if err != nil {
|
||||
t.Fatalf("parseRengokuBinary: %v", err)
|
||||
}
|
||||
if info.UniqueMonsters != 1 {
|
||||
t.Errorf("UniqueMonsters = %d, want 1 (zeros excluded)", info.UniqueMonsters)
|
||||
}
|
||||
}
|
||||
|
||||
func TestParseRengokuBinary_Errors(t *testing.T) {
|
||||
validData := buildRengokuData(1, 2, 3, 4)
|
||||
|
||||
cases := []struct {
|
||||
name string
|
||||
data []byte
|
||||
wantErr string
|
||||
}{
|
||||
{
|
||||
name: "too_small",
|
||||
data: make([]byte, 10),
|
||||
wantErr: "too small",
|
||||
},
|
||||
{
|
||||
name: "bad_magic",
|
||||
data: func() []byte {
|
||||
d := make([]byte, len(validData))
|
||||
copy(d, validData)
|
||||
d[0] = 0xFF
|
||||
return d
|
||||
}(),
|
||||
wantErr: "invalid magic",
|
||||
},
|
||||
{
|
||||
name: "wrong_version",
|
||||
data: func() []byte {
|
||||
d := make([]byte, len(validData))
|
||||
copy(d, validData)
|
||||
d[4] = 2
|
||||
return d
|
||||
}(),
|
||||
wantErr: "unexpected version",
|
||||
},
|
||||
{
|
||||
name: "floorStats_ptr_out_of_bounds",
|
||||
data: func() []byte {
|
||||
d := make([]byte, len(validData))
|
||||
copy(d, validData)
|
||||
// Set multiDef floorStatsPtr to beyond file end
|
||||
binary.LittleEndian.PutUint32(d[0x20:], uint32(len(d)+1))
|
||||
return d
|
||||
}(),
|
||||
wantErr: "out of bounds",
|
||||
},
|
||||
{
|
||||
name: "spawnTable_ptr_target_out_of_bounds",
|
||||
data: func() []byte {
|
||||
d := make([]byte, len(validData))
|
||||
copy(d, validData)
|
||||
// Point the spawn table pointer to just before the end so SpawnTable
|
||||
// (32 bytes) would extend beyond the file.
|
||||
binary.LittleEndian.PutUint32(d[0x5C:], uint32(len(d)-4))
|
||||
return d
|
||||
}(),
|
||||
wantErr: "out of bounds",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range cases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
_, err := parseRengokuBinary(tc.data)
|
||||
if err == nil {
|
||||
t.Fatal("expected error, got nil")
|
||||
}
|
||||
if !strings.Contains(err.Error(), tc.wantErr) {
|
||||
t.Errorf("error %q does not contain %q", err.Error(), tc.wantErr)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
270
server/channelserver/rengoku_build.go
Normal file
270
server/channelserver/rengoku_build.go
Normal file
@@ -0,0 +1,270 @@
|
||||
package channelserver
|
||||
|
||||
/*
|
||||
JSON-based rengoku_data.bin builder.
|
||||
|
||||
Operators can place rengoku_data.json in the bin/ directory instead of
|
||||
(or alongside) rengoku_data.bin. When the JSON file is found it takes
|
||||
precedence: it is parsed, validated, assembled into the raw binary layout,
|
||||
and ECD-encrypted before being cached. The .bin file is used as a fallback.
|
||||
|
||||
Binary layout produced by BuildRengokuBinary:
|
||||
0x00–0x13 header (20 bytes: magic + version + zeros)
|
||||
0x14–0x2B multiDef RoadMode (24 bytes)
|
||||
0x2C–0x43 soloDef RoadMode (24 bytes)
|
||||
-- multi road data --
|
||||
floorStats[] (floorStatsCount × 24 bytes)
|
||||
spawnTablePtrs[] (spawnTablePtrCount × 4 bytes)
|
||||
spawnCountPtrs[] (spawnTablePtrCount × 4 bytes, zeroed)
|
||||
spawnTables[] (spawnTablePtrCount × 32 bytes)
|
||||
-- solo road data -- (same sub-layout)
|
||||
*/
|
||||
|
||||
import (
|
||||
"encoding/binary"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"math"
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
"erupe-ce/common/decryption"
|
||||
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
// ─── JSON schema ────────────────────────────────────────────────────────────
|
||||
|
||||
// RengokuConfig is the top-level JSON structure for rengoku_data.json.
type RengokuConfig struct {
	MultiRoad RoadConfig `json:"multi_road"` // multiplayer Hunting Road definition
	SoloRoad  RoadConfig `json:"solo_road"`  // solo Hunting Road definition
}
|
||||
|
||||
// RoadConfig describes one road mode (multi or solo) with its floors and
// spawn tables. Floors reference spawn tables by zero-based index.
type RoadConfig struct {
	Floors      []FloorConfig      `json:"floors"`       // ordered floor definitions
	SpawnTables []SpawnTableConfig `json:"spawn_tables"` // referenced by FloorConfig.SpawnTableIndex
}
|
||||
|
||||
// FloorConfig describes one floor within a road mode.
//
//   - SpawnTableIndex: zero-based index into this road's SpawnTables slice,
//     selecting which monster configuration is active on this floor.
//   - PointMulti1/2: point multipliers applied to rewards on this floor.
//   - FinalLoop: non-zero on the last floor of a loop cycle.
type FloorConfig struct {
	FloorNumber     uint32  `json:"floor_number"`
	SpawnTableIndex uint32  `json:"spawn_table_index"`
	Unk0            uint32  `json:"unk0,omitempty"` // meaning unknown; written verbatim at record offset +8
	PointMulti1     float32 `json:"point_multi_1"`
	PointMulti2     float32 `json:"point_multi_2"`
	FinalLoop       uint32  `json:"final_loop,omitempty"`
}
|
||||
|
||||
// SpawnTableConfig describes the two monsters that appear together on a floor.
// The fields map 1:1, in declaration order, onto the eight u32 slots of a
// 32-byte SpawnTable record in the binary.
type SpawnTableConfig struct {
	Monster1ID      uint32 `json:"monster1_id"`
	Monster1Variant uint32 `json:"monster1_variant,omitempty"`
	Monster2ID      uint32 `json:"monster2_id"`
	Monster2Variant uint32 `json:"monster2_variant,omitempty"`
	StatTable       uint32 `json:"stat_table,omitempty"`
	MapZoneOverride uint32 `json:"map_zone_override,omitempty"`
	SpawnWeighting  uint32 `json:"spawn_weighting,omitempty"`
	AdditionalFlag  uint32 `json:"additional_flag,omitempty"`
}
|
||||
|
||||
// ─── Builder ─────────────────────────────────────────────────────────────────
|
||||
|
||||
// BuildRengokuBinary assembles a raw (unencrypted, uncompressed) rengoku
// binary from a RengokuConfig. The result can be passed to EncodeECD and
// served directly to clients.
//
// Returns an error only when validateRengokuConfig rejects the config
// (out-of-range spawn_table_index); a nil error means every section was
// written. The layout matches the file-header comment: fixed header + two
// RoadMode structs, then the multi road's data sections, then the solo
// road's, each packed back to back.
func BuildRengokuBinary(cfg RengokuConfig) ([]byte, error) {
	if err := validateRengokuConfig(cfg); err != nil {
		return nil, err
	}

	// ── Offset plan ──────────────────────────────────────────────────────────
	// Fixed regions: header (0x14) + two RoadModes (2×24) = 0x44.
	// Every section offset below is derived from the previous section's
	// offset + size, so the order of these assignments is load-bearing.
	const dataStart = uint32(rengokuMinSize) // 0x44

	// Multi road sections: floor stats, spawn-table pointers, spawn-count
	// pointers (left zeroed), spawn tables.
	mFloorOff := dataStart
	mFloorSz := uint32(len(cfg.MultiRoad.Floors)) * floorStatsByteSize
	mPtrsOff := mFloorOff + mFloorSz
	mPtrsSz := uint32(len(cfg.MultiRoad.SpawnTables)) * spawnPtrEntrySize
	mCntOff := mPtrsOff + mPtrsSz
	mCntSz := uint32(len(cfg.MultiRoad.SpawnTables)) * spawnPtrEntrySize
	mTablesOff := mCntOff + mCntSz
	mTablesSz := uint32(len(cfg.MultiRoad.SpawnTables)) * spawnTableByteSize

	// Solo road sections (appended directly after multi)
	sFloorOff := mTablesOff + mTablesSz
	sFloorSz := uint32(len(cfg.SoloRoad.Floors)) * floorStatsByteSize
	sPtrsOff := sFloorOff + sFloorSz
	sPtrsSz := uint32(len(cfg.SoloRoad.SpawnTables)) * spawnPtrEntrySize
	sCntOff := sPtrsOff + sPtrsSz
	sCntSz := uint32(len(cfg.SoloRoad.SpawnTables)) * spawnPtrEntrySize
	sTablesOff := sCntOff + sCntSz
	sTablesSz := uint32(len(cfg.SoloRoad.SpawnTables)) * spawnTableByteSize

	totalSize := sTablesOff + sTablesSz
	buf := make([]byte, totalSize)

	// ── Header ───────────────────────────────────────────────────────────────
	buf[0], buf[1], buf[2], buf[3] = 'r', 'e', 'f', 0x1A
	buf[4] = 1 // version; header bytes 0x05–0x13 stay zero

	le := binary.LittleEndian

	// ── RoadMode structs ─────────────────────────────────────────────────────
	writeRoadMode(buf, 0x14, le, RoadModeFields{
		FloorCount:   uint32(len(cfg.MultiRoad.Floors)),
		SpawnCount:   uint32(len(cfg.MultiRoad.SpawnTables)),
		TablePtrCnt:  uint32(len(cfg.MultiRoad.SpawnTables)),
		FloorPtr:     mFloorOff,
		TablePtrsPtr: mPtrsOff,
		CountPtrsPtr: mCntOff,
	})
	writeRoadMode(buf, 0x2C, le, RoadModeFields{
		FloorCount:   uint32(len(cfg.SoloRoad.Floors)),
		SpawnCount:   uint32(len(cfg.SoloRoad.SpawnTables)),
		TablePtrCnt:  uint32(len(cfg.SoloRoad.SpawnTables)),
		FloorPtr:     sFloorOff,
		TablePtrsPtr: sPtrsOff,
		CountPtrsPtr: sCntOff,
	})

	// ── Data sections ────────────────────────────────────────────────────────
	writeFloors(buf, cfg.MultiRoad.Floors, mFloorOff, le)
	writeSpawnSection(buf, cfg.MultiRoad.SpawnTables, mPtrsOff, mTablesOff, le)

	writeFloors(buf, cfg.SoloRoad.Floors, sFloorOff, le)
	writeSpawnSection(buf, cfg.SoloRoad.SpawnTables, sPtrsOff, sTablesOff, le)

	return buf, nil
}
|
||||
|
||||
// RoadModeFields carries the computed field values for one RoadMode struct:
// three record counts followed by the three section pointers (file offsets),
// in the same order writeRoadMode serialises them.
type RoadModeFields struct {
	FloorCount, SpawnCount, TablePtrCnt  uint32 // record counts
	FloorPtr, TablePtrsPtr, CountPtrsPtr uint32 // section offsets within the file
}
|
||||
|
||||
func writeRoadMode(buf []byte, offset int, le binary.ByteOrder, f RoadModeFields) {
|
||||
le.PutUint32(buf[offset:], f.FloorCount)
|
||||
le.PutUint32(buf[offset+4:], f.SpawnCount)
|
||||
le.PutUint32(buf[offset+8:], f.TablePtrCnt)
|
||||
le.PutUint32(buf[offset+12:], f.FloorPtr)
|
||||
le.PutUint32(buf[offset+16:], f.TablePtrsPtr)
|
||||
le.PutUint32(buf[offset+20:], f.CountPtrsPtr)
|
||||
}
|
||||
|
||||
func writeFloors(buf []byte, floors []FloorConfig, base uint32, le binary.ByteOrder) {
|
||||
for i, f := range floors {
|
||||
off := base + uint32(i)*floorStatsByteSize
|
||||
le.PutUint32(buf[off:], f.FloorNumber)
|
||||
le.PutUint32(buf[off+4:], f.SpawnTableIndex)
|
||||
le.PutUint32(buf[off+8:], f.Unk0)
|
||||
le.PutUint32(buf[off+12:], math.Float32bits(f.PointMulti1))
|
||||
le.PutUint32(buf[off+16:], math.Float32bits(f.PointMulti2))
|
||||
le.PutUint32(buf[off+20:], f.FinalLoop)
|
||||
}
|
||||
}
|
||||
|
||||
func writeSpawnSection(buf []byte, tables []SpawnTableConfig, ptrsBase, tablesBase uint32, le binary.ByteOrder) {
|
||||
for i, t := range tables {
|
||||
tableOff := tablesBase + uint32(i)*spawnTableByteSize
|
||||
// Pointer entry
|
||||
le.PutUint32(buf[ptrsBase+uint32(i)*spawnPtrEntrySize:], tableOff)
|
||||
// SpawnTable (32 bytes)
|
||||
le.PutUint32(buf[tableOff:], t.Monster1ID)
|
||||
le.PutUint32(buf[tableOff+4:], t.Monster1Variant)
|
||||
le.PutUint32(buf[tableOff+8:], t.Monster2ID)
|
||||
le.PutUint32(buf[tableOff+12:], t.Monster2Variant)
|
||||
le.PutUint32(buf[tableOff+16:], t.StatTable)
|
||||
le.PutUint32(buf[tableOff+20:], t.MapZoneOverride)
|
||||
le.PutUint32(buf[tableOff+24:], t.SpawnWeighting)
|
||||
le.PutUint32(buf[tableOff+28:], t.AdditionalFlag)
|
||||
}
|
||||
}
|
||||
|
||||
// validateRengokuConfig checks that all spawn_table_index references are
|
||||
// within range for both road modes.
|
||||
func validateRengokuConfig(cfg RengokuConfig) error {
|
||||
for _, road := range []struct {
|
||||
name string
|
||||
r RoadConfig
|
||||
}{{"multi_road", cfg.MultiRoad}, {"solo_road", cfg.SoloRoad}} {
|
||||
n := len(road.r.SpawnTables)
|
||||
for i, f := range road.r.Floors {
|
||||
if int(f.SpawnTableIndex) >= n {
|
||||
return fmt.Errorf("rengoku: %s floor %d: spawn_table_index %d out of range (have %d tables)",
|
||||
road.name, i, f.SpawnTableIndex, n)
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// ─── Shared helper ───────────────────────────────────────────────────────────
|
||||
|
||||
// encodeRengokuECD wraps decryption.EncodeECD with error logging.
//
// The error is both logged here and returned so callers can abort without
// logging it a second time (see loadRengokuFromJSON, which returns nil on a
// non-nil error from this helper).
func encodeRengokuECD(raw []byte, logger *zap.Logger) ([]byte, error) {
	enc, err := decryption.EncodeECD(raw, decryption.DefaultECDKey)
	if err != nil {
		logger.Error("rengoku: ECD encryption failed", zap.Error(err))
	}
	return enc, err
}
|
||||
|
||||
// ─── JSON loader ─────────────────────────────────────────────────────────────
|
||||
|
||||
// loadRengokuFromJSON attempts to load rengoku configuration from
// rengoku_data.json in binPath. It returns the ECD-encrypted binary ready for
// caching, or nil if the file is absent or cannot be processed.
//
// Every failure after the initial read is logged; a nil return always means
// "fall back to the .bin source".
func loadRengokuFromJSON(binPath string, logger *zap.Logger) []byte {
	path := filepath.Join(binPath, "rengoku_data.json")
	raw, err := os.ReadFile(path)
	if err != nil {
		return nil // file absent — not an error
	}

	var cfg RengokuConfig
	if err := json.Unmarshal(raw, &cfg); err != nil {
		logger.Error("rengoku_data.json: JSON parse error",
			zap.String("path", path), zap.Error(err))
		return nil
	}

	// Assemble the raw binary; this also runs validateRengokuConfig.
	bin, err := BuildRengokuBinary(cfg)
	if err != nil {
		logger.Error("rengoku_data.json: binary build failed",
			zap.String("path", path), zap.Error(err))
		return nil
	}

	// Validate the freshly built binary (should always pass, but good to confirm).
	info, parseErr := parseRengokuBinary(bin)
	if parseErr != nil {
		logger.Error("rengoku_data.json: structural validation of built binary failed",
			zap.String("path", path), zap.Error(parseErr))
		return nil
	}

	enc, err := encodeRengokuECD(bin, logger)
	if err != nil {
		return nil // already logged by encodeRengokuECD
	}

	logger.Info("Hunting Road config (from JSON)",
		zap.Int("multi_floors", info.MultiFloors),
		zap.Int("multi_spawn_tables", info.MultiSpawnTables),
		zap.Int("solo_floors", info.SoloFloors),
		zap.Int("solo_spawn_tables", info.SoloSpawnTables),
		zap.Int("unique_monsters", info.UniqueMonsters),
	)
	logger.Info("Loaded rengoku_data.json", zap.Int("bytes", len(enc)))
	return enc
}
|
||||
216
server/channelserver/rengoku_build_test.go
Normal file
216
server/channelserver/rengoku_build_test.go
Normal file
@@ -0,0 +1,216 @@
|
||||
package channelserver
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"math"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
// sampleRengokuConfig returns a small but complete RengokuConfig for tests.
|
||||
func sampleRengokuConfig() RengokuConfig {
|
||||
spawnTables := []SpawnTableConfig{
|
||||
{Monster1ID: 101, Monster1Variant: 0, Monster2ID: 102, Monster2Variant: 1,
|
||||
StatTable: 3, SpawnWeighting: 10},
|
||||
{Monster1ID: 103, Monster1Variant: 2, Monster2ID: 104, Monster2Variant: 0,
|
||||
SpawnWeighting: 20},
|
||||
}
|
||||
floors := []FloorConfig{
|
||||
{FloorNumber: 1, SpawnTableIndex: 0, PointMulti1: 1.0, PointMulti2: 1.5},
|
||||
{FloorNumber: 2, SpawnTableIndex: 1, PointMulti1: 1.2, PointMulti2: 2.0},
|
||||
{FloorNumber: 3, SpawnTableIndex: 0, PointMulti1: 1.5, PointMulti2: 2.5, FinalLoop: 1},
|
||||
}
|
||||
soloFloors := []FloorConfig{
|
||||
{FloorNumber: 1, SpawnTableIndex: 0, PointMulti1: 1.0, PointMulti2: 1.5},
|
||||
{FloorNumber: 2, SpawnTableIndex: 0, PointMulti1: 1.2, PointMulti2: 2.0},
|
||||
}
|
||||
return RengokuConfig{
|
||||
MultiRoad: RoadConfig{Floors: floors, SpawnTables: spawnTables},
|
||||
SoloRoad: RoadConfig{Floors: soloFloors, SpawnTables: spawnTables[1:]},
|
||||
}
|
||||
}
|
||||
|
||||
// TestBuildRengokuBinary_RoundTrip builds a binary from a config and verifies
|
||||
// that parseRengokuBinary accepts it and reports the expected summary.
|
||||
func TestBuildRengokuBinary_RoundTrip(t *testing.T) {
|
||||
cfg := sampleRengokuConfig()
|
||||
|
||||
bin, err := BuildRengokuBinary(cfg)
|
||||
if err != nil {
|
||||
t.Fatalf("BuildRengokuBinary: %v", err)
|
||||
}
|
||||
|
||||
info, err := parseRengokuBinary(bin)
|
||||
if err != nil {
|
||||
t.Fatalf("parseRengokuBinary on built binary: %v", err)
|
||||
}
|
||||
|
||||
if info.MultiFloors != len(cfg.MultiRoad.Floors) {
|
||||
t.Errorf("MultiFloors = %d, want %d", info.MultiFloors, len(cfg.MultiRoad.Floors))
|
||||
}
|
||||
if info.MultiSpawnTables != len(cfg.MultiRoad.SpawnTables) {
|
||||
t.Errorf("MultiSpawnTables = %d, want %d", info.MultiSpawnTables, len(cfg.MultiRoad.SpawnTables))
|
||||
}
|
||||
if info.SoloFloors != len(cfg.SoloRoad.Floors) {
|
||||
t.Errorf("SoloFloors = %d, want %d", info.SoloFloors, len(cfg.SoloRoad.Floors))
|
||||
}
|
||||
if info.SoloSpawnTables != len(cfg.SoloRoad.SpawnTables) {
|
||||
t.Errorf("SoloSpawnTables = %d, want %d", info.SoloSpawnTables, len(cfg.SoloRoad.SpawnTables))
|
||||
}
|
||||
// Unique monsters: multi has 101,102,103,104; solo has 103,104 → 4 total
|
||||
if info.UniqueMonsters != 4 {
|
||||
t.Errorf("UniqueMonsters = %d, want 4", info.UniqueMonsters)
|
||||
}
|
||||
}
|
||||
|
||||
// TestBuildRengokuBinary_FloatFields verifies that PointMulti1/2 values
// survive the binary encoding intact.
func TestBuildRengokuBinary_FloatFields(t *testing.T) {
	// 1.25 and 3.75 are exactly representable in float32, so the direct
	// equality comparisons at the bottom are safe.
	cfg := RengokuConfig{
		MultiRoad: RoadConfig{
			Floors: []FloorConfig{
				{FloorNumber: 1, SpawnTableIndex: 0, PointMulti1: 1.25, PointMulti2: 3.75},
			},
			SpawnTables: []SpawnTableConfig{{Monster1ID: 1}},
		},
		SoloRoad: RoadConfig{
			Floors:      []FloorConfig{{FloorNumber: 1, SpawnTableIndex: 0}},
			SpawnTables: []SpawnTableConfig{{Monster1ID: 2}},
		},
	}

	bin, err := BuildRengokuBinary(cfg)
	if err != nil {
		t.Fatalf("BuildRengokuBinary: %v", err)
	}

	// Re-parse the binary and check that we can read back the float fields.
	// The floor stats for multiDef start at rengokuMinSize (0x44).
	// Layout: floorNumber(4) + spawnTableIndex(4) + unk0(4) + pointMulti1(4) + pointMulti2(4)
	// The u32s are assembled by hand (little-endian byte order) because this
	// test file does not import encoding/binary.
	floorBase := rengokuMinSize // 0x44
	pm1Bits := uint32(bin[floorBase+12]) | uint32(bin[floorBase+13])<<8 |
		uint32(bin[floorBase+14])<<16 | uint32(bin[floorBase+15])<<24
	pm2Bits := uint32(bin[floorBase+16]) | uint32(bin[floorBase+17])<<8 |
		uint32(bin[floorBase+18])<<16 | uint32(bin[floorBase+19])<<24

	if got := math.Float32frombits(pm1Bits); got != 1.25 {
		t.Errorf("PointMulti1 = %f, want 1.25", got)
	}
	if got := math.Float32frombits(pm2Bits); got != 3.75 {
		t.Errorf("PointMulti2 = %f, want 3.75", got)
	}
}
|
||||
|
||||
// TestBuildRengokuBinary_ValidationErrors verifies that out-of-range
|
||||
// spawn_table_index values are caught before the binary is built.
|
||||
func TestBuildRengokuBinary_ValidationErrors(t *testing.T) {
|
||||
cases := []struct {
|
||||
name string
|
||||
cfg RengokuConfig
|
||||
wantErr string
|
||||
}{
|
||||
{
|
||||
name: "multi_index_out_of_range",
|
||||
cfg: RengokuConfig{
|
||||
MultiRoad: RoadConfig{
|
||||
Floors: []FloorConfig{{FloorNumber: 1, SpawnTableIndex: 5}},
|
||||
SpawnTables: []SpawnTableConfig{{Monster1ID: 1}},
|
||||
},
|
||||
SoloRoad: RoadConfig{
|
||||
Floors: []FloorConfig{{FloorNumber: 1, SpawnTableIndex: 0}},
|
||||
SpawnTables: []SpawnTableConfig{{Monster1ID: 2}},
|
||||
},
|
||||
},
|
||||
wantErr: "multi_road",
|
||||
},
|
||||
{
|
||||
name: "solo_index_out_of_range",
|
||||
cfg: RengokuConfig{
|
||||
MultiRoad: RoadConfig{
|
||||
Floors: []FloorConfig{{FloorNumber: 1, SpawnTableIndex: 0}},
|
||||
SpawnTables: []SpawnTableConfig{{Monster1ID: 1}},
|
||||
},
|
||||
SoloRoad: RoadConfig{
|
||||
Floors: []FloorConfig{{FloorNumber: 1, SpawnTableIndex: 99}},
|
||||
SpawnTables: []SpawnTableConfig{{Monster1ID: 2}},
|
||||
},
|
||||
},
|
||||
wantErr: "solo_road",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range cases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
_, err := BuildRengokuBinary(tc.cfg)
|
||||
if err == nil {
|
||||
t.Fatal("expected error, got nil")
|
||||
}
|
||||
if !strings.Contains(err.Error(), tc.wantErr) {
|
||||
t.Errorf("error %q does not contain %q", err.Error(), tc.wantErr)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// TestLoadRengokuBinary_BinPreferredOverJSON writes both a JSON file and a
|
||||
// .bin file and verifies that the .bin source is used (consistent with the
|
||||
// quest and scenario loaders).
|
||||
func TestLoadRengokuBinary_BinPreferredOverJSON(t *testing.T) {
|
||||
dir := t.TempDir()
|
||||
logger, _ := zap.NewDevelopment()
|
||||
|
||||
// Write a valid rengoku_data.json (would produce a much larger binary).
|
||||
cfg := sampleRengokuConfig()
|
||||
jsonBytes, err := json.Marshal(cfg)
|
||||
if err != nil {
|
||||
t.Fatalf("marshal: %v", err)
|
||||
}
|
||||
if err := os.WriteFile(filepath.Join(dir, "rengoku_data.json"), jsonBytes, 0644); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
// Write a minimal valid-magic .bin — should be preferred over JSON.
|
||||
binData := make([]byte, 16) // 16-byte ECD header, zero payload
|
||||
binData[0], binData[1], binData[2], binData[3] = 0x65, 0x63, 0x64, 0x1A
|
||||
if err := os.WriteFile(filepath.Join(dir, "rengoku_data.bin"), binData, 0644); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
result := loadRengokuBinary(dir, logger)
|
||||
if result == nil {
|
||||
t.Fatal("expected non-nil result")
|
||||
}
|
||||
// The JSON-built binary would be much larger; 16 bytes confirms .bin was used.
|
||||
if len(result) != 16 {
|
||||
t.Errorf("result is %d bytes — looks like JSON was used instead of .bin", len(result))
|
||||
}
|
||||
}
|
||||
|
||||
// TestLoadRengokuBinary_JSONFallbackWhenNoBin verifies that when no .bin file
|
||||
// is present, loadRengokuBinary falls back to rengoku_data.json.
|
||||
func TestLoadRengokuBinary_JSONFallbackWhenNoBin(t *testing.T) {
|
||||
dir := t.TempDir()
|
||||
logger, _ := zap.NewDevelopment()
|
||||
|
||||
cfg := sampleRengokuConfig()
|
||||
jsonBytes, err := json.Marshal(cfg)
|
||||
if err != nil {
|
||||
t.Fatalf("marshal: %v", err)
|
||||
}
|
||||
if err := os.WriteFile(filepath.Join(dir, "rengoku_data.json"), jsonBytes, 0644); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
result := loadRengokuBinary(dir, logger)
|
||||
if result == nil {
|
||||
t.Fatal("expected fallback to JSON, got nil")
|
||||
}
|
||||
// JSON-built result is much larger than 16 bytes.
|
||||
if len(result) <= 16 {
|
||||
t.Errorf("result is %d bytes — JSON fallback likely did not run", len(result))
|
||||
}
|
||||
}
|
||||
@@ -2,6 +2,7 @@ package channelserver
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"errors"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
@@ -49,10 +50,24 @@ func (r *CharacterRepository) LoadColumn(charID uint32, column string) ([]byte,
|
||||
return data, err
|
||||
}
|
||||
|
||||
// ErrCharacterNotFound is returned by write methods when no character row is matched.
|
||||
var ErrCharacterNotFound = errors.New("character not found")
|
||||
|
||||
// SaveColumn writes a single []byte column by character ID.
|
||||
// Returns ErrCharacterNotFound if no row was updated (character does not exist).
|
||||
func (r *CharacterRepository) SaveColumn(charID uint32, column string, data []byte) error {
|
||||
_, err := r.db.Exec("UPDATE characters SET "+column+"=$1 WHERE id=$2", data, charID)
|
||||
return err
|
||||
result, err := r.db.Exec("UPDATE characters SET "+column+"=$1 WHERE id=$2", data, charID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
n, err := result.RowsAffected()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if n == 0 {
|
||||
return fmt.Errorf("SaveColumn %s for char %d: %w", column, charID, ErrCharacterNotFound)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// ReadInt reads a single integer column (0 for NULL) by character ID.
|
||||
@@ -226,13 +241,26 @@ func (r *CharacterRepository) ReadGuildPostChecked(charID uint32) (time.Time, er
|
||||
// When rastaID is 0, only the mercenary blob is saved — the existing rasta_id
|
||||
// (typically NULL for characters without a mercenary) is preserved. Writing 0
|
||||
// would pollute GetMercenaryLoans queries that match on pact_id.
|
||||
// Returns ErrCharacterNotFound if no row was updated.
|
||||
func (r *CharacterRepository) SaveMercenary(charID uint32, data []byte, rastaID uint32) error {
|
||||
var result sql.Result
|
||||
var err error
|
||||
if rastaID == 0 {
|
||||
_, err := r.db.Exec("UPDATE characters SET savemercenary=$1 WHERE id=$2", data, charID)
|
||||
result, err = r.db.Exec("UPDATE characters SET savemercenary=$1 WHERE id=$2", data, charID)
|
||||
} else {
|
||||
result, err = r.db.Exec("UPDATE characters SET savemercenary=$1, rasta_id=$2 WHERE id=$3", data, rastaID, charID)
|
||||
}
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err := r.db.Exec("UPDATE characters SET savemercenary=$1, rasta_id=$2 WHERE id=$3", data, rastaID, charID)
|
||||
return err
|
||||
n, err := result.RowsAffected()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if n == 0 {
|
||||
return fmt.Errorf("SaveMercenary for char %d: %w", charID, ErrCharacterNotFound)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// UpdateGCPAndPact updates gcp and pact_id atomically.
|
||||
@@ -241,6 +269,38 @@ func (r *CharacterRepository) UpdateGCPAndPact(charID uint32, gcp uint32, pactID
|
||||
return err
|
||||
}
|
||||
|
||||
// SavedataBackup holds one row from the savedata_backups table.
|
||||
type SavedataBackup struct {
|
||||
Slot int
|
||||
Data []byte
|
||||
SavedAt time.Time
|
||||
}
|
||||
|
||||
// LoadBackupsByRecency returns all backup slots for a character, ordered
|
||||
// most-recent first. Returns an empty (non-nil) slice if no backups exist.
|
||||
func (r *CharacterRepository) LoadBackupsByRecency(charID uint32) ([]SavedataBackup, error) {
|
||||
rows, err := r.db.Query(
|
||||
`SELECT slot, savedata, saved_at FROM savedata_backups
|
||||
WHERE char_id = $1
|
||||
ORDER BY saved_at DESC`,
|
||||
charID,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close() //nolint:errcheck // rows.Close error is non-actionable here
|
||||
|
||||
backups := make([]SavedataBackup, 0)
|
||||
for rows.Next() {
|
||||
var b SavedataBackup
|
||||
if err := rows.Scan(&b.Slot, &b.Data, &b.SavedAt); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
backups = append(backups, b)
|
||||
}
|
||||
return backups, rows.Err()
|
||||
}
|
||||
|
||||
// SaveBackup upserts a savedata snapshot into the rotating backup table.
|
||||
func (r *CharacterRepository) SaveBackup(charID uint32, slot int, data []byte) error {
|
||||
_, err := r.db.Exec(`
|
||||
|
||||
@@ -1,6 +1,9 @@
|
||||
package channelserver
|
||||
|
||||
import (
	"database/sql"
	"encoding/json"
	"errors"
	"time"

	"github.com/jmoiron/sqlx"
)
||||
|
||||
@@ -75,3 +78,149 @@ func (r *DivaRepository) GetTotalPoints(eventID uint32) (int64, int64, error) {
|
||||
}
|
||||
return qp, bp, nil
|
||||
}
|
||||
|
||||
// GetBeads returns all active bead types from the diva_beads table.
|
||||
func (r *DivaRepository) GetBeads() ([]int, error) {
|
||||
var types []int
|
||||
err := r.db.Select(&types, "SELECT type FROM diva_beads ORDER BY id")
|
||||
return types, err
|
||||
}
|
||||
|
||||
// AssignBead inserts a bead assignment for a character, replacing any existing one for that bead slot.
|
||||
func (r *DivaRepository) AssignBead(characterID uint32, beadIndex int, expiry time.Time) error {
|
||||
_, err := r.db.Exec(`
|
||||
INSERT INTO diva_beads_assignment (character_id, bead_index, expiry)
|
||||
VALUES ($1, $2, $3)
|
||||
ON CONFLICT DO NOTHING`,
|
||||
characterID, beadIndex, expiry)
|
||||
return err
|
||||
}
|
||||
|
||||
// AddBeadPoints records a bead point contribution for a character.
|
||||
func (r *DivaRepository) AddBeadPoints(characterID uint32, beadIndex int, points int) error {
|
||||
_, err := r.db.Exec(
|
||||
"INSERT INTO diva_beads_points (character_id, bead_index, points) VALUES ($1, $2, $3)",
|
||||
characterID, beadIndex, points)
|
||||
return err
|
||||
}
|
||||
|
||||
// GetCharacterBeadPoints returns the summed points per bead_index for a character.
|
||||
func (r *DivaRepository) GetCharacterBeadPoints(characterID uint32) (map[int]int, error) {
|
||||
rows, err := r.db.Query(
|
||||
"SELECT bead_index, COALESCE(SUM(points),0) FROM diva_beads_points WHERE character_id=$1 GROUP BY bead_index",
|
||||
characterID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer func() { _ = rows.Close() }()
|
||||
result := make(map[int]int)
|
||||
for rows.Next() {
|
||||
var idx, pts int
|
||||
if err := rows.Scan(&idx, &pts); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
result[idx] = pts
|
||||
}
|
||||
return result, rows.Err()
|
||||
}
|
||||
|
||||
// GetTotalBeadPoints returns the sum of all points across all characters and bead slots.
|
||||
func (r *DivaRepository) GetTotalBeadPoints() (int64, error) {
|
||||
var total int64
|
||||
err := r.db.QueryRow("SELECT COALESCE(SUM(points),0) FROM diva_beads_points").Scan(&total)
|
||||
return total, err
|
||||
}
|
||||
|
||||
// GetTopBeadPerDay returns the bead_index with the most points contributed on day offset `day`
|
||||
// (0 = today, 1 = yesterday, etc.). Returns 0 if no data exists for that day.
|
||||
func (r *DivaRepository) GetTopBeadPerDay(day int) (int, error) {
|
||||
var beadIndex int
|
||||
err := r.db.QueryRow(`
|
||||
SELECT bead_index
|
||||
FROM diva_beads_points
|
||||
WHERE timestamp >= (NOW() - ($1 + 1) * INTERVAL '1 day')
|
||||
AND timestamp < (NOW() - $1 * INTERVAL '1 day')
|
||||
GROUP BY bead_index
|
||||
ORDER BY SUM(points) DESC
|
||||
LIMIT 1`,
|
||||
day).Scan(&beadIndex)
|
||||
if err != nil {
|
||||
return 0, nil // no data for this day is not an error
|
||||
}
|
||||
return beadIndex, nil
|
||||
}
|
||||
|
||||
// CleanupBeads deletes all rows from diva_beads, diva_beads_assignment, and diva_beads_points.
|
||||
func (r *DivaRepository) CleanupBeads() error {
|
||||
if _, err := r.db.Exec("DELETE FROM diva_beads_points"); err != nil {
|
||||
return err
|
||||
}
|
||||
if _, err := r.db.Exec("DELETE FROM diva_beads_assignment"); err != nil {
|
||||
return err
|
||||
}
|
||||
_, err := r.db.Exec("DELETE FROM diva_beads")
|
||||
return err
|
||||
}
|
||||
|
||||
// GetPersonalPrizes returns all prize rows with type='personal', ordered by points_req.
|
||||
func (r *DivaRepository) GetPersonalPrizes() ([]DivaPrize, error) {
|
||||
return r.getPrizesByType("personal")
|
||||
}
|
||||
|
||||
// GetGuildPrizes returns all prize rows with type='guild', ordered by points_req.
|
||||
func (r *DivaRepository) GetGuildPrizes() ([]DivaPrize, error) {
|
||||
return r.getPrizesByType("guild")
|
||||
}
|
||||
|
||||
func (r *DivaRepository) getPrizesByType(prizeType string) ([]DivaPrize, error) {
|
||||
rows, err := r.db.Query(`
|
||||
SELECT id, type, points_req, item_type, item_id, quantity, gr, repeatable
|
||||
FROM diva_prizes
|
||||
WHERE type=$1
|
||||
ORDER BY points_req`,
|
||||
prizeType)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer func() { _ = rows.Close() }()
|
||||
var prizes []DivaPrize
|
||||
for rows.Next() {
|
||||
var p DivaPrize
|
||||
if err := rows.Scan(&p.ID, &p.Type, &p.PointsReq, &p.ItemType, &p.ItemID, &p.Quantity, &p.GR, &p.Repeatable); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
prizes = append(prizes, p)
|
||||
}
|
||||
return prizes, rows.Err()
|
||||
}
|
||||
|
||||
// GetCharacterInterceptionPoints returns the interception_points JSON map from guild_characters.
|
||||
func (r *DivaRepository) GetCharacterInterceptionPoints(characterID uint32) (map[string]int, error) {
|
||||
var raw []byte
|
||||
err := r.db.QueryRow(
|
||||
"SELECT interception_points FROM guild_characters WHERE char_id=$1",
|
||||
characterID).Scan(&raw)
|
||||
if err != nil {
|
||||
return map[string]int{}, nil
|
||||
}
|
||||
result := make(map[string]int)
|
||||
if len(raw) > 0 {
|
||||
if err := json.Unmarshal(raw, &result); err != nil {
|
||||
return map[string]int{}, nil
|
||||
}
|
||||
}
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// AddInterceptionPoints increments the interception points for a quest file ID in guild_characters.
|
||||
func (r *DivaRepository) AddInterceptionPoints(characterID uint32, questFileID int, points int) error {
|
||||
_, err := r.db.Exec(`
|
||||
UPDATE guild_characters
|
||||
SET interception_points = interception_points || jsonb_build_object(
|
||||
$2::text,
|
||||
COALESCE((interception_points->>$2::text)::int, 0) + $3
|
||||
)
|
||||
WHERE char_id=$1`,
|
||||
characterID, questFileID, points)
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -5,6 +5,7 @@ import (
|
||||
"database/sql"
|
||||
"errors"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/jmoiron/sqlx"
|
||||
)
|
||||
@@ -34,6 +35,7 @@ SELECT
|
||||
leader_id,
|
||||
c.name AS leader_name,
|
||||
comment,
|
||||
return_type,
|
||||
COALESCE(pugi_name_1, '') AS pugi_name_1,
|
||||
COALESCE(pugi_name_2, '') AS pugi_name_2,
|
||||
COALESCE(pugi_name_3, '') AS pugi_name_3,
|
||||
@@ -195,6 +197,62 @@ func (r *GuildRepository) Create(leaderCharID uint32, guildName string) (int32,
|
||||
return guildID, nil
|
||||
}
|
||||
|
||||
// FindOrCreateReturnGuild finds an existing return guild of the given type with fewer
|
||||
// than 60 members, or creates a new one. The name template receives the guild count+1
|
||||
// as its single %d argument. Returns the guild ID.
|
||||
func (r *GuildRepository) FindOrCreateReturnGuild(returnType uint8, nameTemplate string) (uint32, error) {
|
||||
var guildID uint32
|
||||
err := r.db.QueryRow(`
|
||||
SELECT g.id FROM guilds g
|
||||
WHERE g.return_type = $1
|
||||
AND (SELECT COUNT(1) FROM guild_characters gc WHERE gc.guild_id = g.id) < 60
|
||||
LIMIT 1
|
||||
`, returnType).Scan(&guildID)
|
||||
if err == nil {
|
||||
return guildID, nil
|
||||
}
|
||||
if !errors.Is(err, sql.ErrNoRows) {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
// No suitable guild — count existing ones and create a new one.
|
||||
var count int
|
||||
if err := r.db.QueryRow(
|
||||
`SELECT COUNT(1) FROM guilds WHERE return_type = $1`, returnType,
|
||||
).Scan(&count); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
tx, err := r.db.BeginTxx(context.Background(), nil)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
defer func() { _ = tx.Rollback() }()
|
||||
|
||||
name := fmt.Sprintf(nameTemplate, count+1)
|
||||
if err := tx.QueryRow(
|
||||
`INSERT INTO guilds (name, leader_id, return_type, rank_rp) VALUES ($1, 0, $2, 1200) RETURNING id`,
|
||||
name, returnType,
|
||||
).Scan(&guildID); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
if err := tx.Commit(); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return guildID, nil
|
||||
}
|
||||
|
||||
// AddMember inserts a character into a guild's member list.
|
||||
func (r *GuildRepository) AddMember(guildID, charID uint32) error {
|
||||
_, err := r.db.Exec(`
|
||||
INSERT INTO guild_characters (guild_id, character_id, order_index)
|
||||
VALUES ($1, $2, (SELECT COALESCE(MAX(order_index), 0) + 1 FROM guild_characters WHERE guild_id = $1))
|
||||
ON CONFLICT (guild_id, character_id) DO NOTHING
|
||||
`, guildID, charID)
|
||||
return err
|
||||
}
|
||||
|
||||
// Save persists guild metadata changes.
|
||||
func (r *GuildRepository) Save(guild *Guild) error {
|
||||
_, err := r.db.Exec(`
|
||||
@@ -270,8 +328,9 @@ func (r *GuildRepository) CreateApplication(guildID, charID, actorID uint32, app
|
||||
return err
|
||||
}
|
||||
|
||||
// CreateApplicationWithMail atomically creates an application and sends a notification mail.
|
||||
func (r *GuildRepository) CreateApplicationWithMail(guildID, charID, actorID uint32, appType GuildApplicationType, mailSenderID, mailRecipientID uint32, mailSubject, mailBody string) error {
|
||||
// CreateInviteWithMail atomically inserts a scout invitation into guild_invites
|
||||
// and sends a notification mail to the target character.
|
||||
func (r *GuildRepository) CreateInviteWithMail(guildID, charID, actorID uint32, mailSenderID, mailRecipientID uint32, mailSubject, mailBody string) error {
|
||||
tx, err := r.db.BeginTxx(context.Background(), nil)
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -279,8 +338,8 @@ func (r *GuildRepository) CreateApplicationWithMail(guildID, charID, actorID uin
|
||||
defer func() { _ = tx.Rollback() }()
|
||||
|
||||
if _, err := tx.Exec(
|
||||
`INSERT INTO guild_applications (guild_id, character_id, actor_id, application_type) VALUES ($1, $2, $3, $4)`,
|
||||
guildID, charID, actorID, appType); err != nil {
|
||||
`INSERT INTO guild_invites (guild_id, character_id, actor_id) VALUES ($1, $2, $3)`,
|
||||
guildID, charID, actorID); err != nil {
|
||||
return err
|
||||
}
|
||||
if _, err := tx.Exec(mailInsertQuery, mailSenderID, mailRecipientID, mailSubject, mailBody, 0, 0, true, false); err != nil {
|
||||
@@ -289,11 +348,55 @@ func (r *GuildRepository) CreateApplicationWithMail(guildID, charID, actorID uin
|
||||
return tx.Commit()
|
||||
}
|
||||
|
||||
// CancelInvitation removes an invitation for a character.
|
||||
func (r *GuildRepository) CancelInvitation(guildID, charID uint32) error {
|
||||
// HasInvite reports whether a pending scout invitation exists for the character in the guild.
|
||||
func (r *GuildRepository) HasInvite(guildID, charID uint32) (bool, error) {
|
||||
var n int
|
||||
err := r.db.QueryRow(
|
||||
`SELECT 1 FROM guild_invites WHERE guild_id = $1 AND character_id = $2`,
|
||||
guildID, charID,
|
||||
).Scan(&n)
|
||||
if errors.Is(err, sql.ErrNoRows) {
|
||||
return false, nil
|
||||
}
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
return true, nil
|
||||
}
|
||||
|
||||
// CancelInvite removes a scout invitation by its primary key.
|
||||
func (r *GuildRepository) CancelInvite(inviteID uint32) error {
|
||||
_, err := r.db.Exec(`DELETE FROM guild_invites WHERE id = $1`, inviteID)
|
||||
return err
|
||||
}
|
||||
|
||||
// AcceptInvite removes the scout invitation and adds the character to the guild atomically.
|
||||
func (r *GuildRepository) AcceptInvite(guildID, charID uint32) error {
|
||||
tx, err := r.db.BeginTxx(context.Background(), nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer func() { _ = tx.Rollback() }()
|
||||
|
||||
if _, err := tx.Exec(
|
||||
`DELETE FROM guild_invites WHERE guild_id = $1 AND character_id = $2`,
|
||||
guildID, charID); err != nil {
|
||||
return err
|
||||
}
|
||||
if _, err := tx.Exec(`
|
||||
INSERT INTO guild_characters (guild_id, character_id, order_index)
|
||||
VALUES ($1, $2, (SELECT MAX(order_index) + 1 FROM guild_characters WHERE guild_id = $1))
|
||||
`, guildID, charID); err != nil {
|
||||
return err
|
||||
}
|
||||
return tx.Commit()
|
||||
}
|
||||
|
||||
// DeclineInvite removes a scout invitation without joining the guild.
|
||||
func (r *GuildRepository) DeclineInvite(guildID, charID uint32) error {
|
||||
_, err := r.db.Exec(
|
||||
`DELETE FROM guild_applications WHERE character_id = $1 AND guild_id = $2 AND application_type = 'invited'`,
|
||||
charID, guildID,
|
||||
`DELETE FROM guild_invites WHERE guild_id = $1 AND character_id = $2`,
|
||||
guildID, charID,
|
||||
)
|
||||
return err
|
||||
}
|
||||
@@ -433,34 +536,39 @@ func (r *GuildRepository) SetRecruiter(charID uint32, allowed bool) error {
|
||||
return err
|
||||
}
|
||||
|
||||
// ScoutedCharacter represents an invited character in the scout list.
|
||||
type ScoutedCharacter struct {
|
||||
CharID uint32 `db:"id"`
|
||||
Name string `db:"name"`
|
||||
HR uint16 `db:"hr"`
|
||||
GR uint16 `db:"gr"`
|
||||
ActorID uint32 `db:"actor_id"`
|
||||
// GuildInvite represents a pending scout invitation with the target character's info.
|
||||
type GuildInvite struct {
|
||||
ID uint32 `db:"id"`
|
||||
GuildID uint32 `db:"guild_id"`
|
||||
CharID uint32 `db:"character_id"`
|
||||
ActorID uint32 `db:"actor_id"`
|
||||
InvitedAt time.Time `db:"created_at"`
|
||||
HR uint16 `db:"hr"`
|
||||
GR uint16 `db:"gr"`
|
||||
Name string `db:"name"`
|
||||
}
|
||||
|
||||
// ListInvitedCharacters returns all characters with pending guild invitations.
|
||||
func (r *GuildRepository) ListInvitedCharacters(guildID uint32) ([]*ScoutedCharacter, error) {
|
||||
// ListInvites returns all pending scout invitations for a guild, including
|
||||
// the target character's HR, GR, and name.
|
||||
func (r *GuildRepository) ListInvites(guildID uint32) ([]*GuildInvite, error) {
|
||||
rows, err := r.db.Queryx(`
|
||||
SELECT c.id, c.name, c.hr, c.gr, ga.actor_id
|
||||
FROM guild_applications ga
|
||||
JOIN characters c ON c.id = ga.character_id
|
||||
WHERE ga.guild_id = $1 AND ga.application_type = 'invited'
|
||||
SELECT gi.id, gi.guild_id, gi.character_id, gi.actor_id, gi.created_at,
|
||||
c.hr, c.gr, c.name
|
||||
FROM guild_invites gi
|
||||
JOIN characters c ON c.id = gi.character_id
|
||||
WHERE gi.guild_id = $1
|
||||
`, guildID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer func() { _ = rows.Close() }()
|
||||
var chars []*ScoutedCharacter
|
||||
var invites []*GuildInvite
|
||||
for rows.Next() {
|
||||
sc := &ScoutedCharacter{}
|
||||
if err := rows.StructScan(sc); err != nil {
|
||||
inv := &GuildInvite{}
|
||||
if err := rows.StructScan(inv); err != nil {
|
||||
continue
|
||||
}
|
||||
chars = append(chars, sc)
|
||||
invites = append(invites, inv)
|
||||
}
|
||||
return chars, nil
|
||||
return invites, nil
|
||||
}
|
||||
|
||||
227
server/channelserver/repo_guild_subsystems_test.go
Normal file
227
server/channelserver/repo_guild_subsystems_test.go
Normal file
@@ -0,0 +1,227 @@
|
||||
package channelserver
|
||||
|
||||
// Tests for guild subsystem methods not covered by repo_guild_test.go:
|
||||
// - SetAllianceRecruiting (repo_guild_alliance.go)
|
||||
// - RolloverDailyRP (repo_guild_rp.go)
|
||||
// - AddWeeklyBonusUsers (repo_guild_rp.go)
|
||||
// - InsertKillLog (repo_guild_hunt.go)
|
||||
// - ClearTreasureHunt (repo_guild_hunt.go)
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
func TestSetAllianceRecruiting(t *testing.T) {
|
||||
db := SetupTestDB(t)
|
||||
defer TeardownTestDB(t, db)
|
||||
|
||||
userID := CreateTestUser(t, db, "sar_user")
|
||||
charID := CreateTestCharacter(t, db, userID, "SAR_Leader")
|
||||
guildID := CreateTestGuild(t, db, charID, "SAR_Guild")
|
||||
repo := NewGuildRepository(db)
|
||||
|
||||
if err := repo.CreateAlliance("SAR_Alliance", guildID); err != nil {
|
||||
t.Fatalf("CreateAlliance failed: %v", err)
|
||||
}
|
||||
alliances, err := repo.ListAlliances()
|
||||
if err != nil {
|
||||
t.Fatalf("ListAlliances failed: %v", err)
|
||||
}
|
||||
if len(alliances) == 0 {
|
||||
t.Fatal("Expected at least 1 alliance")
|
||||
}
|
||||
allianceID := alliances[0].ID
|
||||
|
||||
// Default should be false.
|
||||
if alliances[0].Recruiting {
|
||||
t.Error("Expected initial Recruiting=false")
|
||||
}
|
||||
|
||||
if err := repo.SetAllianceRecruiting(allianceID, true); err != nil {
|
||||
t.Fatalf("SetAllianceRecruiting(true) failed: %v", err)
|
||||
}
|
||||
alliance, err := repo.GetAllianceByID(allianceID)
|
||||
if err != nil {
|
||||
t.Fatalf("GetAllianceByID after set true failed: %v", err)
|
||||
}
|
||||
if !alliance.Recruiting {
|
||||
t.Error("Expected Recruiting=true after SetAllianceRecruiting(true)")
|
||||
}
|
||||
|
||||
if err := repo.SetAllianceRecruiting(allianceID, false); err != nil {
|
||||
t.Fatalf("SetAllianceRecruiting(false) failed: %v", err)
|
||||
}
|
||||
alliance, err = repo.GetAllianceByID(allianceID)
|
||||
if err != nil {
|
||||
t.Fatalf("GetAllianceByID after set false failed: %v", err)
|
||||
}
|
||||
if alliance.Recruiting {
|
||||
t.Error("Expected Recruiting=false after SetAllianceRecruiting(false)")
|
||||
}
|
||||
}
|
||||
|
||||
func TestRolloverDailyRP(t *testing.T) {
|
||||
db := SetupTestDB(t)
|
||||
defer TeardownTestDB(t, db)
|
||||
|
||||
userID := CreateTestUser(t, db, "rollover_user")
|
||||
charID := CreateTestCharacter(t, db, userID, "Rollover_Leader")
|
||||
guildID := CreateTestGuild(t, db, charID, "Rollover_Guild")
|
||||
repo := NewGuildRepository(db)
|
||||
|
||||
// Set rp_today for the member so we can verify the rollover.
|
||||
if _, err := db.Exec("UPDATE guild_characters SET rp_today = 50 WHERE character_id = $1", charID); err != nil {
|
||||
t.Fatalf("Failed to set rp_today: %v", err)
|
||||
}
|
||||
|
||||
noon := time.Now().UTC()
|
||||
if err := repo.RolloverDailyRP(guildID, noon); err != nil {
|
||||
t.Fatalf("RolloverDailyRP failed: %v", err)
|
||||
}
|
||||
|
||||
var rpToday, rpYesterday int
|
||||
if err := db.QueryRow("SELECT rp_today, rp_yesterday FROM guild_characters WHERE character_id = $1", charID).
|
||||
Scan(&rpToday, &rpYesterday); err != nil {
|
||||
t.Fatalf("Failed to read rp values: %v", err)
|
||||
}
|
||||
if rpToday != 0 {
|
||||
t.Errorf("Expected rp_today=0 after rollover, got %d", rpToday)
|
||||
}
|
||||
if rpYesterday != 50 {
|
||||
t.Errorf("Expected rp_yesterday=50 after rollover, got %d", rpYesterday)
|
||||
}
|
||||
}
|
||||
|
||||
func TestRolloverDailyRP_Idempotent(t *testing.T) {
|
||||
db := SetupTestDB(t)
|
||||
defer TeardownTestDB(t, db)
|
||||
|
||||
userID := CreateTestUser(t, db, "idem_rollover_user")
|
||||
charID := CreateTestCharacter(t, db, userID, "IdemRollLeader")
|
||||
guildID := CreateTestGuild(t, db, charID, "Idem_Rollover_Guild")
|
||||
repo := NewGuildRepository(db)
|
||||
|
||||
if _, err := db.Exec("UPDATE guild_characters SET rp_today = 100 WHERE character_id = $1", charID); err != nil {
|
||||
t.Fatalf("Failed to set rp_today: %v", err)
|
||||
}
|
||||
|
||||
noon := time.Now().UTC()
|
||||
if err := repo.RolloverDailyRP(guildID, noon); err != nil {
|
||||
t.Fatalf("First RolloverDailyRP failed: %v", err)
|
||||
}
|
||||
// Second call with same noon should be a no-op (rp_reset_at >= noon).
|
||||
if err := repo.RolloverDailyRP(guildID, noon); err != nil {
|
||||
t.Fatalf("Second RolloverDailyRP (idempotent) failed: %v", err)
|
||||
}
|
||||
|
||||
var rpToday int
|
||||
_ = db.QueryRow("SELECT rp_today FROM guild_characters WHERE character_id = $1", charID).Scan(&rpToday)
|
||||
if rpToday != 0 {
|
||||
t.Errorf("Expected rp_today=0 after idempotent rollover, got %d", rpToday)
|
||||
}
|
||||
}
|
||||
|
||||
func TestAddWeeklyBonusUsers(t *testing.T) {
|
||||
db := SetupTestDB(t)
|
||||
defer TeardownTestDB(t, db)
|
||||
|
||||
userID := CreateTestUser(t, db, "wbu_user")
|
||||
charID := CreateTestCharacter(t, db, userID, "WBU_Leader")
|
||||
guildID := CreateTestGuild(t, db, charID, "WBU_Guild")
|
||||
repo := NewGuildRepository(db)
|
||||
|
||||
if err := repo.AddWeeklyBonusUsers(guildID, 3); err != nil {
|
||||
t.Fatalf("AddWeeklyBonusUsers failed: %v", err)
|
||||
}
|
||||
|
||||
// Verify the column incremented.
|
||||
var wbu int
|
||||
if err := db.QueryRow("SELECT weekly_bonus_users FROM guilds WHERE id = $1", guildID).Scan(&wbu); err != nil {
|
||||
t.Fatalf("Failed to read weekly_bonus_users: %v", err)
|
||||
}
|
||||
if wbu != 3 {
|
||||
t.Errorf("Expected weekly_bonus_users=3, got %d", wbu)
|
||||
}
|
||||
|
||||
// Add again and verify accumulation.
|
||||
if err := repo.AddWeeklyBonusUsers(guildID, 2); err != nil {
|
||||
t.Fatalf("Second AddWeeklyBonusUsers failed: %v", err)
|
||||
}
|
||||
if err := db.QueryRow("SELECT weekly_bonus_users FROM guilds WHERE id = $1", guildID).Scan(&wbu); err != nil {
|
||||
t.Fatalf("Failed to read weekly_bonus_users after second add: %v", err)
|
||||
}
|
||||
if wbu != 5 {
|
||||
t.Errorf("Expected weekly_bonus_users=5 after second add, got %d", wbu)
|
||||
}
|
||||
}
|
||||
|
||||
func TestInsertKillLogAndCount(t *testing.T) {
|
||||
db := SetupTestDB(t)
|
||||
defer TeardownTestDB(t, db)
|
||||
|
||||
userID := CreateTestUser(t, db, "kill_log_user")
|
||||
charID := CreateTestCharacter(t, db, userID, "Kill_Logger")
|
||||
guildID := CreateTestGuild(t, db, charID, "Kill_Guild")
|
||||
repo := NewGuildRepository(db)
|
||||
|
||||
// Set box_claimed to 1 hour ago so kills inserted now are within the window.
|
||||
if _, err := db.Exec("UPDATE guild_characters SET box_claimed = now() - interval '1 hour' WHERE character_id = $1", charID); err != nil {
|
||||
t.Fatalf("Failed to set box_claimed: %v", err)
|
||||
}
|
||||
|
||||
if err := repo.InsertKillLog(charID, 42, 2, time.Now()); err != nil {
|
||||
t.Fatalf("InsertKillLog failed: %v", err)
|
||||
}
|
||||
|
||||
count, err := repo.CountGuildKills(guildID, charID)
|
||||
if err != nil {
|
||||
t.Fatalf("CountGuildKills failed: %v", err)
|
||||
}
|
||||
if count != 1 {
|
||||
t.Errorf("Expected 1 kill log entry, got %d", count)
|
||||
}
|
||||
}
|
||||
|
||||
func TestClearTreasureHunt(t *testing.T) {
|
||||
db := SetupTestDB(t)
|
||||
defer TeardownTestDB(t, db)
|
||||
|
||||
userID := CreateTestUser(t, db, "cth_user")
|
||||
charID := CreateTestCharacter(t, db, userID, "CTH_Leader")
|
||||
guildID := CreateTestGuild(t, db, charID, "CTH_Guild")
|
||||
repo := NewGuildRepository(db)
|
||||
|
||||
// Create and register a hunt.
|
||||
if err := repo.CreateHunt(guildID, charID, 7, 1, []byte{}, ""); err != nil {
|
||||
t.Fatalf("CreateHunt failed: %v", err)
|
||||
}
|
||||
hunt, err := repo.GetPendingHunt(charID)
|
||||
if err != nil || hunt == nil {
|
||||
t.Fatalf("GetPendingHunt failed or nil: %v", err)
|
||||
}
|
||||
if err := repo.RegisterHuntReport(hunt.HuntID, charID); err != nil {
|
||||
t.Fatalf("RegisterHuntReport failed: %v", err)
|
||||
}
|
||||
|
||||
// Verify treasure_hunt is set.
|
||||
var th interface{}
|
||||
if err := db.QueryRow("SELECT treasure_hunt FROM guild_characters WHERE character_id = $1", charID).Scan(&th); err != nil {
|
||||
t.Fatalf("Failed to read treasure_hunt: %v", err)
|
||||
}
|
||||
if th == nil {
|
||||
t.Error("Expected treasure_hunt to be set after RegisterHuntReport")
|
||||
}
|
||||
|
||||
// Clear it.
|
||||
if err := repo.ClearTreasureHunt(charID); err != nil {
|
||||
t.Fatalf("ClearTreasureHunt failed: %v", err)
|
||||
}
|
||||
|
||||
if err := db.QueryRow("SELECT treasure_hunt FROM guild_characters WHERE character_id = $1", charID).Scan(&th); err != nil {
|
||||
t.Fatalf("Failed to read treasure_hunt after clear: %v", err)
|
||||
}
|
||||
if th != nil {
|
||||
t.Errorf("Expected treasure_hunt=nil after ClearTreasureHunt, got %v", th)
|
||||
}
|
||||
}
|
||||
@@ -533,66 +533,77 @@ func TestAddMemberDailyRP(t *testing.T) {
|
||||
|
||||
// --- Invitation / Scout tests ---
|
||||
|
||||
func TestCancelInvitation(t *testing.T) {
|
||||
func TestCancelInvite(t *testing.T) {
|
||||
repo, db, guildID, leaderID := setupGuildRepo(t)
|
||||
|
||||
user2 := CreateTestUser(t, db, "invite_user")
|
||||
char2 := CreateTestCharacter(t, db, user2, "Invited")
|
||||
|
||||
if err := repo.CreateApplication(guildID, char2, leaderID, GuildApplicationTypeInvited); err != nil {
|
||||
t.Fatalf("CreateApplication (invited) failed: %v", err)
|
||||
if err := repo.CreateInviteWithMail(guildID, char2, leaderID, leaderID, char2, "Invite", "body"); err != nil {
|
||||
t.Fatalf("CreateInviteWithMail failed: %v", err)
|
||||
}
|
||||
|
||||
if err := repo.CancelInvitation(guildID, char2); err != nil {
|
||||
t.Fatalf("CancelInvitation failed: %v", err)
|
||||
invites, err := repo.ListInvites(guildID)
|
||||
if err != nil || len(invites) != 1 {
|
||||
t.Fatalf("Expected 1 invite, got %d (err: %v)", len(invites), err)
|
||||
}
|
||||
|
||||
has, err := repo.HasApplication(guildID, char2)
|
||||
if err := repo.CancelInvite(invites[0].ID); err != nil {
|
||||
t.Fatalf("CancelInvite failed: %v", err)
|
||||
}
|
||||
|
||||
has, err := repo.HasInvite(guildID, char2)
|
||||
if err != nil {
|
||||
t.Fatalf("HasApplication failed: %v", err)
|
||||
t.Fatalf("HasInvite failed: %v", err)
|
||||
}
|
||||
if has {
|
||||
t.Error("Expected no application after cancellation")
|
||||
t.Error("Expected no invite after cancellation")
|
||||
}
|
||||
}
|
||||
|
||||
func TestListInvitedCharacters(t *testing.T) {
|
||||
func TestListInvites(t *testing.T) {
|
||||
repo, db, guildID, leaderID := setupGuildRepo(t)
|
||||
|
||||
user2 := CreateTestUser(t, db, "scout_user")
|
||||
char2 := CreateTestCharacter(t, db, user2, "Scouted")
|
||||
|
||||
if err := repo.CreateApplication(guildID, char2, leaderID, GuildApplicationTypeInvited); err != nil {
|
||||
t.Fatalf("CreateApplication failed: %v", err)
|
||||
if err := repo.CreateInviteWithMail(guildID, char2, leaderID, leaderID, char2, "Invite", "body"); err != nil {
|
||||
t.Fatalf("CreateInviteWithMail failed: %v", err)
|
||||
}
|
||||
|
||||
chars, err := repo.ListInvitedCharacters(guildID)
|
||||
invites, err := repo.ListInvites(guildID)
|
||||
if err != nil {
|
||||
t.Fatalf("ListInvitedCharacters failed: %v", err)
|
||||
t.Fatalf("ListInvites failed: %v", err)
|
||||
}
|
||||
if len(chars) != 1 {
|
||||
t.Fatalf("Expected 1 invited character, got %d", len(chars))
|
||||
if len(invites) != 1 {
|
||||
t.Fatalf("Expected 1 invite, got %d", len(invites))
|
||||
}
|
||||
if chars[0].CharID != char2 {
|
||||
t.Errorf("Expected char ID %d, got %d", char2, chars[0].CharID)
|
||||
if invites[0].CharID != char2 {
|
||||
t.Errorf("Expected char ID %d, got %d", char2, invites[0].CharID)
|
||||
}
|
||||
if chars[0].Name != "Scouted" {
|
||||
t.Errorf("Expected name 'Scouted', got %q", chars[0].Name)
|
||||
if invites[0].Name != "Scouted" {
|
||||
t.Errorf("Expected name 'Scouted', got %q", invites[0].Name)
|
||||
}
|
||||
if chars[0].ActorID != leaderID {
|
||||
t.Errorf("Expected actor ID %d, got %d", leaderID, chars[0].ActorID)
|
||||
if invites[0].ActorID != leaderID {
|
||||
t.Errorf("Expected actor ID %d, got %d", leaderID, invites[0].ActorID)
|
||||
}
|
||||
if invites[0].ID == 0 {
|
||||
t.Error("Expected non-zero invite ID")
|
||||
}
|
||||
if invites[0].InvitedAt.IsZero() {
|
||||
t.Error("Expected non-zero InvitedAt timestamp")
|
||||
}
|
||||
}
|
||||
|
||||
func TestListInvitedCharactersEmpty(t *testing.T) {
|
||||
func TestListInvitesEmpty(t *testing.T) {
|
||||
repo, _, guildID, _ := setupGuildRepo(t)
|
||||
|
||||
chars, err := repo.ListInvitedCharacters(guildID)
|
||||
invites, err := repo.ListInvites(guildID)
|
||||
if err != nil {
|
||||
t.Fatalf("ListInvitedCharacters failed: %v", err)
|
||||
t.Fatalf("ListInvites failed: %v", err)
|
||||
}
|
||||
if len(chars) != 0 {
|
||||
t.Errorf("Expected 0 invited characters, got %d", len(chars))
|
||||
if len(invites) != 0 {
|
||||
t.Errorf("Expected 0 invites, got %d", len(invites))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1486,28 +1497,26 @@ func TestDisbandCleansUpAlliance(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
// --- CreateApplicationWithMail ---
|
||||
// --- CreateInviteWithMail ---
|
||||
|
||||
func TestCreateApplicationWithMail(t *testing.T) {
|
||||
func TestCreateInviteWithMail(t *testing.T) {
|
||||
repo, db, guildID, leaderID := setupGuildRepo(t)
|
||||
|
||||
user2 := CreateTestUser(t, db, "scout_mail_user")
|
||||
char2 := CreateTestCharacter(t, db, user2, "ScoutTarget")
|
||||
|
||||
err := repo.CreateApplicationWithMail(
|
||||
guildID, char2, leaderID, GuildApplicationTypeInvited,
|
||||
leaderID, char2, "Guild Invite", "You have been invited!")
|
||||
err := repo.CreateInviteWithMail(guildID, char2, leaderID, leaderID, char2, "Guild Invite", "You have been invited!")
|
||||
if err != nil {
|
||||
t.Fatalf("CreateApplicationWithMail failed: %v", err)
|
||||
t.Fatalf("CreateInviteWithMail failed: %v", err)
|
||||
}
|
||||
|
||||
// Verify application was created
|
||||
has, err := repo.HasApplication(guildID, char2)
|
||||
// Verify invite was created
|
||||
has, err := repo.HasInvite(guildID, char2)
|
||||
if err != nil {
|
||||
t.Fatalf("HasApplication failed: %v", err)
|
||||
t.Fatalf("HasInvite failed: %v", err)
|
||||
}
|
||||
if !has {
|
||||
t.Error("Expected application to exist after CreateApplicationWithMail")
|
||||
t.Error("Expected invite to exist after CreateInviteWithMail")
|
||||
}
|
||||
|
||||
// Verify mail was sent
|
||||
|
||||
@@ -48,6 +48,9 @@ type CharacterRepo interface {
|
||||
// LoadSaveDataWithHash loads savedata along with its stored SHA-256 hash.
|
||||
// The hash may be nil for characters saved before checksums were introduced.
|
||||
LoadSaveDataWithHash(charID uint32) (id uint32, savedata []byte, isNew bool, name string, hash []byte, err error)
|
||||
// LoadBackupsByRecency returns all backup slots for a character ordered
|
||||
// most-recent first. Returns an empty slice if no backups exist.
|
||||
LoadBackupsByRecency(charID uint32) ([]SavedataBackup, error)
|
||||
}
|
||||
|
||||
// GuildRepo defines the contract for guild data access.
|
||||
@@ -61,8 +64,11 @@ type GuildRepo interface {
|
||||
RemoveCharacter(charID uint32) error
|
||||
AcceptApplication(guildID, charID uint32) error
|
||||
CreateApplication(guildID, charID, actorID uint32, appType GuildApplicationType) error
|
||||
CreateApplicationWithMail(guildID, charID, actorID uint32, appType GuildApplicationType, mailSenderID, mailRecipientID uint32, mailSubject, mailBody string) error
|
||||
CancelInvitation(guildID, charID uint32) error
|
||||
CreateInviteWithMail(guildID, charID, actorID uint32, mailSenderID, mailRecipientID uint32, mailSubject, mailBody string) error
|
||||
HasInvite(guildID, charID uint32) (bool, error)
|
||||
CancelInvite(inviteID uint32) error
|
||||
AcceptInvite(guildID, charID uint32) error
|
||||
DeclineInvite(guildID, charID uint32) error
|
||||
RejectApplication(guildID, charID uint32) error
|
||||
ArrangeCharacters(charIDs []uint32) error
|
||||
GetApplication(guildID, charID uint32, appType GuildApplicationType) (*GuildApplication, error)
|
||||
@@ -117,9 +123,11 @@ type GuildRepo interface {
|
||||
CountGuildKills(guildID, charID uint32) (int, error)
|
||||
ClearTreasureHunt(charID uint32) error
|
||||
InsertKillLog(charID uint32, monster int, quantity uint8, timestamp time.Time) error
|
||||
ListInvitedCharacters(guildID uint32) ([]*ScoutedCharacter, error)
|
||||
ListInvites(guildID uint32) ([]*GuildInvite, error)
|
||||
RolloverDailyRP(guildID uint32, noon time.Time) error
|
||||
AddWeeklyBonusUsers(guildID uint32, numUsers uint8) error
|
||||
FindOrCreateReturnGuild(returnType uint8, nameTemplate string) (uint32, error)
|
||||
AddMember(guildID, charID uint32) error
|
||||
}
|
||||
|
||||
// UserRepo defines the contract for user account data access.
|
||||
@@ -319,6 +327,18 @@ type GoocooRepo interface {
|
||||
SaveSlot(charID uint32, slot uint32, data []byte) error
|
||||
}
|
||||
|
||||
// DivaPrize represents a single reward milestone for the personal or guild track.
|
||||
type DivaPrize struct {
|
||||
ID int
|
||||
Type string
|
||||
PointsReq int
|
||||
ItemType int
|
||||
ItemID int
|
||||
Quantity int
|
||||
GR bool
|
||||
Repeatable bool
|
||||
}
|
||||
|
||||
// DivaRepo defines the contract for diva event data access.
|
||||
type DivaRepo interface {
|
||||
DeleteEvents() error
|
||||
@@ -327,6 +347,23 @@ type DivaRepo interface {
|
||||
AddPoints(charID uint32, eventID uint32, questPoints, bonusPoints uint32) error
|
||||
GetPoints(charID uint32, eventID uint32) (questPoints, bonusPoints int64, err error)
|
||||
GetTotalPoints(eventID uint32) (questPoints, bonusPoints int64, err error)
|
||||
|
||||
// Bead management
|
||||
GetBeads() ([]int, error)
|
||||
AssignBead(characterID uint32, beadIndex int, expiry time.Time) error
|
||||
AddBeadPoints(characterID uint32, beadIndex int, points int) error
|
||||
GetCharacterBeadPoints(characterID uint32) (map[int]int, error)
|
||||
GetTotalBeadPoints() (int64, error)
|
||||
GetTopBeadPerDay(day int) (int, error)
|
||||
CleanupBeads() error
|
||||
|
||||
// Prize rewards
|
||||
GetPersonalPrizes() ([]DivaPrize, error)
|
||||
GetGuildPrizes() ([]DivaPrize, error)
|
||||
|
||||
// Interception points (guild_characters.interception_points JSON)
|
||||
GetCharacterInterceptionPoints(characterID uint32) (map[string]int, error)
|
||||
AddInterceptionPoints(characterID uint32, questFileID int, points int) error
|
||||
}
|
||||
|
||||
// MiscRepo defines the contract for miscellaneous data access.
|
||||
@@ -348,3 +385,61 @@ type MercenaryRepo interface {
|
||||
GetGuildHuntCatsUsed(charID uint32) ([]GuildHuntCatUsage, error)
|
||||
GetGuildAirou(guildID uint32) ([][]byte, error)
|
||||
}
|
||||
|
||||
// Tournament represents a tournament schedule entry.
// The time fields partition the tournament lifecycle into entry, ranking,
// and reward phases; they are compared against an int64 "now" value by
// TournamentRepo.GetActive (presumably Unix seconds — confirm against the
// code that populates the tournaments table).
type Tournament struct {
	ID         uint32 `db:"id"`
	Name       string `db:"name"`
	StartTime  int64  `db:"start_time"`
	EntryEnd   int64  `db:"entry_end"`
	RankingEnd int64  `db:"ranking_end"`
	RewardEnd  int64  `db:"reward_end"`
}

// TournamentCup represents a competition category within a tournament.
type TournamentCup struct {
	ID          uint32 `db:"id"`
	CupGroup    int16  `db:"cup_group"`
	CupType     int16  `db:"cup_type"`
	Unk         int16  `db:"unk"` // purpose unknown; preserved from the client protocol
	Name        string `db:"name"`
	Description string `db:"description"`
}

// TournamentSubEvent represents a specific hunt/fish target within a cup group.
type TournamentSubEvent struct {
	ID           uint32 `db:"id"`
	CupGroup     int16  `db:"cup_group"`
	EventSubType int16  `db:"event_sub_type"`
	QuestFileID  uint32 `db:"quest_file_id"`
	Name         string `db:"name"`
}

// TournamentRankEntry is a single entry in a leaderboard.
type TournamentRankEntry struct {
	CharID    uint32
	Rank      uint32
	Grade     uint16
	HR        uint16
	GR        uint16
	CharName  string
	GuildName string
}

// TournamentEntry represents a player's registration for a tournament.
type TournamentEntry struct {
	ID           uint32 `db:"id"`
	CharID       uint32 `db:"char_id"`
	TournamentID uint32 `db:"tournament_id"`
}

// TournamentRepo defines the contract for tournament schedule and result data access.
type TournamentRepo interface {
	// GetActive returns the tournament active at the given time, or nil if none.
	GetActive(now int64) (*Tournament, error)
	// GetCups lists the cups belonging to a tournament.
	GetCups(tournamentID uint32) ([]TournamentCup, error)
	GetSubEvents() ([]TournamentSubEvent, error)
	// Register enters a character into a tournament and returns the entry ID.
	Register(charID, tournamentID uint32) (entryID uint32, err error)
	// GetEntry returns a character's registration, or nil when not registered.
	GetEntry(charID, tournamentID uint32) (*TournamentEntry, error)
	SubmitResult(charID, tournamentID, eventID, questSlot, stageHandle uint32) error
	// GetLeaderboard returns ranked results for an event, capped at 100 entries.
	GetLeaderboard(eventID uint32) ([]TournamentRankEntry, error)
}
|
||||
|
||||
@@ -248,6 +248,9 @@ func (m *mockCharacterRepo) SaveCharacterDataAtomic(_ SaveAtomicParams) error {
|
||||
func (m *mockCharacterRepo) LoadSaveDataWithHash(_ uint32) (uint32, []byte, bool, string, []byte, error) {
|
||||
return m.loadSaveDataID, m.loadSaveDataData, m.loadSaveDataNew, m.loadSaveDataName, m.loadSaveDataHash, m.loadSaveDataErr
|
||||
}
|
||||
func (m *mockCharacterRepo) LoadBackupsByRecency(_ uint32) ([]SavedataBackup, error) {
|
||||
return []SavedataBackup{}, nil
|
||||
}
|
||||
|
||||
// --- mockGoocooRepo ---
|
||||
|
||||
@@ -306,8 +309,10 @@ type mockGuildRepo struct {
|
||||
removeErr error
|
||||
createAppErr error
|
||||
getMemberErr error
|
||||
hasAppResult bool
|
||||
hasAppErr error
|
||||
hasAppResult bool
|
||||
hasAppErr error
|
||||
hasInviteResult bool
|
||||
hasInviteErr error
|
||||
listPostsErr error
|
||||
createPostErr error
|
||||
deletePostErr error
|
||||
@@ -315,8 +320,10 @@ type mockGuildRepo struct {
|
||||
// State tracking
|
||||
disbandedID uint32
|
||||
removedCharID uint32
|
||||
acceptedCharID uint32
|
||||
rejectedCharID uint32
|
||||
acceptedCharID uint32
|
||||
rejectedCharID uint32
|
||||
acceptInviteCharID uint32
|
||||
declineInviteCharID uint32
|
||||
savedGuild *Guild
|
||||
savedMembers []*GuildMember
|
||||
createdAppArgs []interface{}
|
||||
@@ -569,10 +576,19 @@ func (m *mockGuildRepo) CountGuildKills(_, _ uint32) (int, error) {
|
||||
// No-op stubs for remaining GuildRepo interface methods.
|
||||
func (m *mockGuildRepo) ListAll() ([]*Guild, error) { return nil, nil }
|
||||
func (m *mockGuildRepo) Create(_ uint32, _ string) (int32, error) { return 0, nil }
|
||||
func (m *mockGuildRepo) CreateApplicationWithMail(_, _, _ uint32, _ GuildApplicationType, _, _ uint32, _, _ string) error {
|
||||
return nil
|
||||
func (m *mockGuildRepo) CreateInviteWithMail(_, _, _, _, _ uint32, _, _ string) error { return nil }
|
||||
func (m *mockGuildRepo) HasInvite(_, _ uint32) (bool, error) {
|
||||
return m.hasInviteResult, m.hasInviteErr
|
||||
}
|
||||
func (m *mockGuildRepo) CancelInvite(_ uint32) error { return nil }
|
||||
func (m *mockGuildRepo) AcceptInvite(_, charID uint32) error {
|
||||
m.acceptInviteCharID = charID
|
||||
return m.acceptErr
|
||||
}
|
||||
func (m *mockGuildRepo) DeclineInvite(_, charID uint32) error {
|
||||
m.declineInviteCharID = charID
|
||||
return m.rejectErr
|
||||
}
|
||||
func (m *mockGuildRepo) CancelInvitation(_, _ uint32) error { return nil }
|
||||
func (m *mockGuildRepo) ArrangeCharacters(_ []uint32) error { return nil }
|
||||
func (m *mockGuildRepo) GetItemBox(_ uint32) ([]byte, error) { return nil, nil }
|
||||
func (m *mockGuildRepo) SaveItemBox(_ uint32, _ []byte) error { return nil }
|
||||
@@ -595,11 +611,13 @@ func (m *mockGuildRepo) CountNewPosts(_ uint32, _ time.Time) (int, error)
|
||||
func (m *mockGuildRepo) ListAlliances() ([]*GuildAlliance, error) { return nil, nil }
|
||||
func (m *mockGuildRepo) ClearTreasureHunt(_ uint32) error { return nil }
|
||||
func (m *mockGuildRepo) InsertKillLog(_ uint32, _ int, _ uint8, _ time.Time) error { return nil }
|
||||
func (m *mockGuildRepo) ListInvitedCharacters(_ uint32) ([]*ScoutedCharacter, error) {
|
||||
return nil, nil
|
||||
}
|
||||
func (m *mockGuildRepo) ListInvites(_ uint32) ([]*GuildInvite, error) { return nil, nil }
|
||||
func (m *mockGuildRepo) RolloverDailyRP(_ uint32, _ time.Time) error { return nil }
|
||||
func (m *mockGuildRepo) AddWeeklyBonusUsers(_ uint32, _ uint8) error { return nil }
|
||||
func (m *mockGuildRepo) FindOrCreateReturnGuild(_ uint8, _ string) (uint32, error) {
|
||||
return 1, nil
|
||||
}
|
||||
func (m *mockGuildRepo) AddMember(_, _ uint32) error { return nil }
|
||||
|
||||
// --- mockUserRepoForItems ---
|
||||
|
||||
@@ -1147,6 +1165,22 @@ func (m *mockDivaRepo) GetTotalPoints(eventID uint32) (int64, int64, error) {
|
||||
return tq, tb, nil
|
||||
}
|
||||
|
||||
// The remaining DivaRepo methods are no-op stubs: they return empty/zero
// values so handler tests that never exercise bead, prize, or interception
// logic can still use mockDivaRepo as a complete implementation.
func (m *mockDivaRepo) GetBeads() ([]int, error)                      { return nil, nil }
func (m *mockDivaRepo) AssignBead(_ uint32, _ int, _ time.Time) error { return nil }
func (m *mockDivaRepo) AddBeadPoints(_ uint32, _ int, _ int) error    { return nil }
func (m *mockDivaRepo) GetCharacterBeadPoints(_ uint32) (map[int]int, error) {
	return map[int]int{}, nil
}
func (m *mockDivaRepo) GetTotalBeadPoints() (int64, error)      { return 0, nil }
func (m *mockDivaRepo) GetTopBeadPerDay(_ int) (int, error)     { return 0, nil }
func (m *mockDivaRepo) CleanupBeads() error                     { return nil }
func (m *mockDivaRepo) GetPersonalPrizes() ([]DivaPrize, error) { return nil, nil }
func (m *mockDivaRepo) GetGuildPrizes() ([]DivaPrize, error)    { return nil, nil }
func (m *mockDivaRepo) GetCharacterInterceptionPoints(_ uint32) (map[string]int, error) {
	return map[string]int{}, nil
}
func (m *mockDivaRepo) AddInterceptionPoints(_ uint32, _ int, _ int) error { return nil }
|
||||
|
||||
// --- mockEventRepo ---
|
||||
|
||||
type mockEventRepo struct {
|
||||
@@ -1232,3 +1266,35 @@ func (m *mockCafeRepo) GetBonusItem(_ uint32) (uint32, uint32, error) {
|
||||
return m.bonusItemType, m.bonusItemQty, m.bonusItemErr
|
||||
}
|
||||
func (m *mockCafeRepo) AcceptBonus(_, _ uint32) error { return nil }
|
||||
|
||||
// --- mockTournamentRepo ---

// mockTournamentRepo is a configurable in-memory TournamentRepo test double.
// Each field pre-programs the value (or error) returned by the matching method.
type mockTournamentRepo struct {
	active      *Tournament           // returned by GetActive
	activeErr   error                 // error returned by GetActive
	cups        []TournamentCup       // returned by GetCups
	subEvents   []TournamentSubEvent  // returned by GetSubEvents
	ranks       []TournamentRankEntry // returned by GetLeaderboard
	registerID  uint32                // entry ID returned by Register
	registerErr error                 // error returned by Register
	entry       *TournamentEntry      // returned by GetEntry
	entryErr    error                 // error returned by GetEntry
}

// GetActive returns the pre-configured tournament and error.
func (m *mockTournamentRepo) GetActive(_ int64) (*Tournament, error) {
	return m.active, m.activeErr
}
func (m *mockTournamentRepo) GetCups(_ uint32) ([]TournamentCup, error) { return m.cups, nil }
func (m *mockTournamentRepo) GetSubEvents() ([]TournamentSubEvent, error) {
	return m.subEvents, nil
}

// Register returns the pre-configured entry ID and error.
func (m *mockTournamentRepo) Register(_, _ uint32) (uint32, error) {
	return m.registerID, m.registerErr
}
func (m *mockTournamentRepo) GetEntry(_, _ uint32) (*TournamentEntry, error) {
	return m.entry, m.entryErr
}

// SubmitResult is a no-op stub.
func (m *mockTournamentRepo) SubmitResult(_, _, _, _, _ uint32) error { return nil }
func (m *mockTournamentRepo) GetLeaderboard(_ uint32) ([]TournamentRankEntry, error) {
	return m.ranks, nil
}
|
||||
|
||||
167
server/channelserver/repo_tournament.go
Normal file
167
server/channelserver/repo_tournament.go
Normal file
@@ -0,0 +1,167 @@
|
||||
package channelserver
|
||||
|
||||
import (
	"database/sql"
	"errors"
	"fmt"

	"github.com/jmoiron/sqlx"
)
|
||||
|
||||
// TournamentRepository centralizes all database access for tournament tables
// (tournaments, tournament_cups, tournament_sub_events, tournament_entries,
// tournament_results). It is a thin wrapper around the shared *sqlx.DB handle.
type TournamentRepository struct {
	db *sqlx.DB
}

// NewTournamentRepository creates a new TournamentRepository backed by db.
func NewTournamentRepository(db *sqlx.DB) *TournamentRepository {
	return &TournamentRepository{db: db}
}
|
||||
|
||||
// GetActive returns the most recently started tournament that is still within its
|
||||
// reward window (reward_end >= now), or nil if no active tournament exists.
|
||||
func (r *TournamentRepository) GetActive(now int64) (*Tournament, error) {
|
||||
var t Tournament
|
||||
err := r.db.QueryRowx(
|
||||
`SELECT id, name, start_time, entry_end, ranking_end, reward_end
|
||||
FROM tournaments
|
||||
WHERE start_time <= $1 AND reward_end >= $1
|
||||
ORDER BY start_time DESC
|
||||
LIMIT 1`,
|
||||
now,
|
||||
).StructScan(&t)
|
||||
if err == sql.ErrNoRows {
|
||||
return nil, nil
|
||||
}
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("get active tournament: %w", err)
|
||||
}
|
||||
return &t, nil
|
||||
}
|
||||
|
||||
// GetCups returns all cups belonging to the given tournament, ordered by ID.
|
||||
func (r *TournamentRepository) GetCups(tournamentID uint32) ([]TournamentCup, error) {
|
||||
var cups []TournamentCup
|
||||
err := r.db.Select(&cups,
|
||||
`SELECT id, cup_group, cup_type, unk, name, description
|
||||
FROM tournament_cups
|
||||
WHERE tournament_id = $1
|
||||
ORDER BY id`,
|
||||
tournamentID,
|
||||
)
|
||||
return cups, err
|
||||
}
|
||||
|
||||
// GetSubEvents returns all sub-events ordered by cup group and event sub type.
|
||||
func (r *TournamentRepository) GetSubEvents() ([]TournamentSubEvent, error) {
|
||||
var events []TournamentSubEvent
|
||||
err := r.db.Select(&events,
|
||||
`SELECT id, cup_group, event_sub_type, quest_file_id, name
|
||||
FROM tournament_sub_events
|
||||
ORDER BY cup_group, event_sub_type`,
|
||||
)
|
||||
return events, err
|
||||
}
|
||||
|
||||
// Register registers a character for a tournament. If the character is already
|
||||
// registered the existing entry ID is returned (ON CONFLICT DO NOTHING, then re-SELECT).
|
||||
func (r *TournamentRepository) Register(charID, tournamentID uint32) (uint32, error) {
|
||||
_, err := r.db.Exec(
|
||||
`INSERT INTO tournament_entries (char_id, tournament_id)
|
||||
VALUES ($1, $2)
|
||||
ON CONFLICT (char_id, tournament_id) DO NOTHING`,
|
||||
charID, tournamentID,
|
||||
)
|
||||
if err != nil {
|
||||
return 0, fmt.Errorf("insert tournament entry: %w", err)
|
||||
}
|
||||
var id uint32
|
||||
err = r.db.QueryRow(
|
||||
`SELECT id FROM tournament_entries WHERE char_id = $1 AND tournament_id = $2`,
|
||||
charID, tournamentID,
|
||||
).Scan(&id)
|
||||
if err != nil {
|
||||
return 0, fmt.Errorf("fetch tournament entry id: %w", err)
|
||||
}
|
||||
return id, nil
|
||||
}
|
||||
|
||||
// GetEntry returns the registration record for a character/tournament pair, or nil if not found.
|
||||
func (r *TournamentRepository) GetEntry(charID, tournamentID uint32) (*TournamentEntry, error) {
|
||||
var e TournamentEntry
|
||||
err := r.db.QueryRowx(
|
||||
`SELECT id, char_id, tournament_id
|
||||
FROM tournament_entries
|
||||
WHERE char_id = $1 AND tournament_id = $2`,
|
||||
charID, tournamentID,
|
||||
).StructScan(&e)
|
||||
if err == sql.ErrNoRows {
|
||||
return nil, nil
|
||||
}
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("get tournament entry: %w", err)
|
||||
}
|
||||
return &e, nil
|
||||
}
|
||||
|
||||
// SubmitResult records a completed tournament run for a character.
|
||||
func (r *TournamentRepository) SubmitResult(charID, tournamentID, eventID, questSlot, stageHandle uint32) error {
|
||||
_, err := r.db.Exec(
|
||||
`INSERT INTO tournament_results (char_id, tournament_id, event_id, quest_slot, stage_handle)
|
||||
VALUES ($1, $2, $3, $4, $5)`,
|
||||
charID, tournamentID, eventID, questSlot, stageHandle,
|
||||
)
|
||||
if err != nil {
|
||||
return fmt.Errorf("insert tournament result: %w", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// GetLeaderboard returns the ranked leaderboard for an event ID.
// Rank is assigned by submission order (first submitted = rank 1).
// Returns at most 100 entries.
func (r *TournamentRepository) GetLeaderboard(eventID uint32) ([]TournamentRankEntry, error) {
	// Local scan target shaped to match the SELECT aliases below; integer
	// fields are widened because the query casts columns to ::int.
	type row struct {
		CharID    uint32 `db:"char_id"`
		Rank      int64  `db:"rank"`
		Grade     int    `db:"grade"`
		HR        int    `db:"hr"`
		GR        int    `db:"gr"`
		CharName  string `db:"char_name"`
		GuildName string `db:"guild_name"`
	}
	var rows []row
	// NOTE(review): grade and gr both select c.gr — confirm grade should not
	// come from a distinct column.
	// NOTE(review): if a character can have multiple guild_characters rows,
	// the LEFT JOIN would duplicate leaderboard rows — confirm membership is
	// unique per character.
	err := r.db.Select(&rows, `
		SELECT
			r.char_id,
			ROW_NUMBER() OVER (ORDER BY r.submitted_at ASC)::int AS rank,
			c.gr::int AS grade,
			c.hr::int AS hr,
			c.gr::int AS gr,
			c.name AS char_name,
			COALESCE(g.name, '') AS guild_name
		FROM tournament_results r
		JOIN characters c ON c.id = r.char_id
		LEFT JOIN guild_characters gc ON gc.character_id = r.char_id
		LEFT JOIN guilds g ON g.id = gc.guild_id
		WHERE r.event_id = $1
		ORDER BY r.submitted_at ASC
		LIMIT 100`,
		eventID,
	)
	if err != nil {
		return nil, fmt.Errorf("get tournament leaderboard: %w", err)
	}
	// Convert the private scan rows into the public TournamentRankEntry shape.
	entries := make([]TournamentRankEntry, len(rows))
	for i, row := range rows {
		entries[i] = TournamentRankEntry{
			CharID:    row.CharID,
			Rank:      uint32(row.Rank),
			Grade:     uint16(row.Grade),
			HR:        uint16(row.HR),
			GR:        uint16(row.GR),
			CharName:  row.CharName,
			GuildName: row.GuildName,
		}
	}
	return entries, nil
}
|
||||
472
server/channelserver/scenario_json.go
Normal file
472
server/channelserver/scenario_json.go
Normal file
@@ -0,0 +1,472 @@
|
||||
package channelserver
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/base64"
|
||||
"encoding/binary"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
|
||||
"golang.org/x/text/encoding/japanese"
|
||||
"golang.org/x/text/transform"
|
||||
)
|
||||
|
||||
// ── Constants ─────────────────────────────────────────────────────────────────

// jkrMagic is the little-endian magic number at the start of a JKR-compressed
// blob: bytes 0x4A 0x4B 0x52 0x1A ('J','K','R',0x1A).
const jkrMagic uint32 = 0x1A524B4A

// scenarioChunkSizeLimit is the maximum byte length the client accepts for any
// single chunk (chunk0, chunk1, or chunk2). Confirmed from the client's response
// handler (FUN_11525c60 in mhfo-hd.dll): chunks larger than this are silently
// discarded, so the server must never serve a chunk exceeding this limit.
// (Not referenced by the parsing path in this file; presumably enforced by the
// compile/serve side.)
const scenarioChunkSizeLimit = 0x8000

// ── JSON schema types ────────────────────────────────────────────────────────

// ScenarioJSON is the open, human-editable representation of a scenario .bin file.
// Strings are stored as UTF-8; the compiler converts to/from Shift-JIS.
//
// Container layout (big-endian sizes):
//
//	@0x00: u32 BE chunk0_size
//	@0x04: u32 BE chunk1_size
//	[chunk0_data]
//	[chunk1_data]
//	u32 BE chunk2_size (only present when non-zero)
//	[chunk2_data]
//
// Each chunk must not exceed scenarioChunkSizeLimit bytes.
type ScenarioJSON struct {
	// Chunk0 holds quest name/description data (sub-header or inline format).
	Chunk0 *ScenarioChunk0JSON `json:"chunk0,omitempty"`
	// Chunk1 holds NPC dialog data (sub-header format or raw JKR blob).
	Chunk1 *ScenarioChunk1JSON `json:"chunk1,omitempty"`
	// Chunk2 holds JKR-compressed menu/title data.
	Chunk2 *ScenarioRawChunkJSON `json:"chunk2,omitempty"`
}

// ScenarioChunk0JSON represents chunk0, which is either sub-header or inline format.
// Exactly one of Subheader/Inline is non-nil.
type ScenarioChunk0JSON struct {
	Subheader *ScenarioSubheaderJSON `json:"subheader,omitempty"`
	Inline    []ScenarioInlineEntry  `json:"inline,omitempty"`
}

// ScenarioChunk1JSON represents chunk1, which is either sub-header or raw JKR.
// Exactly one of Subheader/JKR is non-nil.
type ScenarioChunk1JSON struct {
	Subheader *ScenarioSubheaderJSON `json:"subheader,omitempty"`
	JKR       *ScenarioRawChunkJSON  `json:"jkr,omitempty"`
}

// ScenarioSubheaderJSON represents a chunk in sub-header format.
//
// Sub-header binary layout (8 bytes, little-endian where applicable):
//
//	@0: u8  Type     (usually 0x01; the client treats this as a compound-container tag)
//	@1: u8  0x00     (pad; must be 0x00 — used by the server to detect this format vs inline)
//	@2: u16 Size     (total chunk size including this header, LE)
//	@4: u8  Count    (number of string entries)
//	@5: u8  Unknown1 (purpose unconfirmed; preserved round-trip)
//	@6: u8  MetaSize (byte length of the metadata block; 0x14 for chunk0, 0x2C for chunk1)
//	@7: u8  Unknown2 (purpose unconfirmed; preserved round-trip)
//	[MetaSize bytes: opaque metadata — see docs/scenario-format.md for field breakdown]
//	[null-terminated Shift-JIS strings, one per entry]
//	[0xFF end-of-strings sentinel]
//
// Chunk0 metadata (MetaSize=0x14, 10×u16 LE):
//
//	m[0]=CategoryID m[1]=MainID m[2]=0 m[3]=0 m[4]=0
//	m[5]=str0_len   m[6]=SceneRef (MainID when cat=0, 0xFFFF otherwise)
//	m[7..9]: not read by the client parser (FUN_1080d310 in mhfo-hd.dll)
//
// Chunk1 metadata (MetaSize=0x2C, 22×u16 LE):
//
//	m[8..17] are interpreted as signed offsets by the client (FUN_1080d3b0):
//	  negative     → (~value) + dialog_base (into post-0xFF dialog script)
//	  non-negative → value + strings_base (into strings section)
//	m[18..19] are read as individual bytes, not u16 pairs.
type ScenarioSubheaderJSON struct {
	// Type is the chunk type byte (almost always 0x01).
	Type uint8 `json:"type"`
	// Unknown1 is the byte at sub-header offset 5. Purpose not confirmed;
	// always 0x00 in observed files.
	Unknown1 uint8 `json:"unknown1"`
	// Unknown2 is the byte at sub-header offset 7. Purpose not confirmed;
	// always 0x00 in observed files.
	Unknown2 uint8 `json:"unknown2"`
	// Metadata is the opaque metadata block, base64-encoded.
	// It is preserved verbatim so the client receives correct values for all
	// fields, including those the server does not need to interpret.
	// For chunk0, the client only reads m[0]–m[6]; m[7]–m[9] are ignored.
	Metadata string `json:"metadata"`
	// Strings contains the human-editable text (UTF-8).
	// The compiler converts each string to null-terminated Shift-JIS on the wire.
	Strings []string `json:"strings"`
}

// ScenarioInlineEntry is one entry in an inline-format chunk0.
// Format on wire: {u8 index}{Shift-JIS string}{0x00}.
type ScenarioInlineEntry struct {
	Index uint8  `json:"index"`
	Text  string `json:"text"`
}

// ScenarioRawChunkJSON stores a JKR-compressed chunk as its raw compressed bytes.
// The data is served to the client as-is; the format of the decompressed content
// is not yet fully documented.
type ScenarioRawChunkJSON struct {
	// Data is the raw JKR-compressed bytes, base64-encoded.
	Data string `json:"data"`
}
|
||||
|
||||
// ── Parse: binary → JSON ─────────────────────────────────────────────────────
|
||||
|
||||
// ParseScenarioBinary reads a scenario .bin file and returns a ScenarioJSON
|
||||
// suitable for editing and re-compilation with CompileScenarioJSON.
|
||||
func ParseScenarioBinary(data []byte) (*ScenarioJSON, error) {
|
||||
if len(data) < 8 {
|
||||
return nil, fmt.Errorf("scenario data too short: %d bytes", len(data))
|
||||
}
|
||||
|
||||
c0Size := int(binary.BigEndian.Uint32(data[0:4]))
|
||||
c1Size := int(binary.BigEndian.Uint32(data[4:8]))
|
||||
|
||||
result := &ScenarioJSON{}
|
||||
|
||||
// Chunk0
|
||||
c0Off := 8
|
||||
if c0Size > 0 {
|
||||
if c0Off+c0Size > len(data) {
|
||||
return nil, fmt.Errorf("chunk0 size %d overruns data at offset %d", c0Size, c0Off)
|
||||
}
|
||||
chunk0, err := parseScenarioChunk0(data[c0Off : c0Off+c0Size])
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("chunk0: %w", err)
|
||||
}
|
||||
result.Chunk0 = chunk0
|
||||
}
|
||||
|
||||
// Chunk1
|
||||
c1Off := c0Off + c0Size
|
||||
if c1Size > 0 {
|
||||
if c1Off+c1Size > len(data) {
|
||||
return nil, fmt.Errorf("chunk1 size %d overruns data at offset %d", c1Size, c1Off)
|
||||
}
|
||||
chunk1, err := parseScenarioChunk1(data[c1Off : c1Off+c1Size])
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("chunk1: %w", err)
|
||||
}
|
||||
result.Chunk1 = chunk1
|
||||
}
|
||||
|
||||
// Chunk2 (preceded by its own 4-byte size field)
|
||||
c2HdrOff := c1Off + c1Size
|
||||
if c2HdrOff+4 <= len(data) {
|
||||
c2Size := int(binary.BigEndian.Uint32(data[c2HdrOff : c2HdrOff+4]))
|
||||
if c2Size > 0 {
|
||||
c2DataOff := c2HdrOff + 4
|
||||
if c2DataOff+c2Size > len(data) {
|
||||
return nil, fmt.Errorf("chunk2 size %d overruns data at offset %d", c2Size, c2DataOff)
|
||||
}
|
||||
result.Chunk2 = &ScenarioRawChunkJSON{
|
||||
Data: base64.StdEncoding.EncodeToString(data[c2DataOff : c2DataOff+c2Size]),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// parseScenarioChunk0 auto-detects sub-header vs inline format.
|
||||
// The second byte being 0x00 is the pad byte in sub-headers; non-zero means inline.
|
||||
func parseScenarioChunk0(data []byte) (*ScenarioChunk0JSON, error) {
|
||||
if len(data) < 2 {
|
||||
return &ScenarioChunk0JSON{}, nil
|
||||
}
|
||||
if data[1] == 0x00 {
|
||||
sh, err := parseScenarioSubheader(data)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &ScenarioChunk0JSON{Subheader: sh}, nil
|
||||
}
|
||||
entries, err := parseScenarioInline(data)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &ScenarioChunk0JSON{Inline: entries}, nil
|
||||
}
|
||||
|
||||
// parseScenarioChunk1 parses chunk1 as JKR or sub-header depending on magic bytes.
|
||||
// JKR-compressed chunks start with the magic 'J','K','R',0x1A (LE u32 = jkrMagic).
|
||||
func parseScenarioChunk1(data []byte) (*ScenarioChunk1JSON, error) {
|
||||
if len(data) >= 4 && binary.LittleEndian.Uint32(data[0:4]) == jkrMagic {
|
||||
return &ScenarioChunk1JSON{
|
||||
JKR: &ScenarioRawChunkJSON{
|
||||
Data: base64.StdEncoding.EncodeToString(data),
|
||||
},
|
||||
}, nil
|
||||
}
|
||||
sh, err := parseScenarioSubheader(data)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &ScenarioChunk1JSON{Subheader: sh}, nil
|
||||
}
|
||||
|
||||
// parseScenarioSubheader parses the 8-byte sub-header + metadata + strings.
|
||||
func parseScenarioSubheader(data []byte) (*ScenarioSubheaderJSON, error) {
|
||||
if len(data) < 8 {
|
||||
return nil, fmt.Errorf("sub-header chunk too short: %d bytes", len(data))
|
||||
}
|
||||
|
||||
// 8-byte sub-header fields:
|
||||
chunkType := data[0] // @0: chunk type (0x01 = compound container)
|
||||
// data[1] // @1: pad 0x00 (format detector; not stored)
|
||||
// data[2:4] // @2: u16 LE total size (recomputed on compile)
|
||||
entryCount := int(data[4]) // @4: number of string entries
|
||||
unknown1 := data[5] // @5: purpose unknown; always 0x00 in observed files
|
||||
metaSize := int(data[6]) // @6: byte length of metadata block (0x14=C0, 0x2C=C1)
|
||||
unknown2 := data[7] // @7: purpose unknown; always 0x00 in observed files
|
||||
|
||||
metaEnd := 8 + metaSize
|
||||
if metaEnd > len(data) {
|
||||
return nil, fmt.Errorf("metadata block (size %d) overruns chunk (len %d)", metaSize, len(data))
|
||||
}
|
||||
|
||||
metadata := base64.StdEncoding.EncodeToString(data[8:metaEnd])
|
||||
|
||||
strings, err := scenarioReadStrings(data, metaEnd, entryCount)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &ScenarioSubheaderJSON{
|
||||
Type: chunkType,
|
||||
Unknown1: unknown1,
|
||||
Unknown2: unknown2,
|
||||
Metadata: metadata,
|
||||
Strings: strings,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// parseScenarioInline parses chunk0 inline format: {u8 index}{Shift-JIS string}{0x00}.
|
||||
func parseScenarioInline(data []byte) ([]ScenarioInlineEntry, error) {
|
||||
var result []ScenarioInlineEntry
|
||||
pos := 0
|
||||
for pos < len(data) {
|
||||
if data[pos] == 0x00 {
|
||||
pos++
|
||||
continue
|
||||
}
|
||||
idx := data[pos]
|
||||
pos++
|
||||
if pos >= len(data) {
|
||||
break
|
||||
}
|
||||
end := pos
|
||||
for end < len(data) && data[end] != 0x00 {
|
||||
end++
|
||||
}
|
||||
if end > pos {
|
||||
text, err := scenarioDecodeShiftJIS(data[pos:end])
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("inline entry at 0x%x: %w", pos, err)
|
||||
}
|
||||
result = append(result, ScenarioInlineEntry{Index: idx, Text: text})
|
||||
}
|
||||
pos = end + 1 // skip null terminator
|
||||
}
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// scenarioReadStrings scans for null-terminated Shift-JIS strings starting at
|
||||
// offset start, reading at most maxCount strings (0 = unlimited). Stops on 0xFF.
|
||||
func scenarioReadStrings(data []byte, start, maxCount int) ([]string, error) {
|
||||
var result []string
|
||||
pos := start
|
||||
for pos < len(data) {
|
||||
if maxCount > 0 && len(result) >= maxCount {
|
||||
break
|
||||
}
|
||||
if data[pos] == 0x00 {
|
||||
pos++
|
||||
continue
|
||||
}
|
||||
if data[pos] == 0xFF {
|
||||
break
|
||||
}
|
||||
end := pos
|
||||
for end < len(data) && data[end] != 0x00 {
|
||||
end++
|
||||
}
|
||||
if end > pos {
|
||||
text, err := scenarioDecodeShiftJIS(data[pos:end])
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("string at 0x%x: %w", pos, err)
|
||||
}
|
||||
result = append(result, text)
|
||||
}
|
||||
pos = end + 1
|
||||
}
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// ── Compile: JSON → binary ───────────────────────────────────────────────────
|
||||
|
||||
// CompileScenarioJSON parses jsonData and compiles it to MHF scenario binary format.
|
||||
func CompileScenarioJSON(jsonData []byte) ([]byte, error) {
|
||||
var s ScenarioJSON
|
||||
if err := json.Unmarshal(jsonData, &s); err != nil {
|
||||
return nil, fmt.Errorf("unmarshal scenario JSON: %w", err)
|
||||
}
|
||||
return compileScenario(&s)
|
||||
}
|
||||
|
||||
func compileScenario(s *ScenarioJSON) ([]byte, error) {
|
||||
var chunk0, chunk1, chunk2 []byte
|
||||
var err error
|
||||
|
||||
if s.Chunk0 != nil {
|
||||
chunk0, err = compileScenarioChunk0(s.Chunk0)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("chunk0: %w", err)
|
||||
}
|
||||
}
|
||||
if s.Chunk1 != nil {
|
||||
chunk1, err = compileScenarioChunk1(s.Chunk1)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("chunk1: %w", err)
|
||||
}
|
||||
}
|
||||
if s.Chunk2 != nil {
|
||||
chunk2, err = compileScenarioRawChunk(s.Chunk2)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("chunk2: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
for i, chunk := range [][]byte{chunk0, chunk1, chunk2} {
|
||||
if len(chunk) > scenarioChunkSizeLimit {
|
||||
return nil, fmt.Errorf("chunk%d size %d exceeds client limit of %d bytes", i, len(chunk), scenarioChunkSizeLimit)
|
||||
}
|
||||
}
|
||||
|
||||
var buf bytes.Buffer
|
||||
// Container header: c0_size, c1_size (big-endian u32)
|
||||
_ = binary.Write(&buf, binary.BigEndian, uint32(len(chunk0)))
|
||||
_ = binary.Write(&buf, binary.BigEndian, uint32(len(chunk1)))
|
||||
buf.Write(chunk0)
|
||||
buf.Write(chunk1)
|
||||
// Chunk2 preceded by its own size field
|
||||
if len(chunk2) > 0 {
|
||||
_ = binary.Write(&buf, binary.BigEndian, uint32(len(chunk2)))
|
||||
buf.Write(chunk2)
|
||||
}
|
||||
|
||||
return buf.Bytes(), nil
|
||||
}
|
||||
|
||||
func compileScenarioChunk0(c *ScenarioChunk0JSON) ([]byte, error) {
|
||||
if c.Subheader != nil {
|
||||
return compileScenarioSubheader(c.Subheader)
|
||||
}
|
||||
return compileScenarioInline(c.Inline)
|
||||
}
|
||||
|
||||
func compileScenarioChunk1(c *ScenarioChunk1JSON) ([]byte, error) {
|
||||
if c.JKR != nil {
|
||||
return compileScenarioRawChunk(c.JKR)
|
||||
}
|
||||
if c.Subheader != nil {
|
||||
return compileScenarioSubheader(c.Subheader)
|
||||
}
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
// compileScenarioSubheader builds the binary sub-header chunk:
|
||||
// [8-byte header][metadata][null-terminated Shift-JIS strings][0xFF]
|
||||
func compileScenarioSubheader(sh *ScenarioSubheaderJSON) ([]byte, error) {
|
||||
meta, err := base64.StdEncoding.DecodeString(sh.Metadata)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("decode metadata base64: %w", err)
|
||||
}
|
||||
|
||||
var strBuf bytes.Buffer
|
||||
for _, s := range sh.Strings {
|
||||
sjis, err := scenarioEncodeShiftJIS(s)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
strBuf.Write(sjis) // sjis already has null terminator from helper
|
||||
}
|
||||
strBuf.WriteByte(0xFF) // end-of-strings sentinel
|
||||
|
||||
// Total size = 8-byte header + metadata + strings
|
||||
totalSize := 8 + len(meta) + strBuf.Len()
|
||||
|
||||
var buf bytes.Buffer
|
||||
buf.WriteByte(sh.Type)
|
||||
buf.WriteByte(0x00) // pad (format detector)
|
||||
// u16 LE total size
|
||||
buf.WriteByte(byte(totalSize))
|
||||
buf.WriteByte(byte(totalSize >> 8))
|
||||
buf.WriteByte(byte(len(sh.Strings))) // entry count
|
||||
buf.WriteByte(sh.Unknown1)
|
||||
buf.WriteByte(byte(len(meta))) // metadata total size
|
||||
buf.WriteByte(sh.Unknown2)
|
||||
buf.Write(meta)
|
||||
buf.Write(strBuf.Bytes())
|
||||
|
||||
return buf.Bytes(), nil
|
||||
}
|
||||
|
||||
// compileScenarioInline builds the inline-format chunk0 bytes.
|
||||
func compileScenarioInline(entries []ScenarioInlineEntry) ([]byte, error) {
|
||||
var buf bytes.Buffer
|
||||
for _, e := range entries {
|
||||
buf.WriteByte(e.Index)
|
||||
sjis, err := scenarioEncodeShiftJIS(e.Text)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
buf.Write(sjis) // includes null terminator
|
||||
}
|
||||
return buf.Bytes(), nil
|
||||
}
|
||||
|
||||
// compileScenarioRawChunk decodes the base64 raw chunk bytes.
|
||||
// These are served to the client as-is (no re-compression).
|
||||
func compileScenarioRawChunk(rc *ScenarioRawChunkJSON) ([]byte, error) {
|
||||
data, err := base64.StdEncoding.DecodeString(rc.Data)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("decode raw chunk base64: %w", err)
|
||||
}
|
||||
return data, nil
|
||||
}
|
||||
|
||||
// ── String helpers ───────────────────────────────────────────────────────────
|
||||
|
||||
// scenarioDecodeShiftJIS converts a raw Shift-JIS byte slice to UTF-8 string.
|
||||
func scenarioDecodeShiftJIS(b []byte) (string, error) {
|
||||
dec := japanese.ShiftJIS.NewDecoder()
|
||||
out, _, err := transform.Bytes(dec, b)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("shift-jis decode: %w", err)
|
||||
}
|
||||
return string(out), nil
|
||||
}
|
||||
|
||||
// scenarioEncodeShiftJIS converts a UTF-8 string to a null-terminated Shift-JIS byte slice.
|
||||
func scenarioEncodeShiftJIS(s string) ([]byte, error) {
|
||||
enc := japanese.ShiftJIS.NewEncoder()
|
||||
out, _, err := transform.Bytes(enc, []byte(s))
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("shift-jis encode %q: %w", s, err)
|
||||
}
|
||||
return append(out, 0x00), nil
|
||||
}
|
||||
|
||||
472
server/channelserver/scenario_json_test.go
Normal file
472
server/channelserver/scenario_json_test.go
Normal file
@@ -0,0 +1,472 @@
|
||||
package channelserver
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/binary"
|
||||
"encoding/json"
|
||||
"os"
|
||||
"testing"
|
||||
)
|
||||
|
||||
// ── test helpers ─────────────────────────────────────────────────────────────
|
||||
|
||||
// buildTestSubheaderChunk constructs a minimal sub-header format chunk.
|
||||
// metadata is zero-filled to metaSize bytes.
|
||||
func buildTestSubheaderChunk(t *testing.T, strings []string, metaSize int) []byte {
|
||||
t.Helper()
|
||||
var strBuf bytes.Buffer
|
||||
for _, s := range strings {
|
||||
sjis, err := scenarioEncodeShiftJIS(s)
|
||||
if err != nil {
|
||||
t.Fatalf("encode %q: %v", s, err)
|
||||
}
|
||||
strBuf.Write(sjis)
|
||||
}
|
||||
strBuf.WriteByte(0xFF) // end sentinel
|
||||
|
||||
totalSize := 8 + metaSize + strBuf.Len()
|
||||
meta := make([]byte, metaSize) // zero metadata
|
||||
|
||||
var buf bytes.Buffer
|
||||
buf.WriteByte(0x01) // type
|
||||
buf.WriteByte(0x00) // pad
|
||||
buf.WriteByte(byte(totalSize)) // size lo
|
||||
buf.WriteByte(byte(totalSize >> 8)) // size hi
|
||||
buf.WriteByte(byte(len(strings))) // entry count
|
||||
buf.WriteByte(0x00) // unknown1
|
||||
buf.WriteByte(byte(metaSize)) // metadata total
|
||||
buf.WriteByte(0x00) // unknown2
|
||||
buf.Write(meta)
|
||||
buf.Write(strBuf.Bytes())
|
||||
return buf.Bytes()
|
||||
}
|
||||
|
||||
// buildTestInlineChunk constructs an inline-format chunk0.
|
||||
func buildTestInlineChunk(t *testing.T, strings []string) []byte {
|
||||
t.Helper()
|
||||
var buf bytes.Buffer
|
||||
for i, s := range strings {
|
||||
buf.WriteByte(byte(i + 1)) // 1-based index
|
||||
sjis, err := scenarioEncodeShiftJIS(s)
|
||||
if err != nil {
|
||||
t.Fatalf("encode %q: %v", s, err)
|
||||
}
|
||||
buf.Write(sjis)
|
||||
}
|
||||
return buf.Bytes()
|
||||
}
|
||||
|
||||
// buildTestScenarioBinary assembles a complete scenario container for testing.
// An explicit zero-length chunk2 size field is always appended.
func buildTestScenarioBinary(t *testing.T, c0, c1 []byte) []byte {
	t.Helper()
	var buf bytes.Buffer
	writeSize := func(n int) {
		if err := binary.Write(&buf, binary.BigEndian, uint32(n)); err != nil {
			t.Fatal(err)
		}
	}
	writeSize(len(c0))
	writeSize(len(c1))
	buf.Write(c0)
	buf.Write(c1)
	writeSize(0) // c2 size = 0
	return buf.Bytes()
}
|
||||
|
||||
// extractStringsFromScenario parses a binary and returns all strings it contains.
|
||||
func extractStringsFromScenario(t *testing.T, data []byte) []string {
|
||||
t.Helper()
|
||||
s, err := ParseScenarioBinary(data)
|
||||
if err != nil {
|
||||
t.Fatalf("ParseScenarioBinary: %v", err)
|
||||
}
|
||||
var result []string
|
||||
if s.Chunk0 != nil {
|
||||
if s.Chunk0.Subheader != nil {
|
||||
result = append(result, s.Chunk0.Subheader.Strings...)
|
||||
}
|
||||
for _, e := range s.Chunk0.Inline {
|
||||
result = append(result, e.Text)
|
||||
}
|
||||
}
|
||||
if s.Chunk1 != nil && s.Chunk1.Subheader != nil {
|
||||
result = append(result, s.Chunk1.Subheader.Strings...)
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
// ── parse tests ──────────────────────────────────────────────────────────────
|
||||
|
||||
func TestParseScenarioBinary_TooShort(t *testing.T) {
|
||||
_, err := ParseScenarioBinary([]byte{0x00, 0x01})
|
||||
if err == nil {
|
||||
t.Error("expected error for short input")
|
||||
}
|
||||
}
|
||||
|
||||
func TestParseScenarioBinary_EmptyChunks(t *testing.T) {
|
||||
data := buildTestScenarioBinary(t, nil, nil)
|
||||
s, err := ParseScenarioBinary(data)
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %v", err)
|
||||
}
|
||||
if s.Chunk0 != nil || s.Chunk1 != nil || s.Chunk2 != nil {
|
||||
t.Error("expected all chunks nil for empty scenario")
|
||||
}
|
||||
}
|
||||
|
||||
func TestParseScenarioBinary_SubheaderChunk0(t *testing.T) {
|
||||
c0 := buildTestSubheaderChunk(t, []string{"Quest A", "Quest B"}, 4)
|
||||
data := buildTestScenarioBinary(t, c0, nil)
|
||||
|
||||
s, err := ParseScenarioBinary(data)
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %v", err)
|
||||
}
|
||||
if s.Chunk0 == nil || s.Chunk0.Subheader == nil {
|
||||
t.Fatal("expected chunk0 subheader")
|
||||
}
|
||||
got := s.Chunk0.Subheader.Strings
|
||||
want := []string{"Quest A", "Quest B"}
|
||||
if len(got) != len(want) {
|
||||
t.Fatalf("string count: got %d, want %d", len(got), len(want))
|
||||
}
|
||||
for i := range want {
|
||||
if got[i] != want[i] {
|
||||
t.Errorf("[%d]: got %q, want %q", i, got[i], want[i])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestParseScenarioBinary_InlineChunk0(t *testing.T) {
|
||||
c0 := buildTestInlineChunk(t, []string{"Item1", "Item2"})
|
||||
data := buildTestScenarioBinary(t, c0, nil)
|
||||
|
||||
s, err := ParseScenarioBinary(data)
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %v", err)
|
||||
}
|
||||
if s.Chunk0 == nil || len(s.Chunk0.Inline) == 0 {
|
||||
t.Fatal("expected chunk0 inline entries")
|
||||
}
|
||||
want := []string{"Item1", "Item2"}
|
||||
for i, e := range s.Chunk0.Inline {
|
||||
if e.Text != want[i] {
|
||||
t.Errorf("[%d]: got %q, want %q", i, e.Text, want[i])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestParseScenarioBinary_BothChunks(t *testing.T) {
|
||||
c0 := buildTestSubheaderChunk(t, []string{"Quest"}, 4)
|
||||
c1 := buildTestSubheaderChunk(t, []string{"NPC1", "NPC2"}, 8)
|
||||
data := buildTestScenarioBinary(t, c0, c1)
|
||||
|
||||
strings := extractStringsFromScenario(t, data)
|
||||
want := []string{"Quest", "NPC1", "NPC2"}
|
||||
if len(strings) != len(want) {
|
||||
t.Fatalf("string count: got %d, want %d", len(strings), len(want))
|
||||
}
|
||||
for i := range want {
|
||||
if strings[i] != want[i] {
|
||||
t.Errorf("[%d]: got %q, want %q", i, strings[i], want[i])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestParseScenarioBinary_Japanese(t *testing.T) {
|
||||
c0 := buildTestSubheaderChunk(t, []string{"テスト", "日本語"}, 4)
|
||||
data := buildTestScenarioBinary(t, c0, nil)
|
||||
|
||||
s, err := ParseScenarioBinary(data)
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %v", err)
|
||||
}
|
||||
want := []string{"テスト", "日本語"}
|
||||
got := s.Chunk0.Subheader.Strings
|
||||
for i := range want {
|
||||
if got[i] != want[i] {
|
||||
t.Errorf("[%d]: got %q, want %q", i, got[i], want[i])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ── compile tests ─────────────────────────────────────────────────────────────
|
||||
|
||||
func TestCompileScenarioJSON_Subheader(t *testing.T) {
|
||||
input := &ScenarioJSON{
|
||||
Chunk0: &ScenarioChunk0JSON{
|
||||
Subheader: &ScenarioSubheaderJSON{
|
||||
Type: 0x01,
|
||||
Unknown1: 0x00,
|
||||
Unknown2: 0x00,
|
||||
Metadata: "AAAABBBB", // base64 of 6 zero bytes
|
||||
Strings: []string{"Hello", "World"},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
jsonData, err := json.Marshal(input)
|
||||
if err != nil {
|
||||
t.Fatalf("marshal: %v", err)
|
||||
}
|
||||
|
||||
compiled, err := CompileScenarioJSON(jsonData)
|
||||
if err != nil {
|
||||
t.Fatalf("CompileScenarioJSON: %v", err)
|
||||
}
|
||||
|
||||
// Parse the compiled output and verify strings survive
|
||||
result, err := ParseScenarioBinary(compiled)
|
||||
if err != nil {
|
||||
t.Fatalf("ParseScenarioBinary on compiled output: %v", err)
|
||||
}
|
||||
if result.Chunk0 == nil || result.Chunk0.Subheader == nil {
|
||||
t.Fatal("expected chunk0 subheader in compiled output")
|
||||
}
|
||||
want := []string{"Hello", "World"}
|
||||
got := result.Chunk0.Subheader.Strings
|
||||
for i := range want {
|
||||
if i >= len(got) || got[i] != want[i] {
|
||||
t.Errorf("[%d]: got %q, want %q", i, got[i], want[i])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestCompileScenarioJSON_Inline(t *testing.T) {
|
||||
input := &ScenarioJSON{
|
||||
Chunk0: &ScenarioChunk0JSON{
|
||||
Inline: []ScenarioInlineEntry{
|
||||
{Index: 1, Text: "Sword"},
|
||||
{Index: 2, Text: "Shield"},
|
||||
},
|
||||
},
|
||||
}
|
||||
jsonData, _ := json.Marshal(input)
|
||||
compiled, err := CompileScenarioJSON(jsonData)
|
||||
if err != nil {
|
||||
t.Fatalf("CompileScenarioJSON: %v", err)
|
||||
}
|
||||
|
||||
result, err := ParseScenarioBinary(compiled)
|
||||
if err != nil {
|
||||
t.Fatalf("ParseScenarioBinary: %v", err)
|
||||
}
|
||||
if result.Chunk0 == nil || len(result.Chunk0.Inline) != 2 {
|
||||
t.Fatal("expected 2 inline entries")
|
||||
}
|
||||
if result.Chunk0.Inline[0].Text != "Sword" {
|
||||
t.Errorf("got %q, want Sword", result.Chunk0.Inline[0].Text)
|
||||
}
|
||||
if result.Chunk0.Inline[1].Text != "Shield" {
|
||||
t.Errorf("got %q, want Shield", result.Chunk0.Inline[1].Text)
|
||||
}
|
||||
}
|
||||
|
||||
// ── round-trip tests ─────────────────────────────────────────────────────────
|
||||
|
||||
func TestScenarioRoundTrip_Subheader(t *testing.T) {
|
||||
original := buildTestScenarioBinary(t,
|
||||
buildTestSubheaderChunk(t, []string{"QuestName", "Description"}, 0x14),
|
||||
buildTestSubheaderChunk(t, []string{"Dialog1", "Dialog2", "Dialog3"}, 0x2C),
|
||||
)
|
||||
|
||||
s, err := ParseScenarioBinary(original)
|
||||
if err != nil {
|
||||
t.Fatalf("parse: %v", err)
|
||||
}
|
||||
|
||||
jsonData, err := json.Marshal(s)
|
||||
if err != nil {
|
||||
t.Fatalf("marshal: %v", err)
|
||||
}
|
||||
|
||||
compiled, err := CompileScenarioJSON(jsonData)
|
||||
if err != nil {
|
||||
t.Fatalf("compile: %v", err)
|
||||
}
|
||||
|
||||
// Re-parse compiled and compare strings
|
||||
wantStrings := []string{"QuestName", "Description", "Dialog1", "Dialog2", "Dialog3"}
|
||||
gotStrings := extractStringsFromScenario(t, compiled)
|
||||
if len(gotStrings) != len(wantStrings) {
|
||||
t.Fatalf("string count: got %d, want %d", len(gotStrings), len(wantStrings))
|
||||
}
|
||||
for i := range wantStrings {
|
||||
if gotStrings[i] != wantStrings[i] {
|
||||
t.Errorf("[%d]: got %q, want %q", i, gotStrings[i], wantStrings[i])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestScenarioRoundTrip_Inline(t *testing.T) {
|
||||
original := buildTestScenarioBinary(t,
|
||||
buildTestInlineChunk(t, []string{"EpisodeA", "EpisodeB"}),
|
||||
nil,
|
||||
)
|
||||
|
||||
s, _ := ParseScenarioBinary(original)
|
||||
jsonData, _ := json.Marshal(s)
|
||||
compiled, err := CompileScenarioJSON(jsonData)
|
||||
if err != nil {
|
||||
t.Fatalf("compile: %v", err)
|
||||
}
|
||||
|
||||
got := extractStringsFromScenario(t, compiled)
|
||||
want := []string{"EpisodeA", "EpisodeB"}
|
||||
for i := range want {
|
||||
if i >= len(got) || got[i] != want[i] {
|
||||
t.Errorf("[%d]: got %q, want %q", i, got[i], want[i])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestScenarioRoundTrip_MetadataPreserved(t *testing.T) {
|
||||
// The metadata block must survive parse → JSON → compile unchanged.
|
||||
metaBytes := []byte{0x01, 0x02, 0x03, 0x04, 0xFF, 0xFE, 0xFD, 0xFC}
|
||||
// Build a chunk with custom metadata and unknown field values by hand.
|
||||
var buf bytes.Buffer
|
||||
str := []byte("A\x00\xFF")
|
||||
totalSize := 8 + len(metaBytes) + len(str)
|
||||
buf.WriteByte(0x01)
|
||||
buf.WriteByte(0x00)
|
||||
buf.WriteByte(byte(totalSize))
|
||||
buf.WriteByte(byte(totalSize >> 8))
|
||||
buf.WriteByte(0x01) // entry count
|
||||
buf.WriteByte(0xAA) // unknown1
|
||||
buf.WriteByte(byte(len(metaBytes)))
|
||||
buf.WriteByte(0xBB) // unknown2
|
||||
buf.Write(metaBytes)
|
||||
buf.Write(str)
|
||||
c0 := buf.Bytes()
|
||||
|
||||
data := buildTestScenarioBinary(t, c0, nil)
|
||||
s, err := ParseScenarioBinary(data)
|
||||
if err != nil {
|
||||
t.Fatalf("parse: %v", err)
|
||||
}
|
||||
sh := s.Chunk0.Subheader
|
||||
if sh.Type != 0x01 || sh.Unknown1 != 0xAA || sh.Unknown2 != 0xBB {
|
||||
t.Errorf("header fields: type=%02X unk1=%02X unk2=%02X", sh.Type, sh.Unknown1, sh.Unknown2)
|
||||
}
|
||||
|
||||
// Compile and parse again — metadata must survive
|
||||
jsonData, _ := json.Marshal(s)
|
||||
compiled, err := CompileScenarioJSON(jsonData)
|
||||
if err != nil {
|
||||
t.Fatalf("compile: %v", err)
|
||||
}
|
||||
s2, err := ParseScenarioBinary(compiled)
|
||||
if err != nil {
|
||||
t.Fatalf("re-parse: %v", err)
|
||||
}
|
||||
sh2 := s2.Chunk0.Subheader
|
||||
if sh2.Metadata != sh.Metadata {
|
||||
t.Errorf("metadata changed:\n before: %s\n after: %s", sh.Metadata, sh2.Metadata)
|
||||
}
|
||||
if sh2.Unknown1 != sh.Unknown1 || sh2.Unknown2 != sh.Unknown2 {
|
||||
t.Errorf("unknown fields changed: unk1 %02X→%02X unk2 %02X→%02X",
|
||||
sh.Unknown1, sh2.Unknown1, sh.Unknown2, sh2.Unknown2)
|
||||
}
|
||||
}
|
||||
|
||||
// ── real-file round-trip tests ────────────────────────────────────────────────
|
||||
|
||||
// scenarioBinPath is the relative path from the package to the scenario files.
// These tests are skipped if the directory does not exist (CI without game data).
// Files are looked up as <scenarioBinPath>/<name>.bin.
const scenarioBinPath = "../../bin/scenarios"
|
||||
|
||||
// TestScenarioRoundTrip_RealFiles round-trips real scenario binaries through
// parse → JSON → compile → parse, asserting that strings and the opaque
// metadata blocks survive unchanged. Skipped when game data is not installed.
func TestScenarioRoundTrip_RealFiles(t *testing.T) {
	samples := []struct {
		name   string
		wantC0 bool // expect chunk0 subheader
		wantC1 bool // expect chunk1 (subheader or JKR)
	}{
		// cat=0 basic quest scenarios (chunk0 subheader, no chunk1)
		{"0_0_0_0_S0_T101_C0", true, false},
		{"0_0_0_0_S1_T101_C0", true, false},
		{"0_0_0_0_S5_T101_C0", true, false},
		// cat=1 GR scenarios (chunk0 subheader, T101 has no chunk1)
		{"1_0_0_0_S0_T101_C0", true, false},
		{"1_0_0_0_S1_T101_C0", true, false},
		// cat=3 item exchange (chunk0 subheader, chunk1 subheader with extra data)
		{"3_0_0_0_S0_T103_C0", true, true},
		// multi-chapter file with chunk1 subheader
		{"0_0_0_0_S0_T103_C0", true, true},
	}

	for _, tc := range samples {
		tc := tc // capture loop variable for the subtest closure (pre-Go 1.22 semantics)
		t.Run(tc.name, func(t *testing.T) {
			path := scenarioBinPath + "/" + tc.name + ".bin"
			original, err := os.ReadFile(path)
			if err != nil {
				// Missing game data is expected in CI; skip rather than fail.
				t.Skipf("scenario file not found (game data not present): %v", err)
			}

			// Parse binary → JSON schema
			parsed, err := ParseScenarioBinary(original)
			if err != nil {
				t.Fatalf("ParseScenarioBinary: %v", err)
			}

			// Verify expected chunk presence
			if tc.wantC0 && (parsed.Chunk0 == nil || parsed.Chunk0.Subheader == nil) {
				t.Error("expected chunk0 subheader")
			}
			if tc.wantC1 && parsed.Chunk1 == nil {
				t.Error("expected chunk1")
			}

			// Marshal to JSON
			jsonData, err := json.Marshal(parsed)
			if err != nil {
				t.Fatalf("json.Marshal: %v", err)
			}

			// Compile JSON → binary
			compiled, err := CompileScenarioJSON(jsonData)
			if err != nil {
				t.Fatalf("CompileScenarioJSON: %v", err)
			}

			// Re-parse compiled output
			result, err := ParseScenarioBinary(compiled)
			if err != nil {
				t.Fatalf("ParseScenarioBinary on compiled output: %v", err)
			}

			// Verify strings survive round-trip unchanged
			origStrings := extractStringsFromScenario(t, original)
			gotStrings := extractStringsFromScenario(t, compiled)
			if len(gotStrings) != len(origStrings) {
				t.Fatalf("string count changed: %d → %d", len(origStrings), len(gotStrings))
			}
			for i := range origStrings {
				if gotStrings[i] != origStrings[i] {
					t.Errorf("[%d]: %q → %q", i, origStrings[i], gotStrings[i])
				}
			}

			// Verify metadata is preserved byte-for-byte
			if parsed.Chunk0 != nil && parsed.Chunk0.Subheader != nil {
				if result.Chunk0 == nil || result.Chunk0.Subheader == nil {
					t.Fatal("chunk0 subheader lost in round-trip")
				}
				if result.Chunk0.Subheader.Metadata != parsed.Chunk0.Subheader.Metadata {
					t.Errorf("chunk0 metadata changed after round-trip")
				}
			}
			if parsed.Chunk1 != nil && parsed.Chunk1.Subheader != nil {
				if result.Chunk1 == nil || result.Chunk1.Subheader == nil {
					t.Fatal("chunk1 subheader lost in round-trip")
				}
				if result.Chunk1.Subheader.Metadata != parsed.Chunk1.Subheader.Metadata {
					t.Errorf("chunk1 metadata changed after round-trip")
				}
			}
		})
	}
}
|
||||
@@ -280,16 +280,16 @@ func (svc *GuildService) PostScout(actorCharID, targetCharID uint32, strings Sco
|
||||
return fmt.Errorf("guild lookup: %w", err)
|
||||
}
|
||||
|
||||
hasApp, err := svc.guildRepo.HasApplication(guild.ID, targetCharID)
|
||||
hasInvite, err := svc.guildRepo.HasInvite(guild.ID, targetCharID)
|
||||
if err != nil {
|
||||
return fmt.Errorf("check application: %w", err)
|
||||
return fmt.Errorf("check invite: %w", err)
|
||||
}
|
||||
if hasApp {
|
||||
if hasInvite {
|
||||
return ErrAlreadyInvited
|
||||
}
|
||||
|
||||
err = svc.guildRepo.CreateApplicationWithMail(
|
||||
guild.ID, targetCharID, actorCharID, GuildApplicationTypeInvited,
|
||||
err = svc.guildRepo.CreateInviteWithMail(
|
||||
guild.ID, targetCharID, actorCharID,
|
||||
actorCharID, targetCharID,
|
||||
strings.Title,
|
||||
fmt.Sprintf(strings.Body, guild.Name))
|
||||
@@ -309,8 +309,8 @@ func (svc *GuildService) AnswerScout(charID, leaderID uint32, accept bool, strin
|
||||
return nil, fmt.Errorf("guild lookup for leader %d: %w", leaderID, err)
|
||||
}
|
||||
|
||||
app, err := svc.guildRepo.GetApplication(guild.ID, charID, GuildApplicationTypeInvited)
|
||||
if app == nil || err != nil {
|
||||
hasInvite, err := svc.guildRepo.HasInvite(guild.ID, charID)
|
||||
if err != nil || !hasInvite {
|
||||
return &AnswerScoutResult{
|
||||
GuildID: guild.ID,
|
||||
Success: false,
|
||||
@@ -319,13 +319,13 @@ func (svc *GuildService) AnswerScout(charID, leaderID uint32, accept bool, strin
|
||||
|
||||
var mails []Mail
|
||||
if accept {
|
||||
err = svc.guildRepo.AcceptApplication(guild.ID, charID)
|
||||
err = svc.guildRepo.AcceptInvite(guild.ID, charID)
|
||||
mails = []Mail{
|
||||
{SenderID: 0, RecipientID: charID, Subject: strings.SuccessTitle, Body: fmt.Sprintf(strings.SuccessBody, guild.Name), IsSystemMessage: true},
|
||||
{SenderID: charID, RecipientID: leaderID, Subject: strings.AcceptedTitle, Body: fmt.Sprintf(strings.AcceptedBody, guild.Name), IsSystemMessage: true},
|
||||
}
|
||||
} else {
|
||||
err = svc.guildRepo.RejectApplication(guild.ID, charID)
|
||||
err = svc.guildRepo.DeclineInvite(guild.ID, charID)
|
||||
mails = []Mail{
|
||||
{SenderID: 0, RecipientID: charID, Subject: strings.RejectedTitle, Body: fmt.Sprintf(strings.RejectedBody, guild.Name), IsSystemMessage: true},
|
||||
{SenderID: charID, RecipientID: leaderID, Subject: strings.DeclinedTitle, Body: fmt.Sprintf(strings.DeclinedBody, guild.Name), IsSystemMessage: true},
|
||||
|
||||
@@ -385,14 +385,14 @@ func TestGuildService_PostScout(t *testing.T) {
|
||||
strings := ScoutInviteStrings{Title: "Invite", Body: "Join 「%s」"}
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
membership *GuildMember
|
||||
guild *Guild
|
||||
hasApp bool
|
||||
hasAppErr error
|
||||
createAppErr error
|
||||
getMemberErr error
|
||||
wantErr error
|
||||
name string
|
||||
membership *GuildMember
|
||||
guild *Guild
|
||||
hasInvite bool
|
||||
hasInviteErr error
|
||||
createAppErr error
|
||||
getMemberErr error
|
||||
wantErr error
|
||||
}{
|
||||
{
|
||||
name: "successful scout",
|
||||
@@ -403,7 +403,7 @@ func TestGuildService_PostScout(t *testing.T) {
|
||||
name: "already invited",
|
||||
membership: &GuildMember{GuildID: 10, CharID: 1, IsLeader: true, OrderIndex: 1},
|
||||
guild: &Guild{ID: 10, Name: "TestGuild"},
|
||||
hasApp: true,
|
||||
hasInvite: true,
|
||||
wantErr: ErrAlreadyInvited,
|
||||
},
|
||||
{
|
||||
@@ -423,11 +423,11 @@ func TestGuildService_PostScout(t *testing.T) {
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
guildMock := &mockGuildRepo{
|
||||
membership: tt.membership,
|
||||
hasAppResult: tt.hasApp,
|
||||
hasAppErr: tt.hasAppErr,
|
||||
createAppErr: tt.createAppErr,
|
||||
getMemberErr: tt.getMemberErr,
|
||||
membership: tt.membership,
|
||||
hasInviteResult: tt.hasInvite,
|
||||
hasInviteErr: tt.hasInviteErr,
|
||||
createAppErr: tt.createAppErr,
|
||||
getMemberErr: tt.getMemberErr,
|
||||
}
|
||||
guildMock.guild = tt.guild
|
||||
svc := newTestGuildService(guildMock, &mockMailRepo{})
|
||||
@@ -468,7 +468,7 @@ func TestGuildService_AnswerScout(t *testing.T) {
|
||||
name string
|
||||
accept bool
|
||||
guild *Guild
|
||||
application *GuildApplication
|
||||
hasInvite bool
|
||||
acceptErr error
|
||||
rejectErr error
|
||||
sendErr error
|
||||
@@ -477,13 +477,13 @@ func TestGuildService_AnswerScout(t *testing.T) {
|
||||
wantErr error
|
||||
wantMailCount int
|
||||
wantAccepted uint32
|
||||
wantRejected uint32
|
||||
wantDeclined uint32
|
||||
}{
|
||||
{
|
||||
name: "accept invitation",
|
||||
accept: true,
|
||||
guild: &Guild{ID: 10, Name: "TestGuild", GuildLeader: GuildLeader{LeaderCharID: 50}},
|
||||
application: &GuildApplication{GuildID: 10, CharID: 1},
|
||||
hasInvite: true,
|
||||
wantSuccess: true,
|
||||
wantMailCount: 2,
|
||||
wantAccepted: 1,
|
||||
@@ -492,16 +492,16 @@ func TestGuildService_AnswerScout(t *testing.T) {
|
||||
name: "decline invitation",
|
||||
accept: false,
|
||||
guild: &Guild{ID: 10, Name: "TestGuild", GuildLeader: GuildLeader{LeaderCharID: 50}},
|
||||
application: &GuildApplication{GuildID: 10, CharID: 1},
|
||||
hasInvite: true,
|
||||
wantSuccess: true,
|
||||
wantMailCount: 2,
|
||||
wantRejected: 1,
|
||||
wantDeclined: 1,
|
||||
},
|
||||
{
|
||||
name: "application missing",
|
||||
accept: true,
|
||||
guild: &Guild{ID: 10, Name: "TestGuild", GuildLeader: GuildLeader{LeaderCharID: 50}},
|
||||
application: nil,
|
||||
hasInvite: false,
|
||||
wantSuccess: false,
|
||||
wantErr: ErrApplicationMissing,
|
||||
},
|
||||
@@ -516,7 +516,7 @@ func TestGuildService_AnswerScout(t *testing.T) {
|
||||
name: "mail error is best-effort",
|
||||
accept: true,
|
||||
guild: &Guild{ID: 10, Name: "TestGuild", GuildLeader: GuildLeader{LeaderCharID: 50}},
|
||||
application: &GuildApplication{GuildID: 10, CharID: 1},
|
||||
hasInvite: true,
|
||||
sendErr: errors.New("mail failed"),
|
||||
wantSuccess: true,
|
||||
wantMailCount: 2,
|
||||
@@ -527,9 +527,9 @@ func TestGuildService_AnswerScout(t *testing.T) {
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
guildMock := &mockGuildRepo{
|
||||
application: tt.application,
|
||||
acceptErr: tt.acceptErr,
|
||||
rejectErr: tt.rejectErr,
|
||||
hasInviteResult: tt.hasInvite,
|
||||
acceptErr: tt.acceptErr,
|
||||
rejectErr: tt.rejectErr,
|
||||
}
|
||||
guildMock.guild = tt.guild
|
||||
guildMock.getErr = tt.getErr
|
||||
@@ -559,11 +559,11 @@ func TestGuildService_AnswerScout(t *testing.T) {
|
||||
if len(mailMock.sentMails) != tt.wantMailCount {
|
||||
t.Errorf("sentMails count = %d, want %d", len(mailMock.sentMails), tt.wantMailCount)
|
||||
}
|
||||
if tt.wantAccepted != 0 && guildMock.acceptedCharID != tt.wantAccepted {
|
||||
t.Errorf("acceptedCharID = %d, want %d", guildMock.acceptedCharID, tt.wantAccepted)
|
||||
if tt.wantAccepted != 0 && guildMock.acceptInviteCharID != tt.wantAccepted {
|
||||
t.Errorf("acceptInviteCharID = %d, want %d", guildMock.acceptInviteCharID, tt.wantAccepted)
|
||||
}
|
||||
if tt.wantRejected != 0 && guildMock.rejectedCharID != tt.wantRejected {
|
||||
t.Errorf("rejectedCharID = %d, want %d", guildMock.rejectedCharID, tt.wantRejected)
|
||||
if tt.wantDeclined != 0 && guildMock.declineInviteCharID != tt.wantDeclined {
|
||||
t.Errorf("declineInviteCharID = %d, want %d", guildMock.declineInviteCharID, tt.wantDeclined)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
package channelserver
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/binary"
|
||||
"errors"
|
||||
"fmt"
|
||||
@@ -11,6 +12,7 @@ import (
|
||||
"time"
|
||||
|
||||
"erupe-ce/common/byteframe"
|
||||
"erupe-ce/common/decryption"
|
||||
cfg "erupe-ce/config"
|
||||
"erupe-ce/network"
|
||||
"erupe-ce/network/binpacket"
|
||||
@@ -74,6 +76,7 @@ type Server struct {
|
||||
miscRepo MiscRepo
|
||||
scenarioRepo ScenarioRepo
|
||||
mercenaryRepo MercenaryRepo
|
||||
tournamentRepo TournamentRepo
|
||||
mailService *MailService
|
||||
guildService *GuildService
|
||||
achievementService *AchievementService
|
||||
@@ -167,6 +170,7 @@ func NewServer(config *Config) *Server {
|
||||
s.miscRepo = NewMiscRepository(config.DB)
|
||||
s.scenarioRepo = NewScenarioRepository(config.DB)
|
||||
s.mercenaryRepo = NewMercenaryRepository(config.DB)
|
||||
s.tournamentRepo = NewTournamentRepository(config.DB)
|
||||
|
||||
s.mailService = NewMailService(s.mailRepo, s.guildRepo, s.logger)
|
||||
s.guildService = NewGuildService(s.guildRepo, s.mailService, s.charRepo, s.logger)
|
||||
@@ -245,6 +249,51 @@ func (s *Server) Shutdown() {
|
||||
|
||||
}
|
||||
|
||||
// ShutdownAndDrain stops accepting new connections, force-closes every active
|
||||
// session so that their logoutPlayer cleanup runs (saves character data, removes
|
||||
// from stages, etc.), then waits until all sessions have been removed from the
|
||||
// sessions map or ctx is cancelled. It is safe to call multiple times.
|
||||
func (s *Server) ShutdownAndDrain(ctx context.Context) {
|
||||
s.Shutdown()
|
||||
|
||||
// Snapshot all active connections while holding the lock, then close them
|
||||
// outside the lock so we don't hold it during I/O. Closing a connection
|
||||
// causes the session's recvLoop to see io.EOF and call logoutPlayer(), which
|
||||
// in turn deletes the entry from s.sessions under the server mutex.
|
||||
s.Lock()
|
||||
conns := make([]net.Conn, 0, len(s.sessions))
|
||||
for conn := range s.sessions {
|
||||
conns = append(conns, conn)
|
||||
}
|
||||
s.Unlock()
|
||||
|
||||
for _, conn := range conns {
|
||||
_ = conn.Close()
|
||||
}
|
||||
|
||||
// Poll until logoutPlayer has removed every session or the deadline passes.
|
||||
ticker := time.NewTicker(50 * time.Millisecond)
|
||||
defer ticker.Stop()
|
||||
for {
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
s.Lock()
|
||||
remaining := len(s.sessions)
|
||||
s.Unlock()
|
||||
s.logger.Warn("Shutdown drain timed out", zap.Int("remaining_sessions", remaining))
|
||||
return
|
||||
case <-ticker.C:
|
||||
s.Lock()
|
||||
n := len(s.sessions)
|
||||
s.Unlock()
|
||||
if n == 0 {
|
||||
s.logger.Info("Shutdown drain complete")
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (s *Server) acceptClients() {
|
||||
for {
|
||||
conn, err := s.listener.Accept()
|
||||
@@ -449,31 +498,52 @@ func (s *Server) Season() uint8 {
|
||||
return uint8(((TimeAdjusted().Unix() / secsPerDay) + sid) % 3)
|
||||
}
|
||||
|
||||
// ecdMagic is the ECD magic as read by binary.LittleEndian.Uint32.
|
||||
// On-disk bytes: 65 63 64 1A ("ecd\x1a"), LE-decoded: 0x1A646365.
|
||||
const ecdMagic = uint32(0x1A646365)
|
||||
|
||||
// loadRengokuBinary reads and validates rengoku_data.bin from binPath.
|
||||
// Returns the raw bytes on success, or nil if the file is missing or invalid.
|
||||
// loadRengokuBinary loads and caches Hunting Road config. It tries
|
||||
// rengoku_data.bin first and falls back to rengoku_data.json (built on the
|
||||
// fly). Returns ECD-encrypted bytes ready to serve, or nil if no valid source
|
||||
// is found.
|
||||
func loadRengokuBinary(binPath string, logger *zap.Logger) []byte {
|
||||
path := filepath.Join(binPath, "rengoku_data.bin")
|
||||
data, err := os.ReadFile(path)
|
||||
if err != nil {
|
||||
logger.Warn("rengoku_data.bin not found, Hunting Road will be unavailable",
|
||||
zap.String("path", path), zap.Error(err))
|
||||
return nil
|
||||
if err == nil {
|
||||
if len(data) < 4 {
|
||||
logger.Warn("rengoku_data.bin too small, ignoring",
|
||||
zap.Int("bytes", len(data)))
|
||||
} else if magic := binary.LittleEndian.Uint32(data[:4]); magic != decryption.ECDMagic {
|
||||
logger.Warn("rengoku_data.bin has invalid ECD magic, ignoring",
|
||||
zap.String("expected", fmt.Sprintf("0x%08x", decryption.ECDMagic)),
|
||||
zap.String("got", fmt.Sprintf("0x%08x", magic)))
|
||||
} else {
|
||||
// Decrypt and decompress to validate the internal structure and emit a
|
||||
// human-readable summary at startup. Failures here are non-fatal: the
|
||||
// encrypted blob is still served to clients unchanged.
|
||||
if plain, decErr := decryption.DecodeECD(data); decErr != nil {
|
||||
logger.Warn("rengoku_data.bin ECD decryption failed — serving anyway",
|
||||
zap.Error(decErr))
|
||||
} else {
|
||||
raw := decryption.UnpackSimple(plain)
|
||||
if info, parseErr := parseRengokuBinary(raw); parseErr != nil {
|
||||
logger.Warn("rengoku_data.bin structural validation failed",
|
||||
zap.Error(parseErr))
|
||||
} else {
|
||||
logger.Info("Hunting Road config",
|
||||
zap.Int("multi_floors", info.MultiFloors),
|
||||
zap.Int("multi_spawn_tables", info.MultiSpawnTables),
|
||||
zap.Int("solo_floors", info.SoloFloors),
|
||||
zap.Int("solo_spawn_tables", info.SoloSpawnTables),
|
||||
zap.Int("unique_monsters", info.UniqueMonsters),
|
||||
)
|
||||
}
|
||||
}
|
||||
logger.Info("Loaded rengoku_data.bin", zap.Int("bytes", len(data)))
|
||||
return data
|
||||
}
|
||||
}
|
||||
if len(data) < 4 {
|
||||
logger.Warn("rengoku_data.bin too small, ignoring",
|
||||
zap.Int("bytes", len(data)))
|
||||
return nil
|
||||
|
||||
if enc := loadRengokuFromJSON(binPath, logger); enc != nil {
|
||||
return enc
|
||||
}
|
||||
if magic := binary.LittleEndian.Uint32(data[:4]); magic != ecdMagic {
|
||||
logger.Warn("rengoku_data.bin has invalid ECD magic, ignoring",
|
||||
zap.String("expected", "0x1a646365"),
|
||||
zap.String("got", fmt.Sprintf("0x%08x", magic)))
|
||||
return nil
|
||||
}
|
||||
logger.Info("Loaded rengoku_data.bin", zap.Int("bytes", len(data)))
|
||||
return data
|
||||
|
||||
logger.Warn("No Hunting Road config found (rengoku_data.bin or rengoku_data.json), Hunting Road will be unavailable")
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -11,6 +11,7 @@ import (
|
||||
"time"
|
||||
|
||||
cfg "erupe-ce/config"
|
||||
"erupe-ce/common/decryption"
|
||||
"erupe-ce/network/clientctx"
|
||||
"erupe-ce/network/mhfpacket"
|
||||
|
||||
@@ -750,7 +751,7 @@ func TestLoadRengokuBinary_ValidECD(t *testing.T) {
|
||||
dir := t.TempDir()
|
||||
// Build a minimal valid ECD file: magic + some payload
|
||||
data := make([]byte, 16)
|
||||
binary.LittleEndian.PutUint32(data[:4], ecdMagic)
|
||||
binary.LittleEndian.PutUint32(data[:4], decryption.ECDMagic)
|
||||
if err := os.WriteFile(filepath.Join(dir, "rengoku_data.bin"), data, 0644); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
@@ -1,7 +1,15 @@
|
||||
package channelserver
|
||||
|
||||
// Bead holds the display strings for a single kiju prayer bead type.
|
||||
type Bead struct {
|
||||
ID int
|
||||
Name string
|
||||
Description string
|
||||
}
|
||||
|
||||
type i18n struct {
|
||||
language string
|
||||
beads []Bead
|
||||
cafe struct {
|
||||
reset string
|
||||
}
|
||||
@@ -79,7 +87,9 @@ type i18n struct {
|
||||
berserkSmall string
|
||||
}
|
||||
guild struct {
|
||||
invite struct {
|
||||
rookieGuildName string
|
||||
returnGuildName string
|
||||
invite struct {
|
||||
title string
|
||||
body string
|
||||
success struct {
|
||||
@@ -102,140 +112,37 @@ type i18n struct {
|
||||
}
|
||||
}
|
||||
|
||||
// beadName returns the localised name for a bead type.
|
||||
func (i *i18n) beadName(beadType int) string {
|
||||
for _, b := range i.beads {
|
||||
if b.ID == beadType {
|
||||
return b.Name
|
||||
}
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// beadDescription returns the localised description for a bead type.
|
||||
func (i *i18n) beadDescription(beadType int) string {
|
||||
for _, b := range i.beads {
|
||||
if b.ID == beadType {
|
||||
return b.Description
|
||||
}
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// getLangStrings returns the i18n string table for the configured language,
|
||||
// falling back to English for unknown language codes.
|
||||
func getLangStrings(s *Server) i18n {
|
||||
var i i18n
|
||||
switch s.erupeConfig.Language {
|
||||
case "jp":
|
||||
i.language = "日本語"
|
||||
i.cafe.reset = "%d/%dにリセット"
|
||||
i.timer = "タイマー:%02d'%02d\"%02d.%03d (%df)"
|
||||
|
||||
i.commands.noOp = "You don't have permission to use this command"
|
||||
i.commands.disabled = "%sのコマンドは無効です"
|
||||
i.commands.reload = "リロードします"
|
||||
i.commands.kqf.get = "現在のキークエストフラグ:%x"
|
||||
i.commands.kqf.set.error = "キークエコマンドエラー 例:%s set xxxxxxxxxxxxxxxx"
|
||||
i.commands.kqf.set.success = "キークエストのフラグが更新されました。ワールド/ランドを移動してください"
|
||||
i.commands.kqf.version = "This command is disabled prior to MHFG10"
|
||||
i.commands.rights.error = "コース更新コマンドエラー 例:%s x"
|
||||
i.commands.rights.success = "コース情報を更新しました:%d"
|
||||
i.commands.course.error = "コース確認コマンドエラー 例:%s <name>"
|
||||
i.commands.course.disabled = "%sコースは無効です"
|
||||
i.commands.course.enabled = "%sコースは有効です"
|
||||
i.commands.course.locked = "%sコースはロックされています"
|
||||
i.commands.teleport.error = "テレポートコマンドエラー 構文:%s x y"
|
||||
i.commands.teleport.success = "%d %dにテレポート"
|
||||
i.commands.psn.error = "PSN連携コマンドエラー 例:%s <psn id>"
|
||||
i.commands.psn.success = "PSN「%s」が連携されています"
|
||||
i.commands.psn.exists = "PSNは既存のユーザに接続されています"
|
||||
|
||||
i.commands.discord.success = "あなたのDiscordトークン:%s"
|
||||
|
||||
i.commands.ban.noUser = "Could not find user"
|
||||
i.commands.ban.success = "Successfully banned %s"
|
||||
i.commands.ban.invalid = "Invalid Character ID"
|
||||
i.commands.ban.error = "Error in command. Format: %s <id> [length]"
|
||||
i.commands.ban.length = " until %s"
|
||||
|
||||
i.commands.ravi.noCommand = "ラヴィコマンドが指定されていません"
|
||||
i.commands.ravi.start.success = "大討伐を開始します"
|
||||
i.commands.ravi.start.error = "大討伐は既に開催されています"
|
||||
i.commands.ravi.multiplier = "ラヴィダメージ倍率:x%.2f"
|
||||
i.commands.ravi.res.success = "復活支援を実行します"
|
||||
i.commands.ravi.res.error = "復活支援は実行されませんでした"
|
||||
i.commands.ravi.sed.success = "鎮静支援を実行します"
|
||||
i.commands.ravi.request = "鎮静支援を要請します"
|
||||
i.commands.ravi.error = "ラヴィコマンドが認識されません"
|
||||
i.commands.ravi.noPlayers = "誰も大討伐に参加していません"
|
||||
i.commands.ravi.version = "This command is disabled outside of MHFZZ"
|
||||
|
||||
i.raviente.berserk = "<大討伐:猛狂期>が開催されました!"
|
||||
i.raviente.extreme = "<大討伐:猛狂期【極】>が開催されました!"
|
||||
i.raviente.extremeLimited = "<大討伐:猛狂期【極】(制限付)>が開催されました!"
|
||||
i.raviente.berserkSmall = "<大討伐:猛狂期(小数)>が開催されました!"
|
||||
|
||||
i.guild.invite.title = "猟団勧誘のご案内"
|
||||
i.guild.invite.body = "猟団「%s」からの勧誘通知です。\n「勧誘に返答」より、返答を行ってください。"
|
||||
|
||||
i.guild.invite.success.title = "成功"
|
||||
i.guild.invite.success.body = "あなたは「%s」に参加できました。"
|
||||
|
||||
i.guild.invite.accepted.title = "承諾されました"
|
||||
i.guild.invite.accepted.body = "招待した狩人が「%s」への招待を承諾しました。"
|
||||
|
||||
i.guild.invite.rejected.title = "却下しました"
|
||||
i.guild.invite.rejected.body = "あなたは「%s」への参加を却下しました。"
|
||||
|
||||
i.guild.invite.declined.title = "辞退しました"
|
||||
i.guild.invite.declined.body = "招待した狩人が「%s」への招待を辞退しました。"
|
||||
return langJapanese()
|
||||
case "fr":
|
||||
return langFrench()
|
||||
case "es":
|
||||
return langSpanish()
|
||||
default:
|
||||
i.language = "English"
|
||||
i.cafe.reset = "Resets on %d/%d"
|
||||
i.timer = "Time: %02d:%02d:%02d.%03d (%df)"
|
||||
|
||||
i.commands.noOp = "You don't have permission to use this command"
|
||||
i.commands.disabled = "%s command is disabled"
|
||||
i.commands.reload = "Reloading players..."
|
||||
i.commands.playtime = "Playtime: %d hours %d minutes %d seconds"
|
||||
|
||||
i.commands.kqf.get = "KQF: %x"
|
||||
i.commands.kqf.set.error = "Error in command. Format: %s set xxxxxxxxxxxxxxxx"
|
||||
i.commands.kqf.set.success = "KQF set, please switch Land/World"
|
||||
i.commands.kqf.version = "This command is disabled prior to MHFG10"
|
||||
i.commands.rights.error = "Error in command. Format: %s x"
|
||||
i.commands.rights.success = "Set rights integer: %d"
|
||||
i.commands.course.error = "Error in command. Format: %s <name>"
|
||||
i.commands.course.disabled = "%s Course disabled"
|
||||
i.commands.course.enabled = "%s Course enabled"
|
||||
i.commands.course.locked = "%s Course is locked"
|
||||
i.commands.teleport.error = "Error in command. Format: %s x y"
|
||||
i.commands.teleport.success = "Teleporting to %d %d"
|
||||
i.commands.psn.error = "Error in command. Format: %s <psn id>"
|
||||
i.commands.psn.success = "Connected PSN ID: %s"
|
||||
i.commands.psn.exists = "PSN ID is connected to another account!"
|
||||
|
||||
i.commands.discord.success = "Your Discord token: %s"
|
||||
|
||||
i.commands.ban.noUser = "Could not find user"
|
||||
i.commands.ban.success = "Successfully banned %s"
|
||||
i.commands.ban.invalid = "Invalid Character ID"
|
||||
i.commands.ban.error = "Error in command. Format: %s <id> [length]"
|
||||
i.commands.ban.length = " until %s"
|
||||
|
||||
i.commands.timer.enabled = "Quest timer enabled"
|
||||
i.commands.timer.disabled = "Quest timer disabled"
|
||||
|
||||
i.commands.ravi.noCommand = "No Raviente command specified!"
|
||||
i.commands.ravi.start.success = "The Great Slaying will begin in a moment"
|
||||
i.commands.ravi.start.error = "The Great Slaying has already begun!"
|
||||
i.commands.ravi.multiplier = "Raviente multiplier is currently %.2fx"
|
||||
i.commands.ravi.res.success = "Sending resurrection support!"
|
||||
i.commands.ravi.res.error = "Resurrection support has not been requested!"
|
||||
i.commands.ravi.sed.success = "Sending sedation support if requested!"
|
||||
i.commands.ravi.request = "Requesting sedation support!"
|
||||
i.commands.ravi.error = "Raviente command not recognised!"
|
||||
i.commands.ravi.noPlayers = "No one has joined the Great Slaying!"
|
||||
i.commands.ravi.version = "This command is disabled outside of MHFZZ"
|
||||
|
||||
i.raviente.berserk = "<Great Slaying: Berserk> is being held!"
|
||||
i.raviente.extreme = "<Great Slaying: Extreme> is being held!"
|
||||
i.raviente.extremeLimited = "<Great Slaying: Extreme (Limited)> is being held!"
|
||||
i.raviente.berserkSmall = "<Great Slaying: Berserk (Small)> is being held!"
|
||||
|
||||
i.guild.invite.title = "Invitation!"
|
||||
i.guild.invite.body = "You have been invited to join\n「%s」\nDo you want to accept?"
|
||||
|
||||
i.guild.invite.success.title = "Success!"
|
||||
i.guild.invite.success.body = "You have successfully joined\n「%s」."
|
||||
|
||||
i.guild.invite.accepted.title = "Accepted"
|
||||
i.guild.invite.accepted.body = "The recipient accepted your invitation to join\n「%s」."
|
||||
|
||||
i.guild.invite.rejected.title = "Rejected"
|
||||
i.guild.invite.rejected.body = "You rejected the invitation to join\n「%s」."
|
||||
|
||||
i.guild.invite.declined.title = "Declined"
|
||||
i.guild.invite.declined.body = "The recipient declined your invitation to join\n「%s」."
|
||||
return langEnglish()
|
||||
}
|
||||
return i
|
||||
}
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
package channelserver
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"reflect"
|
||||
"testing"
|
||||
|
||||
cfg "erupe-ce/config"
|
||||
@@ -92,3 +94,36 @@ func TestGetLangStrings_EmptyLanguage(t *testing.T) {
|
||||
t.Errorf("Empty language should default to English, got %q", lang.language)
|
||||
}
|
||||
}
|
||||
|
||||
// checkNoEmptyStrings recursively walks v and fails the test for any empty string field.
|
||||
func checkNoEmptyStrings(t *testing.T, v reflect.Value, path string) {
|
||||
t.Helper()
|
||||
switch v.Kind() {
|
||||
case reflect.String:
|
||||
if v.String() == "" {
|
||||
t.Errorf("missing translation: %s is empty", path)
|
||||
}
|
||||
case reflect.Struct:
|
||||
for i := 0; i < v.NumField(); i++ {
|
||||
checkNoEmptyStrings(t, v.Field(i), path+"."+v.Type().Field(i).Name)
|
||||
}
|
||||
case reflect.Slice:
|
||||
for i := 0; i < v.Len(); i++ {
|
||||
checkNoEmptyStrings(t, v.Index(i), fmt.Sprintf("%s[%d]", path, i))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestLangCompleteness(t *testing.T) {
|
||||
languages := map[string]i18n{
|
||||
"en": langEnglish(),
|
||||
"jp": langJapanese(),
|
||||
"fr": langFrench(),
|
||||
"es": langSpanish(),
|
||||
}
|
||||
for code, lang := range languages {
|
||||
t.Run(code, func(t *testing.T) {
|
||||
checkNoEmptyStrings(t, reflect.ValueOf(lang), code)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -72,6 +72,10 @@ type Session struct {
|
||||
// Contains the mail list that maps accumulated indexes to mail IDs
|
||||
mailList []int
|
||||
|
||||
// currentBeadIndex is the bead slot selected by the player via MsgMhfSetKiju.
|
||||
// A value of -1 means no bead is currently assigned this session.
|
||||
currentBeadIndex int
|
||||
|
||||
Name string
|
||||
closed atomic.Bool
|
||||
ackStart map[uint32]time.Time
|
||||
@@ -86,20 +90,21 @@ func NewSession(server *Server, conn net.Conn) *Session {
|
||||
cryptConn, captureConn, captureCleanup := startCapture(server, cryptConn, conn.RemoteAddr(), pcap.ServerTypeChannel)
|
||||
|
||||
s := &Session{
|
||||
logger: server.logger.Named(conn.RemoteAddr().String()),
|
||||
server: server,
|
||||
rawConn: conn,
|
||||
cryptConn: cryptConn,
|
||||
sendPackets: make(chan packet, 20),
|
||||
clientContext: &clientctx.ClientContext{RealClientMode: server.erupeConfig.RealClientMode},
|
||||
lastPacket: time.Now(),
|
||||
objectID: server.getObjectId(),
|
||||
sessionStart: TimeAdjusted().Unix(),
|
||||
stageMoveStack: stringstack.New(),
|
||||
ackStart: make(map[uint32]time.Time),
|
||||
semaphoreID: make([]uint16, 2),
|
||||
captureConn: captureConn,
|
||||
captureCleanup: captureCleanup,
|
||||
logger: server.logger.Named(conn.RemoteAddr().String()),
|
||||
server: server,
|
||||
rawConn: conn,
|
||||
cryptConn: cryptConn,
|
||||
sendPackets: make(chan packet, 20),
|
||||
clientContext: &clientctx.ClientContext{RealClientMode: server.erupeConfig.RealClientMode},
|
||||
lastPacket: time.Now(),
|
||||
objectID: server.getObjectId(),
|
||||
sessionStart: TimeAdjusted().Unix(),
|
||||
stageMoveStack: stringstack.New(),
|
||||
ackStart: make(map[uint32]time.Time),
|
||||
semaphoreID: make([]uint16, 2),
|
||||
captureConn: captureConn,
|
||||
captureCleanup: captureCleanup,
|
||||
currentBeadIndex: -1,
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
@@ -48,6 +48,10 @@ func createMockServer() *Server {
|
||||
state: make([]uint32, 30),
|
||||
support: make([]uint32, 30),
|
||||
},
|
||||
// divaRepo and tournamentRepo defaults prevent nil-deref in handler tests
|
||||
// that don't need specific repo behaviour. Tests that need controlled data override them.
|
||||
divaRepo: &mockDivaRepo{},
|
||||
tournamentRepo: &mockTournamentRepo{},
|
||||
}
|
||||
s.i18n = getLangStrings(s)
|
||||
s.Registry = NewLocalChannelRegistry([]*Server{s})
|
||||
|
||||
5169
server/migrations/seed/CampaignDemo.sql
Normal file
5169
server/migrations/seed/CampaignDemo.sql
Normal file
File diff suppressed because it is too large
Load Diff
32
server/migrations/seed/DivaDefaults.sql
Normal file
32
server/migrations/seed/DivaDefaults.sql
Normal file
@@ -0,0 +1,32 @@
|
||||
-- Diva Defense default prize rewards.
|
||||
-- Personal track: type='personal', quantity=1 per milestone.
|
||||
-- Guild track: type='guild', quantity=5 per milestone.
|
||||
-- item_type=26 is Diva Coins; item_id=0 for all.
|
||||
INSERT INTO diva_prizes (type, points_req, item_type, item_id, quantity, gr, repeatable) VALUES
|
||||
('personal', 500000, 26, 0, 1, false, false),
|
||||
('personal', 1000000, 26, 0, 1, false, false),
|
||||
('personal', 2000000, 26, 0, 1, false, false),
|
||||
('personal', 3000000, 26, 0, 1, false, false),
|
||||
('personal', 5000000, 26, 0, 1, false, false),
|
||||
('personal', 7000000, 26, 0, 1, false, false),
|
||||
('personal', 10000000, 26, 0, 1, false, false),
|
||||
('personal', 15000000, 26, 0, 1, false, false),
|
||||
('personal', 20000000, 26, 0, 1, false, false),
|
||||
('personal', 30000000, 26, 0, 1, false, false),
|
||||
('personal', 50000000, 26, 0, 1, false, false),
|
||||
('personal', 70000000, 26, 0, 1, false, false),
|
||||
('personal', 100000000, 26, 0, 1, false, false),
|
||||
('guild', 500000, 26, 0, 5, false, false),
|
||||
('guild', 1000000, 26, 0, 5, false, false),
|
||||
('guild', 2000000, 26, 0, 5, false, false),
|
||||
('guild', 3000000, 26, 0, 5, false, false),
|
||||
('guild', 5000000, 26, 0, 5, false, false),
|
||||
('guild', 7000000, 26, 0, 5, false, false),
|
||||
('guild', 10000000, 26, 0, 5, false, false),
|
||||
('guild', 15000000, 26, 0, 5, false, false),
|
||||
('guild', 20000000, 26, 0, 5, false, false),
|
||||
('guild', 30000000, 26, 0, 5, false, false),
|
||||
('guild', 50000000, 26, 0, 5, false, false),
|
||||
('guild', 70000000, 26, 0, 5, false, false),
|
||||
('guild', 100000000, 26, 0, 5, false, false)
|
||||
ON CONFLICT DO NOTHING;
|
||||
62
server/migrations/seed/TournamentDefaults.sql
Normal file
62
server/migrations/seed/TournamentDefaults.sql
Normal file
@@ -0,0 +1,62 @@
|
||||
-- Tournament #150 default data.
|
||||
-- One tournament is inserted that starts immediately and has a wide window so operators
|
||||
-- can adjust the timestamps after installation. The sub-events and cups are seeded
|
||||
-- idempotently via ON CONFLICT DO NOTHING.
|
||||
-- Cup groups: 16 = speed hunt (Brachydios variants), 17 = guild hunt, 6 = fishing size.
|
||||
-- Cup types: 7 = speed hunt, 6 = fishing size.
|
||||
|
||||
BEGIN;
|
||||
|
||||
-- Default tournament (always active on a fresh install).
|
||||
-- start_time = now, entry_end = +3 days, ranking_end = +13 days, reward_end = +20 days.
|
||||
INSERT INTO tournaments (name, start_time, entry_end, ranking_end, reward_end)
|
||||
SELECT
|
||||
'Tournament #150',
|
||||
EXTRACT(epoch FROM NOW())::bigint,
|
||||
EXTRACT(epoch FROM NOW() + INTERVAL '3 days')::bigint,
|
||||
EXTRACT(epoch FROM NOW() + INTERVAL '13 days')::bigint,
|
||||
EXTRACT(epoch FROM NOW() + INTERVAL '20 days')::bigint
|
||||
WHERE NOT EXISTS (SELECT 1 FROM tournaments);
|
||||
|
||||
-- Sub-events (shared across tournaments; NOT tournament-specific).
|
||||
-- CupGroup 16: Speed hunt Brachydios variants (event_sub_type 0-14, quest_file_id 60691).
|
||||
INSERT INTO tournament_sub_events (cup_group, event_sub_type, quest_file_id, name)
|
||||
SELECT * FROM (VALUES
|
||||
(16::smallint, 0::smallint, 60691, 'ブラキディオス'),
|
||||
(16::smallint, 1::smallint, 60691, 'ブラキディオス'),
|
||||
(16::smallint, 2::smallint, 60691, 'ブラキディオス'),
|
||||
(16::smallint, 3::smallint, 60691, 'ブラキディオス'),
|
||||
(16::smallint, 4::smallint, 60691, 'ブラキディオス'),
|
||||
(16::smallint, 5::smallint, 60691, 'ブラキディオス'),
|
||||
(16::smallint, 6::smallint, 60691, 'ブラキディオス'),
|
||||
(16::smallint, 7::smallint, 60691, 'ブラキディオス'),
|
||||
(16::smallint, 8::smallint, 60691, 'ブラキディオス'),
|
||||
(16::smallint, 9::smallint, 60691, 'ブラキディオス'),
|
||||
(16::smallint, 10::smallint, 60691, 'ブラキディオス'),
|
||||
(16::smallint, 11::smallint, 60691, 'ブラキディオス'),
|
||||
(16::smallint, 12::smallint, 60691, 'ブラキディオス'),
|
||||
(16::smallint, 13::smallint, 60691, 'ブラキディオス'),
|
||||
(16::smallint, 14::smallint, 60691, 'ブラキディオス'),
|
||||
-- CupGroup 17: Guild hunt Brachydios (event_sub_type -1)
|
||||
(17::smallint, -1::smallint, 60690, 'ブラキディオスギルド'),
|
||||
-- CupGroup 6: Fishing size categories
|
||||
(6::smallint, 234::smallint, 0, 'キレアジ'),
|
||||
(6::smallint, 237::smallint, 0, 'ハリマグロ'),
|
||||
(6::smallint, 239::smallint, 0, 'カクサンデメキン')
|
||||
) AS v(cup_group, event_sub_type, quest_file_id, name)
|
||||
WHERE NOT EXISTS (SELECT 1 FROM tournament_sub_events);
|
||||
|
||||
-- Cups for the default tournament.
|
||||
-- cup_type 7 = speed hunt, cup_type 6 = fishing size.
|
||||
INSERT INTO tournament_cups (tournament_id, cup_group, cup_type, unk, name, description)
|
||||
SELECT t.id, v.cup_group, v.cup_type, v.unk, v.name, v.description
|
||||
FROM tournaments t
|
||||
CROSS JOIN (VALUES
|
||||
(16::smallint, 7::smallint, 0::smallint, 'スピードハントカップ', 'ブラキディオスをより速く狩れ'),
|
||||
(17::smallint, 7::smallint, 0::smallint, 'ギルドハントカップ', 'ブラキディオスをギルドで狩れ'),
|
||||
(6::smallint, 6::smallint, 0::smallint, 'フィッシングサイズカップ', '大きな魚を釣れ')
|
||||
) AS v(cup_group, cup_type, unk, name, description)
|
||||
WHERE NOT EXISTS (SELECT 1 FROM tournament_cups WHERE tournament_id = t.id)
|
||||
ORDER BY t.id;
|
||||
|
||||
COMMIT;
|
||||
@@ -998,7 +998,7 @@ CREATE TABLE public.guilds (
|
||||
pugi_name_1 character varying(12) DEFAULT ''::character varying,
|
||||
pugi_name_2 character varying(12) DEFAULT ''::character varying,
|
||||
pugi_name_3 character varying(12) DEFAULT ''::character varying,
|
||||
recruiting boolean DEFAULT true NOT NULL,
|
||||
recruiting boolean DEFAULT false NOT NULL,
|
||||
pugi_outfit_1 integer DEFAULT 0 NOT NULL,
|
||||
pugi_outfit_2 integer DEFAULT 0 NOT NULL,
|
||||
pugi_outfit_3 integer DEFAULT 0 NOT NULL,
|
||||
|
||||
@@ -1 +1 @@
|
||||
ALTER TABLE public.guild_alliances ADD COLUMN IF NOT EXISTS recruiting boolean NOT NULL DEFAULT true;
|
||||
ALTER TABLE public.guild_alliances ADD COLUMN IF NOT EXISTS recruiting boolean NOT NULL DEFAULT false;
|
||||
|
||||
66
server/migrations/sql/0016_campaign.sql
Normal file
66
server/migrations/sql/0016_campaign.sql
Normal file
@@ -0,0 +1,66 @@
|
||||
-- Campaign / Event Tent system tables.
|
||||
CREATE TABLE IF NOT EXISTS public.campaigns (
|
||||
id INTEGER PRIMARY KEY,
|
||||
min_hr INTEGER,
|
||||
max_hr INTEGER,
|
||||
min_sr INTEGER,
|
||||
max_sr INTEGER,
|
||||
min_gr INTEGER,
|
||||
max_gr INTEGER,
|
||||
reward_type INTEGER,
|
||||
stamps INTEGER,
|
||||
receive_type INTEGER,
|
||||
background_id INTEGER,
|
||||
start_time TIMESTAMP WITH TIME ZONE,
|
||||
end_time TIMESTAMP WITH TIME ZONE,
|
||||
title TEXT,
|
||||
reward TEXT,
|
||||
link TEXT,
|
||||
code_prefix TEXT
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS public.campaign_categories (
|
||||
id SERIAL PRIMARY KEY,
|
||||
type INTEGER,
|
||||
title TEXT,
|
||||
description TEXT
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS public.campaign_category_links (
|
||||
id SERIAL PRIMARY KEY,
|
||||
campaign_id INTEGER REFERENCES public.campaigns(id) ON DELETE CASCADE,
|
||||
category_id INTEGER REFERENCES public.campaign_categories(id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS public.campaign_rewards (
|
||||
id SERIAL PRIMARY KEY,
|
||||
campaign_id INTEGER REFERENCES public.campaigns(id) ON DELETE CASCADE,
|
||||
item_type INTEGER,
|
||||
quantity INTEGER,
|
||||
item_id INTEGER
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS public.campaign_rewards_claimed (
|
||||
character_id INTEGER REFERENCES public.characters(id) ON DELETE CASCADE,
|
||||
reward_id INTEGER REFERENCES public.campaign_rewards(id) ON DELETE CASCADE,
|
||||
PRIMARY KEY (character_id, reward_id)
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS public.campaign_state (
|
||||
id SERIAL PRIMARY KEY,
|
||||
campaign_id INTEGER REFERENCES public.campaigns(id) ON DELETE CASCADE,
|
||||
character_id INTEGER REFERENCES public.characters(id) ON DELETE CASCADE,
|
||||
code TEXT
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS public.campaign_codes (
|
||||
code TEXT PRIMARY KEY,
|
||||
campaign_id INTEGER REFERENCES public.campaigns(id) ON DELETE CASCADE,
|
||||
multi BOOLEAN NOT NULL DEFAULT FALSE
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS public.campaign_quest (
|
||||
campaign_id INTEGER REFERENCES public.campaigns(id) ON DELETE CASCADE,
|
||||
character_id INTEGER REFERENCES public.characters(id) ON DELETE CASCADE,
|
||||
PRIMARY KEY (campaign_id, character_id)
|
||||
);
|
||||
44
server/migrations/sql/0017_diva.sql
Normal file
44
server/migrations/sql/0017_diva.sql
Normal file
@@ -0,0 +1,44 @@
|
||||
-- Diva Defense (United Defense) extended schema.
|
||||
-- Adds bead selection, per-bead point accumulation, interception points,
|
||||
-- and prize reward tables for personal and guild tracks.
|
||||
|
||||
-- Interception map data per guild (binary blob, existing column pattern).
|
||||
ALTER TABLE guilds ADD COLUMN IF NOT EXISTS interception_maps bytea;
|
||||
|
||||
-- Per-character interception points keyed by quest file ID.
|
||||
ALTER TABLE guild_characters ADD COLUMN IF NOT EXISTS interception_points jsonb NOT NULL DEFAULT '{}';
|
||||
|
||||
-- Prize reward table for personal and guild tracks.
|
||||
CREATE TABLE IF NOT EXISTS diva_prizes (
|
||||
id SERIAL PRIMARY KEY,
|
||||
type VARCHAR(10) NOT NULL CHECK (type IN ('personal', 'guild')),
|
||||
points_req INTEGER NOT NULL,
|
||||
item_type INTEGER NOT NULL,
|
||||
item_id INTEGER NOT NULL,
|
||||
quantity INTEGER NOT NULL,
|
||||
gr BOOLEAN NOT NULL DEFAULT false,
|
||||
repeatable BOOLEAN NOT NULL DEFAULT false
|
||||
);
|
||||
|
||||
-- Active bead types for the current Diva Defense event.
CREATE TABLE IF NOT EXISTS diva_beads (
    id SERIAL PRIMARY KEY,
    type INTEGER NOT NULL
);
|
||||
|
||||
-- Per-character bead slot assignments with expiry.
-- Rows are dropped with the owning character via ON DELETE CASCADE.
CREATE TABLE IF NOT EXISTS diva_beads_assignment (
    id SERIAL PRIMARY KEY,
    character_id INTEGER NOT NULL REFERENCES characters(id) ON DELETE CASCADE,
    bead_index INTEGER NOT NULL,
    expiry TIMESTAMPTZ NOT NULL
);
|
||||
|
||||
-- Per-character bead point accumulation log; one row per award, stamped
-- at insert time so totals can be recomputed per event window.
CREATE TABLE IF NOT EXISTS diva_beads_points (
    id SERIAL PRIMARY KEY,
    character_id INTEGER NOT NULL REFERENCES characters(id) ON DELETE CASCADE,
    bead_index INTEGER NOT NULL,
    points INTEGER NOT NULL,
    timestamp TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
|
||||
server/migrations/sql/0018_guild_invites.sql (new file, 19 lines)
@@ -0,0 +1,19 @@
|
||||
-- Dedicated table for guild-initiated scout invitations, separate from
-- player-initiated applications. This gives each invitation a real serial PK
-- so the client's InvitationID field can map to an actual database row
-- instead of being aliased to the character ID.
CREATE TABLE IF NOT EXISTS guild_invites (
    id serial PRIMARY KEY,
    guild_id integer REFERENCES guilds(id),
    character_id integer REFERENCES characters(id),
    actor_id integer REFERENCES characters(id),
    created_at timestamptz NOT NULL DEFAULT now()
);

-- Migrate any existing scout invitations from guild_applications,
-- defaulting a missing creation time to now(), then remove the moved rows.
INSERT INTO guild_invites (guild_id, character_id, actor_id, created_at)
SELECT guild_id, character_id, actor_id, COALESCE(created_at, now())
FROM guild_applications
WHERE application_type = 'invited';

DELETE FROM guild_applications WHERE application_type = 'invited';
|
||||
server/migrations/sql/0019_save_transfer.sql (new file, 6 lines)
@@ -0,0 +1,6 @@
|
||||
-- Save transfer tokens: one-time admin-granted permission for a character
-- to receive an imported save via the API endpoint.
-- NULL means no import is pending for this character.
ALTER TABLE characters
    ADD COLUMN IF NOT EXISTS savedata_import_token TEXT,
    ADD COLUMN IF NOT EXISTS savedata_import_token_expiry TIMESTAMPTZ;
|
||||
server/migrations/sql/0020_return_guilds.sql (new file, 1 line)
@@ -0,0 +1 @@
|
||||
-- NOTE(review): return_type presumably distinguishes return/rookie guilds
-- (0 = default) per the migration name — confirm semantics in server code.
ALTER TABLE public.guilds ADD COLUMN IF NOT EXISTS return_type SMALLINT NOT NULL DEFAULT 0;
|
||||
server/migrations/sql/0021_tournament.sql (new file, 44 lines)
@@ -0,0 +1,44 @@
|
||||
-- Hunting tournament definitions. The four epoch columns delimit the
-- phases: start, entry cutoff, ranking cutoff, and reward-claim cutoff.
CREATE TABLE IF NOT EXISTS tournaments (
    id SERIAL PRIMARY KEY,
    name VARCHAR(64) NOT NULL,
    start_time BIGINT NOT NULL,
    entry_end BIGINT NOT NULL,
    ranking_end BIGINT NOT NULL,
    reward_end BIGINT NOT NULL
);
|
||||
|
||||
-- Cups belonging to a tournament; removed with it via ON DELETE CASCADE.
-- `unk` mirrors an unidentified client field — confirm meaning before use.
CREATE TABLE IF NOT EXISTS tournament_cups (
    id SERIAL PRIMARY KEY,
    tournament_id INTEGER NOT NULL REFERENCES tournaments(id) ON DELETE CASCADE,
    cup_group SMALLINT NOT NULL,
    cup_type SMALLINT NOT NULL,
    unk SMALLINT NOT NULL DEFAULT 0,
    name VARCHAR(64) NOT NULL,
    description TEXT NOT NULL DEFAULT ''
);
|
||||
|
||||
-- Sub-events keyed by cup_group (no FK to tournament_cups; groups are
-- matched by value, presumably shared across cups — verify against callers).
CREATE TABLE IF NOT EXISTS tournament_sub_events (
    id SERIAL PRIMARY KEY,
    cup_group SMALLINT NOT NULL,
    event_sub_type SMALLINT NOT NULL DEFAULT 0,
    quest_file_id INTEGER NOT NULL DEFAULT 0,
    name VARCHAR(64) NOT NULL
);
|
||||
|
||||
-- Tournament registrations; the UNIQUE pair prevents a character from
-- entering the same tournament twice.
CREATE TABLE IF NOT EXISTS tournament_entries (
    id SERIAL PRIMARY KEY,
    char_id INTEGER NOT NULL REFERENCES characters(id) ON DELETE CASCADE,
    tournament_id INTEGER NOT NULL REFERENCES tournaments(id) ON DELETE CASCADE,
    registered_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    UNIQUE (char_id, tournament_id)
);
|
||||
|
||||
-- Submitted tournament results; multiple rows per (char, tournament) are
-- allowed, one per submission, timestamped at insert time.
CREATE TABLE IF NOT EXISTS tournament_results (
    id SERIAL PRIMARY KEY,
    char_id INTEGER NOT NULL REFERENCES characters(id) ON DELETE CASCADE,
    tournament_id INTEGER NOT NULL REFERENCES tournaments(id) ON DELETE CASCADE,
    event_id INTEGER NOT NULL,
    quest_slot INTEGER NOT NULL DEFAULT 0,
    stage_handle INTEGER NOT NULL DEFAULT 0,
    submitted_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
|
||||
Reference in New Issue
Block a user