feat: add resource files archiving

This commit is contained in:
daydreamer-json
2026-04-03 20:14:50 +09:00
parent 44270fcff9
commit c815a012c0
8 changed files with 607 additions and 74 deletions

Binary file not shown.

View File

@@ -0,0 +1 @@
[]

Binary file not shown.

View File

@@ -0,0 +1 @@
[]

View File

@@ -3,6 +3,7 @@ import ky, { HTTPError } from 'ky';
import { DateTime } from 'luxon';
import PQueue from 'p-queue';
import semver from 'semver';
import type * as IResEndfield from '../types/api/akEndfield/Res.js';
import apiUtils from '../utils/api/index.js';
import argvUtils from '../utils/argv.js';
import cipher from '../utils/cipher.js';
@@ -27,6 +28,19 @@ interface MirrorFileEntry {
origStatus: boolean;
}
// One mirrored VFS resource file, persisted in mirror_file_res_list.json.zst.
interface MirrorFileResEntry {
md5: string; // MD5 of the original resource file; used as the dedupe key
mirror: string; // GitHub release asset URL the file was mirrored to
chunk: { start: number; length: number } | null; // byte range inside a combined chunk asset; null when uploaded standalone
}
// One mirrored VFS binary patch, persisted in mirror_file_res_patch_list.json.zst.
interface MirrorFileResPatchEntry {
md5Old: string; // MD5 of the base (old) file the patch applies to
md5New: string; // MD5 of the resulting (new) file; (md5Old, md5New) is the dedupe key
mirror: string; // GitHub release asset URL the patch was mirrored to
chunk: { start: number; length: number } | null; // byte range inside a combined chunk asset; null when uploaded standalone
}
interface GameTarget {
name: string;
region: 'os' | 'cn';
@@ -49,6 +63,19 @@ interface AssetToMirror {
url: string;
name: string | null;
}
// A resource file queued for mirroring (mirror_file_res_list_pending.json).
interface AssetToMirrorRes {
md5: string; // MD5 of the source file; checked against MirrorFileResEntry.md5 to skip duplicates
name: string; // target asset name, e.g. `VFS_<version>_<md5>.<ext>`
size: number; // declared size in bytes; used for chunk packing and the GitHub size limit check
url: string; // download URL of the original file
}
// A binary patch queued for mirroring (mirror_file_res_patch_list_pending.json).
interface AssetToMirrorResPatch {
md5Old: string; // MD5 of the base (old) file
md5New: string; // MD5 of the patched (new) file
size: number; // declared patch size in bytes (patch_size from patch.json)
url: string; // download URL of the patch file
}
// Global/Shared State
// NOTE(review): presumably appended to by the fetch-and-save passes and later
// flushed to the pending list — the writers are not visible in this view; confirm.
const assetsToMirror: AssetToMirror[] = [];
@@ -635,6 +662,86 @@ async function fetchAndSaveLauncherProtocol(gameTargets: GameTarget[]) {
await networkQueue.onIdle();
}
// Scans previously fetched game-resource index/patch manifests and queues any
// not-yet-mirrored VFS files and binary patches into the two pending JSON lists.
// Initializes the (zstd-compressed) mirror databases and the pending files on
// first run. Deduplicates against both the mirror DBs and the pending lists.
async function addAllGameResVFSDataToPending(gameTargets: GameTarget[]) {
const outputDir = argvUtils.getArgv()['outputDir'];
const platforms = ['Windows', 'Android', 'iOS', 'PlayStation'] as const;
// Bilibili CN channel is intentionally excluded from mirroring.
const filteredTargets = gameTargets.filter(
(t) => t.channel !== appConfig.network.api.akEndfield.channel.cnWinRelBilibili,
);
// Dedupe channels: several targets can share one channel id.
const uniqueTargets = [...new Set(filteredTargets.map((t) => t.channel))];
const dbPath = path.join(outputDir, 'mirror_file_res_list.json.zst');
const patchDbPath = path.join(outputDir, 'mirror_file_res_patch_list.json.zst');
const pendingDbPath = path.join(outputDir, 'mirror_file_res_list_pending.json');
const pendingPatchDbPath = path.join(outputDir, 'mirror_file_res_patch_list_pending.json');
// Create empty databases on first run (mirror DBs are zstd-compressed JSON).
if (!(await Bun.file(dbPath).exists())) await Bun.write(dbPath, Bun.zstdCompressSync('[]'));
if (!(await Bun.file(patchDbPath).exists())) await Bun.write(patchDbPath, Bun.zstdCompressSync('[]'));
if (!(await Bun.file(pendingDbPath).exists())) await Bun.write(pendingDbPath, '[]');
if (!(await Bun.file(pendingPatchDbPath).exists())) await Bun.write(pendingPatchDbPath, '[]');
const db: MirrorFileResEntry[] = JSON.parse(Bun.zstdDecompressSync(await Bun.file(dbPath).bytes()).toString('utf-8'));
const patchDb: MirrorFileResPatchEntry[] = JSON.parse(
Bun.zstdDecompressSync(await Bun.file(patchDbPath).bytes()).toString('utf-8'),
);
const pendingDb: AssetToMirrorRes[] = await Bun.file(pendingDbPath).json();
const pendingPatchDb: AssetToMirrorResPatch[] = await Bun.file(pendingPatchDbPath).json();
for (const channel of uniqueTargets) {
for (const platform of platforms) {
// Aggregated launcher API responses saved by an earlier fetch pass.
const apiResAllPath = path.join(
outputDir,
'akEndfield',
'launcher',
'game_resources',
String(channel),
platform,
'all.json',
);
if (!(await Bun.file(apiResAllPath).exists())) continue;
const apiResAll = ((await Bun.file(apiResAllPath).json()) as StoredData<LatestGameResourcesResponse>[])
.map((e) => e.rsp.resources)
.flat();
for (const apiResEntry of apiResAll) {
// Per-resource decrypted index manifest ("_dec" presumably = decrypted — confirm),
// mirrored on disk under raw/<host+path>/.
const indexJsonPath = path.join(
outputDir,
'raw',
apiResEntry.path.replace('https://', ''),
'index_' + apiResEntry.name + '_dec.json',
);
if (!(await Bun.file(indexJsonPath).exists())) continue;
const indexJson: IResEndfield.ResourceIndex = await Bun.file(indexJsonPath).json();
for (const resFile of indexJson.files) {
// Skip anything already mirrored or already queued (md5 is the identity key).
if (db.some((e) => e.md5 === resFile.md5)) continue;
if (pendingDb.some((e) => e.md5 === resFile.md5)) continue;
pendingDb.push({
md5: resFile.md5,
// Stable mirror name: version + md5 + original file extension.
name: `VFS_${apiResEntry.version}_${resFile.md5}.${path.extname(resFile.name).slice(1)}`,
size: resFile.size,
url: `${apiResEntry.path}/${resFile.name}`,
});
}
// NOTE: `continue` here also skips the patch manifest for this entry when
// patch.json is absent — intentional, there is simply nothing to queue.
const patchJsonPath = path.join(outputDir, 'raw', apiResEntry.path.replace('https://', ''), 'patch.json');
if (!(await Bun.file(patchJsonPath).exists())) continue;
const patchJson: IResEndfield.ResourcePatch = await Bun.file(patchJsonPath).json();
for (const file of patchJson.files) {
const md5New = file.md5;
// Iterate patches oldest-base-last reversed; (md5Old, md5New) is the identity key.
for (const patch of file.patch.toReversed()) {
const md5Old = patch.base_md5;
const size = patch.patch_size;
const url = `${apiResEntry.path}/Patch/${patch.patch}`;
if (patchDb.some((e) => e.md5Old === md5Old && e.md5New === md5New)) continue;
if (pendingPatchDb.some((e) => e.md5Old === md5Old && e.md5New === md5New)) continue;
pendingPatchDb.push({ md5Old, md5New, size, url });
}
}
}
}
}
// Persist the (possibly grown) pending lists; mirror DBs themselves are not modified here.
await Bun.write(pendingDbPath, JSON.stringify(pendingDb, null, 2));
await Bun.write(pendingPatchDbPath, JSON.stringify(pendingPatchDb, null, 2));
}
async function mainCmdHandler() {
const cfg = appConfig.network.api.akEndfield;
const gameTargets: GameTarget[] = [
@@ -700,15 +807,15 @@ async function mainCmdHandler() {
},
];
await fetchAndSaveLatestGames(gameTargets);
await fetchAndSaveLatestGamePatches(gameTargets);
await fetchAndSaveLatestGameResources(gameTargets);
await fetchAndSaveLatestWebApis(gameTargets);
await fetchAndSaveLauncherProtocol(gameTargets);
await fetchAndSaveLatestLauncher(launcherTargets);
await fetchAndSaveAllGameResRawData(gameTargets);
// await fetchAndSaveLatestGames(gameTargets);
// await fetchAndSaveLatestGamePatches(gameTargets);
// await fetchAndSaveLatestGameResources(gameTargets);
// await fetchAndSaveLatestWebApis(gameTargets);
// await fetchAndSaveLauncherProtocol(gameTargets);
// await fetchAndSaveLatestLauncher(launcherTargets);
// await fetchAndSaveAllGameResRawData(gameTargets);
await addAllGameResVFSDataToPending(gameTargets);
// Save pending assets to mirror
const outputDir = argvUtils.getArgv()['outputDir'];
const pendingPath = path.join(outputDir, 'mirror_file_list_pending.json');
const dbPath = path.join(outputDir, 'mirror_file_list.json');

View File

@@ -16,13 +16,38 @@ interface MirrorFileEntry {
origStatus: boolean;
}
// One mirrored VFS resource file, persisted in mirror_file_res_list.json.zst.
interface MirrorFileResEntry {
md5: string; // MD5 of the original resource file; used as the dedupe key
mirror: string; // GitHub release asset URL the file was mirrored to
chunk: { start: number; length: number } | null; // byte range inside a combined chunk asset; null when uploaded standalone
}
// One mirrored VFS binary patch, persisted in mirror_file_res_patch_list.json.zst.
interface MirrorFileResPatchEntry {
md5Old: string; // MD5 of the base (old) file the patch applies to
md5New: string; // MD5 of the resulting (new) file; (md5Old, md5New) is the dedupe key
mirror: string; // GitHub release asset URL the patch was mirrored to
chunk: { start: number; length: number } | null; // byte range inside a combined chunk asset; null when uploaded standalone
}
// A plain (non-VFS) asset queued for mirroring in mirror_file_list_pending.json.
interface AssetToMirror {
url: string; // download URL of the original asset
name: string | null; // target asset name; null falls back to the URL's last path segment
}
let githubAuthCfg: any = null;
let octoClient: Octokit | null = null;
// A resource file queued for mirroring (mirror_file_res_list_pending.json).
interface AssetToMirrorRes {
md5: string; // MD5 of the source file; checked against MirrorFileResEntry.md5 to skip duplicates
name: string; // target asset name for standalone uploads
size: number; // declared size in bytes; drives chunk packing and the GitHub 2 GiB limit check
url: string; // download URL of the original file
}
// A binary patch queued for mirroring (mirror_file_res_patch_list_pending.json).
interface AssetToMirrorResPatch {
md5Old: string; // MD5 of the base (old) file
md5New: string; // MD5 of the patched (new) file
size: number; // declared patch size in bytes
url: string; // download URL of the patch file
}
const networkQueue = new PQueue({ concurrency: appConfig.threadCount.network });
const formatBytes = (size: number) =>
@@ -62,7 +87,9 @@ async function checkMirrorFileDbStatus() {
await Bun.write(dbPath, JSON.stringify(db, null, 2));
}
async function processMirrorQueue() {
async function processMirrorQueue(configAuth: any, client: Octokit) {
const owner = configAuth.github.relArchive.owner;
const repo = configAuth.github.relArchive.repo;
const outputDir = argvUtils.getArgv()['outputDir'];
const dbPath = path.join(outputDir, 'mirror_file_list.json');
const pendingPath = path.join(outputDir, 'mirror_file_list_pending.json');
@@ -82,49 +109,383 @@ async function processMirrorQueue() {
logger.info(`Processing ${pending.length} pending assets ...`);
const selectedTag = (() => {
const regexp = /github\.com\/.+?\/.+?\/releases\/download\/(.+?)\//;
for (const tag of configAuth.github.relArchive.tags) {
if (
db.filter((e) => e.mirror.match(regexp) && e.mirror.match(regexp)![1] && e.mirror.match(regexp)![1] === tag)
.length <= 997
)
return tag;
}
return false;
})();
if (!selectedTag) logger.error('GitHub tag assets file count limit reached');
for (const { url, name } of pending) {
const origUrl = stringUtils.removeQueryStr(url);
if (!db.find((e) => e.orig.includes(origUrl))) {
await githubUtils.uploadAsset(octoClient, githubAuthCfg, url, name);
if (githubAuthCfg) {
db.push({
orig: origUrl,
mirror: `https://github.com/${githubAuthCfg.github.relArchive.owner}/${githubAuthCfg.github.relArchive.repo}/releases/download/${githubAuthCfg.github.relArchive.tag}/${name ?? new URL(url).pathname.split('/').pop() ?? ''}`,
origStatus: true,
});
await Bun.write(dbPath, JSON.stringify(db, null, 2));
}
await githubUtils.uploadAsset(client, owner, repo, selectedTag, url, name);
db.push({
orig: origUrl,
mirror: `https://github.com/${owner}/${repo}/releases/download/${selectedTag}/${name ?? new URL(url).pathname.split('/').pop() ?? ''}`,
origStatus: true,
});
await Bun.write(dbPath, JSON.stringify(db, null, 2));
}
}
// Clear pending list
await Bun.write(pendingPath, JSON.stringify([], null, 2));
logger.info('Mirroring process completed and pending list cleared');
}
async function mainCmdHandler() {
const authPath = 'config/config_auth.yaml';
if (await Bun.file(authPath).exists()) {
githubAuthCfg = YAML.parse(await Bun.file(authPath).text());
logger.info('Logging in to GitHub');
octoClient = new Octokit({ auth: githubAuthCfg.github.relArchive.token });
} else {
logger.error('GitHub authentication config not found');
return;
async function processMirrorResQueue(configAuth: any, client: Octokit) {
const ghFileSizeLimit = 2 * 1024 ** 3 - 1;
const owner = configAuth.github.relArchiveRes.owner;
const repo = configAuth.github.relArchiveRes.repo;
const outputDir = argvUtils.getArgv()['outputDir'];
const dbPath = path.join(outputDir, 'mirror_file_res_list.json.zst');
const patchDbPath = path.join(outputDir, 'mirror_file_res_patch_list.json.zst');
const pendingDbPath = path.join(outputDir, 'mirror_file_res_list_pending.json');
const patchPendingDbPath = path.join(outputDir, 'mirror_file_res_patch_list_pending.json');
const db: MirrorFileResEntry[] = JSON.parse(Bun.zstdDecompressSync(await Bun.file(dbPath).bytes()).toString('utf-8'));
const patchDb: MirrorFileResPatchEntry[] = JSON.parse(
Bun.zstdDecompressSync(await Bun.file(patchDbPath).bytes()).toString('utf-8'),
);
const pendingDb: AssetToMirrorRes[] = (await Bun.file(pendingDbPath).json()) ?? [];
const validPendingDb: AssetToMirrorRes[] = [];
const newPendingDb: AssetToMirrorRes[] = [];
for (const entry of pendingDb) {
if (db.some((e) => e.md5 === entry.md5)) continue;
if (entry.size >= ghFileSizeLimit) {
logger.warn(`File size is larger than limit. Skipped: ${entry.name}`);
newPendingDb.push(entry);
continue;
}
validPendingDb.push(entry);
}
if (await githubUtils.checkIsActionRunning(githubAuthCfg)) {
if (validPendingDb.length === 0) {
logger.info('Res valid pending list is empty');
} else {
logger.info(`Processing ${validPendingDb.length} pending res ...`);
const getSelectedTag = () => {
const regexp = /github\.com\/.+?\/.+?\/releases\/download\/(.+?)\//;
for (const tag of configAuth.github.relArchiveRes.tags) {
if (
[...new Set([...db, ...patchDb].map((e) => e.mirror))].filter(
(e) => e.match(regexp) && e.match(regexp)![1] && e.match(regexp)![1] === tag,
).length <= 997
)
return tag as string;
}
return false;
};
if (!getSelectedTag()) {
logger.error('GitHub tag assets file count limit reached');
return;
}
const pendingFileChunkSizeLimit = ghFileSizeLimit;
const chunkThresholdSize = 500 * 1024 ** 2;
const pendingFileChunks = validPendingDb
.filter((e) => e.size < chunkThresholdSize)
.reduce(
(acc, item) => {
const lastChunk = acc.at(-1)!;
const currentChunkSize = lastChunk.reduce((sum, i) => sum + i.size, 0);
if (currentChunkSize + item.size <= pendingFileChunkSizeLimit) {
lastChunk.push(item);
} else {
acc.push([item]);
}
return acc;
},
[[]] as AssetToMirrorRes[][],
);
if (pendingFileChunks.length === 1 && pendingFileChunks[0]!.length === 0) {
logger.info('Chunk upload skipped');
await Bun.write(pendingDbPath, JSON.stringify(validPendingDb, null, 2));
} else {
for (const chunk of pendingFileChunks) {
const buffers: { index: number; data: Uint8Array }[] = [];
console.log('');
chunk.forEach((e, index) => {
networkQueue.add(async () => {
const data = await ky
.get(e.url, {
headers: { 'User-Agent': appConfig.network.userAgent.minimum },
timeout: appConfig.network.timeout,
retry: { limit: appConfig.network.retryCount },
})
.bytes();
buffers.push({ index, data });
process.stdout.write('\x1b[1A\x1b[2K');
logger.trace(
`Downloaded: ${buffers.length.toString().padStart(chunk.length.toString().length, ' ')} / ${chunk.length}, ${new URL(e.url).pathname.split('/').at(-1)}, ${formatBytes(data.length)}`,
);
});
});
await networkQueue.onIdle();
buffers.sort((a, b) => a.index - b.index);
const chunkTotalSize = mathUtils.arrayTotal(buffers.map((e) => e.data.length));
const combinedBuffer = new Uint8Array(chunkTotalSize);
let offset = 0;
for (const item of buffers) {
combinedBuffer.set(item.data, offset);
offset += item.data.length;
}
const combinedBufferMd5 = new Bun.CryptoHasher('md5').update(combinedBuffer).digest('hex');
const chunkFileName = `VFS_Chunk_${combinedBufferMd5}.bin`;
if (getSelectedTag() === false) throw new Error('GitHub tag assets file count limit reached');
await githubUtils.uploadAssetWithBuffer(
client,
owner,
repo,
getSelectedTag() as string,
chunkFileName,
combinedBuffer,
);
offset = 0;
for (const item of chunk) {
db.push({
md5: item.md5,
mirror: `https://github.com/${owner}/${repo}/releases/download/${getSelectedTag()}/${chunkFileName}`,
chunk: { start: offset, length: item.size },
});
offset += item.size;
}
await Bun.write(dbPath, Bun.zstdCompressSync(JSON.stringify(db), { level: 16 }));
}
}
const bigFiles = validPendingDb.filter((e) => e.size >= chunkThresholdSize);
await Bun.write(pendingDbPath, JSON.stringify([...newPendingDb, ...bigFiles], null, 2));
{
if (bigFiles.length > 0) logger.info('Processing big pending res ...');
networkQueue.concurrency = 4;
for (const file of bigFiles) {
networkQueue.add(async () => {
const buffer: Uint8Array = await ky
.get(file.url, {
headers: { 'User-Agent': appConfig.network.userAgent.minimum },
timeout: appConfig.network.timeout,
retry: { limit: appConfig.network.retryCount },
})
.bytes();
logger.trace('Downloaded: ' + file.name);
if (getSelectedTag() === false) throw new Error('GitHub tag assets file count limit reached');
await githubUtils.uploadAssetWithBuffer(client, owner, repo, getSelectedTag() as string, file.name, buffer);
db.push({
md5: file.md5,
mirror: `https://github.com/${owner}/${repo}/releases/download/${getSelectedTag()}/${file.name}`,
chunk: null,
});
await Bun.write(dbPath, Bun.zstdCompressSync(JSON.stringify(db), { level: 16 }));
});
}
await networkQueue.onIdle();
networkQueue.concurrency = appConfig.threadCount.network;
}
}
await Bun.write(pendingDbPath, JSON.stringify([...newPendingDb], null, 2));
}
// Uploads pending VFS binary patches to the relArchiveRes GitHub release.
// Small patches (< 500 MiB) are concatenated into combined "VFS_Patch_Chunk_*"
// assets packed up to the 2 GiB release-asset limit; each patch's location is
// recorded as a (start, length) range. Large patches are uploaded individually.
// The compressed patch DB is persisted after every successful upload so an
// interrupted run loses at most one chunk of progress.
async function processMirrorResPatchQueue(configAuth: any, client: Octokit) {
// GitHub release assets max out just under 2 GiB.
const ghFileSizeLimit = 2 * 1024 ** 3 - 1;
const owner = configAuth.github.relArchiveRes.owner;
const repo = configAuth.github.relArchiveRes.repo;
const outputDir = argvUtils.getArgv()['outputDir'];
const dbPath = path.join(outputDir, 'mirror_file_res_list.json.zst');
const patchDbPath = path.join(outputDir, 'mirror_file_res_patch_list.json.zst');
const pendingDbPath = path.join(outputDir, 'mirror_file_res_patch_list_pending.json');
// Nothing queued yet — nothing to do.
if (!(await Bun.file(pendingDbPath).exists())) return;
// `db` (file mirror DB) is only read here to count assets already on each tag.
const db: MirrorFileResEntry[] = (await Bun.file(dbPath).exists())
? JSON.parse(Bun.zstdDecompressSync(await Bun.file(dbPath).bytes()).toString('utf-8'))
: [];
const patchDb: MirrorFileResPatchEntry[] = (await Bun.file(patchDbPath).exists())
? JSON.parse(Bun.zstdDecompressSync(await Bun.file(patchDbPath).bytes()).toString('utf-8'))
: [];
const pendingDb: AssetToMirrorResPatch[] = (await Bun.file(pendingDbPath).json()) ?? [];
// Partition pending entries: already-mirrored → dropped; oversized → kept
// pending (newPendingDb) and warned; the rest → uploadable (validPendingDb).
const validPendingDb: AssetToMirrorResPatch[] = [];
const newPendingDb: AssetToMirrorResPatch[] = [];
for (const entry of pendingDb) {
if (patchDb.some((e) => e.md5Old === entry.md5Old && e.md5New === entry.md5New)) continue;
if (entry.size >= ghFileSizeLimit) {
logger.warn(`File size is larger than limit. Skipped patch: ${entry.md5Old} -> ${entry.md5New}`);
newPendingDb.push(entry);
continue;
}
validPendingDb.push(entry);
}
if (validPendingDb.length === 0) {
logger.info('Res patch valid pending list is empty');
} else {
logger.info(`Processing ${validPendingDb.length} pending res patches ...`);
// Picks the first configured release tag with fewer than ~998 distinct
// mirror assets (GitHub caps assets per release at 1000); re-evaluated
// before each upload because uploads add assets as we go.
const getSelectedTag = () => {
const regexp = /github\.com\/.+?\/.+?\/releases\/download\/(.+?)\//;
for (const tag of configAuth.github.relArchiveRes.tags) {
if (
[...new Set([...db, ...patchDb].map((e) => e.mirror))].filter(
(e) => e.match(regexp) && e.match(regexp)![1] && e.match(regexp)![1] === tag,
).length <= 997
)
return tag as string;
}
return false;
};
if (!getSelectedTag()) {
logger.error('GitHub tag assets file count limit reached');
return;
}
// Patches at or above this size are uploaded individually, not chunk-packed.
const chunkThresholdSize = 500 * 1024 ** 2;
// Greedy first-fit packing of small patches into ≤ 2 GiB chunks, preserving order.
const pendingFileChunks = validPendingDb
.filter((e) => e.size < chunkThresholdSize)
.reduce(
(acc, item) => {
const lastChunk = acc.at(-1)!;
const currentChunkSize = lastChunk.reduce((sum, i) => sum + i.size, 0);
if (currentChunkSize + item.size <= ghFileSizeLimit) {
lastChunk.push(item);
} else {
acc.push([item]);
}
return acc;
},
[[]] as AssetToMirrorResPatch[][],
);
if (pendingFileChunks.length === 1 && pendingFileChunks[0]!.length === 0) {
logger.info('Patch chunk upload skipped');
} else {
for (const chunk of pendingFileChunks) {
const buffers: { index: number; data: Uint8Array }[] = [];
// Blank line consumed by the cursor-up progress rewrite below.
console.log('');
chunk.forEach((e, index) => {
networkQueue.add(async () => {
const data = await ky
.get(e.url, {
headers: { 'User-Agent': appConfig.network.userAgent.minimum },
timeout: appConfig.network.timeout,
retry: { limit: appConfig.network.retryCount },
})
.bytes();
buffers.push({ index, data });
// Rewrite the previous terminal line with download progress.
process.stdout.write('\x1b[1A\x1b[2K');
logger.trace(
`Downloaded Patch: ${buffers.length.toString().padStart(chunk.length.toString().length, ' ')} / ${chunk.length}, ${e.md5Old.slice(0, 8)}... -> ${e.md5New.slice(0, 8)}..., ${formatBytes(data.length)}`,
);
});
});
await networkQueue.onIdle();
// Downloads complete out of order; restore queueing order before concatenating.
buffers.sort((a, b) => a.index - b.index);
const combinedBuffer = new Uint8Array(mathUtils.arrayTotal(buffers.map((e) => e.data.length)));
let offset = 0;
for (const item of buffers) {
combinedBuffer.set(item.data, offset);
offset += item.data.length;
}
const combinedBufferMd5 = new Bun.CryptoHasher('md5').update(combinedBuffer).digest('hex');
const chunkFileName = `VFS_Patch_Chunk_${combinedBufferMd5}.bin`;
const tag = getSelectedTag();
if (!tag) throw new Error('GitHub tag assets file count limit reached');
await githubUtils.uploadAssetWithBuffer(client, owner, repo, tag, chunkFileName, combinedBuffer);
// Record each patch's byte range inside the combined asset.
// NOTE(review): ranges use the declared item.size, not the actual downloaded
// byte count — offsets would drift if a server ever returned a different size; confirm.
offset = 0;
for (const item of chunk) {
patchDb.push({
md5Old: item.md5Old,
md5New: item.md5New,
mirror: `https://github.com/${owner}/${repo}/releases/download/${tag}/${chunkFileName}`,
chunk: { start: offset, length: item.size },
});
offset += item.size;
}
// Persist after every chunk so progress survives interruption.
await Bun.write(patchDbPath, Bun.zstdCompressSync(JSON.stringify(patchDb), { level: 16 }));
}
}
// Large patches: download and upload one asset each, with reduced concurrency.
const bigFiles = validPendingDb.filter((e) => e.size >= chunkThresholdSize);
if (bigFiles.length > 0) {
logger.info('Processing big pending patches ...');
networkQueue.concurrency = 4;
for (const file of bigFiles) {
networkQueue.add(async () => {
const buffer = await ky
.get(file.url, {
headers: { 'User-Agent': appConfig.network.userAgent.minimum },
timeout: appConfig.network.timeout,
retry: { limit: appConfig.network.retryCount },
})
.bytes();
logger.trace(`Downloaded Patch: ${file.md5Old} -> ${file.md5New}`);
const tag = getSelectedTag();
if (!tag) throw new Error('GitHub tag assets file count limit reached');
const fileName = `VFS_Patch_${file.md5Old}_${file.md5New}.bin`;
await githubUtils.uploadAssetWithBuffer(client, owner, repo, tag, fileName, buffer);
patchDb.push({
md5Old: file.md5Old,
md5New: file.md5New,
mirror: `https://github.com/${owner}/${repo}/releases/download/${tag}/${fileName}`,
chunk: null,
});
await Bun.write(patchDbPath, Bun.zstdCompressSync(JSON.stringify(patchDb), { level: 16 }));
});
}
await networkQueue.onIdle();
networkQueue.concurrency = appConfig.threadCount.network;
}
}
// Everything uploadable was processed; only oversized entries remain pending.
await Bun.write(pendingDbPath, JSON.stringify([...newPendingDb], null, 2));
}
async function mainCmdHandler() {
const authPath = 'config/config_auth.yaml';
if (!(await Bun.file(authPath).exists())) {
logger.error('Config auth not found');
return;
}
const configAuth = YAML.parse(await Bun.file(authPath).text());
const clients = {
main: new Octokit({ auth: configAuth.github.main.token }),
relArchive: new Octokit({ auth: configAuth.github.relArchive.token }),
relArchiveRes: new Octokit({ auth: configAuth.github.relArchiveRes.token }),
};
logger.info('Logged in to GitHub');
if (await githubUtils.checkIsActionRunning(clients.main, configAuth.github.main.owner, configAuth.github.main.repo)) {
logger.error('Duplicate execution detected (GitHub Action is already running)');
return;
}
await checkMirrorFileDbStatus();
await processMirrorQueue();
await processMirrorQueue(configAuth, clients.relArchive);
await processMirrorResQueue(configAuth, clients.relArchiveRes);
await processMirrorResPatchQueue(configAuth, clients.relArchiveRes);
const relInfo = await githubUtils.getReleaseInfo(octoClient, githubAuthCfg);
if (relInfo) {
logger.info(`GitHub Releases total size: ${formatBytes(mathUtils.arrayTotal(relInfo.assets.map((a) => a.size)))}`);
}
// const relInfo = await githubUtils.getReleaseInfo(octoClient, githubAuthCfg);
// if (relInfo) {
// logger.info(`GitHub Releases total size: ${formatBytes(mathUtils.arrayTotal(relInfo.assets.map((a) => a.size)))}`);
// }
}
export default mainCmdHandler;

View File

@@ -0,0 +1,33 @@
// Parsed shape of a resource index manifest (index_<name>_dec.json;
// "_dec" presumably means decrypted — confirm against the fetch pipeline).
export interface ResourceIndex {
isInitial: boolean;
files: {
index: number;
name: string; // path relative to the resource root; appended to the resource URL when downloading
hash: string | null;
size: number; // size in bytes; used for chunk packing when mirroring
type: number; // C# enum? NOTE(review): numeric code — semantics unknown from this repo, confirm
md5: string; // identity/dedupe key for mirroring
urlPath: any; // NOTE(review): type not pinned down yet — verify against real manifests
manifest: number; // NOTE(review): meaning unknown from this file
}[];
types: any; // NOTE(review): meaning unknown from this file
version: any; // NOTE(review): meaning unknown from this file
rebootVersion: string; // NOTE(review): meaning unknown from this file
}
// Parsed shape of a patch manifest (patch.json): for each new file, the list of
// binary patches that upgrade older versions of that file to it. The mirroring
// code iterates `patch` via toReversed(), i.e. in reverse manifest order.
export interface ResourcePatch {
version: string; // 6331530-16
files: {
name: string; // 0CE8FA57/8A8746477A4254C6069BCC7124B229A2.chk (new file)
md5: string; // 4cd56084739f5cf92540ae9bb988e90a (new file)
size: number; // 205884826 (new file)
diffType: number; // 1
patch: {
base_file: string; // 0CE8FA57/FA0DF58E1E98B5137A6A28DA9AD04ECF.chk (old file)
base_md5: string; // 4d0cf13a06886c2d40d7dced64f01025 (old file)
base_size: number; // 205875376 (old file)
patch: string; // diff_6331530-16_5961872-11/0CE8FA57_8A8746477A4254C6069BCC7124B229A2.chk_patch
patch_size: number; // 137279
}[];
}[];
}

View File

@@ -4,18 +4,14 @@ import appConfig from './config.js';
import logger from './logger.js';
async function uploadAsset(
client: Octokit | null,
authCfg: {
github: {
relArchive: { token: string; owner: string; repo: string; tag: string };
main: { token: string; owner: string; repo: string };
};
} | null,
client: Octokit,
owner: string,
repo: string,
tag: string,
url: string,
targetFileName: string | null,
) {
if (!client || !authCfg) return;
const release = await getReleaseInfo(client, authCfg);
const release = await getReleaseInfo(client, owner, repo, tag);
if (!release) throw new Error('GH release not found');
const releaseId = release.id;
@@ -25,53 +21,87 @@ async function uploadAsset(
const binSize: number = bin.byteLength;
logger.info(`Mirror archive: Uploading ${new URL(url).pathname.split('/').pop()} ...`);
await client.rest.repos.uploadReleaseAsset({
owner: authCfg.github.relArchive.owner,
repo: authCfg.github.relArchive.repo,
owner,
repo,
release_id: releaseId,
name,
data: bin as any,
headers: { 'content-length': binSize },
});
return true;
}
async function getReleaseInfo(
client: Octokit | null,
authCfg: {
github: {
relArchive: { token: string; owner: string; repo: string; tag: string };
main: { token: string; owner: string; repo: string };
};
} | null,
async function uploadAssetWithBuffer(
client: Octokit,
owner: string,
repo: string,
tag: string,
targetFileName: string,
buffer: Uint8Array,
) {
if (!client || !authCfg) return;
const { data: release } = await client.rest.repos.getReleaseByTag({
owner: authCfg.github.relArchive.owner,
repo: authCfg.github.relArchive.repo,
tag: authCfg.github.relArchive.tag,
const release = await getReleaseInfo(client, owner, repo, tag);
if (!release) throw new Error('GH release not found');
const releaseId = release.id;
logger.info(`Mirror archive: Uploading to ${tag}, ${targetFileName} ...`);
await client.rest.repos.uploadReleaseAsset({
owner,
repo,
release_id: releaseId,
name: targetFileName,
data: buffer as any,
headers: { 'content-length': buffer.byteLength },
});
return true;
}
/**
 * Looks up a GitHub release by tag in the given repository.
 * @returns the release object reported by the GitHub REST API.
 */
async function getReleaseInfo(client: Octokit, owner: string, repo: string, tag: string) {
  const response = await client.rest.repos.getReleaseByTag({ owner, repo, tag });
  return response.data;
}
async function checkIsActionRunning(
authCfg: {
github: {
relArchive: { token: string; owner: string; repo: string; tag: string };
main: { token: string; owner: string; repo: string };
};
} | null,
): Promise<boolean> {
if (!authCfg) return false;
async function checkIsActionRunning(client: Octokit, owner: string, repo: string): Promise<boolean> {
logger.debug('Checking GitHub Actions running status ...');
const client = new Octokit({ auth: authCfg.github.main.token });
const data = await client.rest.actions.listWorkflowRunsForRepo({
owner: authCfg.github.main.owner,
repo: authCfg.github.main.repo,
});
const data = await client.rest.actions.listWorkflowRunsForRepo({ owner, repo });
return data.data.workflow_runs.filter((e) => e.status === 'in_progress').length > 1;
}
/**
 * Creates a new GitHub release on the given repository.
 * @param tag - tag name to create the release under
 * @param title - release title
 * @param note - release body text
 * @param preRelFlag - mark the release as a pre-release
 * @param draftFlag - create as a draft (defaults to false)
 * @param targetCommitish - branch or commit to tag (defaults to 'main')
 * @returns the created release object from the GitHub REST API.
 */
async function createNewRelease(
  client: Octokit,
  owner: string,
  repo: string,
  tag: string,
  title: string,
  note: string,
  preRelFlag: boolean,
  draftFlag: boolean = false,
  targetCommitish: string = 'main',
) {
  const payload = {
    owner,
    repo,
    tag_name: tag,
    name: title,
    body: note,
    draft: draftFlag,
    prerelease: preRelFlag,
    target_commitish: targetCommitish,
  };
  const response = await client.rest.repos.createRelease(payload);
  return response.data;
}
/**
 * Deletes the release attached to a tag, then removes the git tag ref itself.
 * @returns the response of the tag-ref deletion call.
 */
async function deleteReleaseTag(client: Octokit, owner: string, repo: string, tag: string) {
  const releaseInfo = await client.rest.repos.getReleaseByTag({ owner, repo, tag });
  await client.rest.repos.deleteRelease({ owner, repo, release_id: releaseInfo.data.id });
  return await client.rest.git.deleteRef({ owner, repo, ref: `tags/${tag}` });
}
// Public surface of the GitHub release mirroring helpers.
export default {
uploadAsset,
uploadAssetWithBuffer,
getReleaseInfo,
checkIsActionRunning,
createNewRelease,
deleteReleaseTag,
};