Add Dockerfile, read raw romfs with zstd+sarc+byml
aquacluck committed Jul 9, 2023 (commit 8c4d576, parent 46b3b16)
Showing 4 changed files with 67 additions and 23 deletions.
7 changes: 7 additions & 0 deletions Dockerfile
@@ -0,0 +1,7 @@
FROM node:18
WORKDIR /radar
COPY . .
RUN npm install && npm install typescript -g
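# zstd, sarc and byml supply the command-line tools (zstd, sarc, byml_to_yml) that build.ts shells out to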
RUN apt-get update && apt-get install -y zstd python3-pip && pip3 install sarc byml --break-system-packages
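# Build the map database from the romfs mounted at /romfs, then start the dev server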
CMD ./node_modules/.bin/ts-node ./build.ts -r /romfs -e ./tools && \
npm run dev
8 changes: 6 additions & 2 deletions README.md
@@ -3,6 +3,10 @@ A server for querying placement objects in *The Legend of Zelda: Tears of the Ki

Run build.ts to generate a map database before starting the server for the first time.

-ts-node build.ts -d ../totk/Banc
+ts-node build.ts -r ../totk -e tools

-This assumes the `totk/Banc` directory contains the YAML data object map files
+This assumes the `totk` directory contains the unaltered romfs contents.

For docker usage: `docker build -t radar .; docker run -it --rm --name radar -v /path/to/your/romfs:/romfs radar`

It's possible to build the db within docker and copy it out for the server to use, if you'd rather not install the extraction tools used in build.ts on your local machine.
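
The database build reads `Banc/`, `Ecosystem/FieldMapArea/`, and `Pack/ZsDic.pack.zs` from the mounted romfs. Once the build inside the container finishes, something like `docker cp radar:/radar/map.db .` should copy the database out, assuming the finished file is left as `map.db` in the container's `/radar` working directory.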
3 changes: 1 addition & 2 deletions beco.ts
@@ -28,9 +28,8 @@ export class Beco {
// Offsets to row data, divided by 2 and relative to the start of the row section
offsets: number[]; // u32, size num_rows
segments: BecoSegment[][]; // Rows x Segments
-constructor(file: string) {
+constructor(buf: Buffer) {
let little = true;
-let buf = fs.readFileSync(file);
let arr = new Uint8Array(buf.byteLength);
buf.copy(arr, 0, 0, buf.byteLength);
let dv = new DataView(arr.buffer);
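
For illustration, a minimal sketch of the new call style: the caller now reads (and, if necessary, decompresses) the file itself and hands the bytes to Beco. The path below is only an example.

import fs from 'fs';
import { Beco } from './beco';

// The constructor no longer touches the filesystem; pass it the raw bytes.
const buf = fs.readFileSync('/path/to/romfs/Ecosystem/FieldMapArea/Ground.beco');
const ground = new Beco(buf);
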
72 changes: 53 additions & 19 deletions build.ts
@@ -1,3 +1,4 @@
import { execSync } from 'child_process';
import sqlite3 from 'better-sqlite3';
import fs from 'fs';
import yaml from 'js-yaml';
@@ -6,17 +7,17 @@ import { Beco } from './beco';

let parseArgs = require('minimist');
let argv = parseArgs(process.argv);
-if (!argv.d || !argv.b || !argv.e) {
+if (!argv.e || !argv.r) {
console.log("Error: Must specify paths to directories with ");
console.log(" -d Banc extracted YAML files");
console.log(" -b field map area beco files");
console.log(" -e Ecosystem json files");
console.log(" e.g. % ts-node build.ts -d path/to/Banc -b path/to/beco -e path/to/Ecosystem")
console.log(" -r Bare game romfs");
console.log(" e.g. % ts-node build.ts -r path/to/romfs -e tools")
process.exit(1);
}
-const totkData = argv.d
-const becoPath = argv.b;
const ecoPath = argv.e;
+const romfsPath = argv.r;
+const totkData = path.join(romfsPath, 'Banc');
+const becoPath = path.join(romfsPath, 'Ecosystem', 'FieldMapArea');

fs.rmSync('map.db.tmp', { force: true });
const db = sqlite3('map.db.tmp');
@@ -85,14 +86,29 @@ const LOCATIONS = JSON.parse(fs.readFileSync('LocationMarker.json', 'utf8'))
const KOROKS = JSON.parse(fs.readFileSync('koroks_id.json', 'utf8'))
const DROP_TABLES = JSON.parse(fs.readFileSync('drop_tables.json', 'utf8'))

const BCETT_YAML_SUFFIXES = /\.bcett\.b?yml(\.zs)?$/;
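// (matches .bcett.yml and .bcett.byml map unit files, with or without trailing .zs zstd compression)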

const DropTableDefault = "Default";
const DROP_TYPE_ACTOR = "Actor";
const DROP_TYPE_TABLE = "Table";

-const BecoGround = new Beco(path.join(becoPath, 'Ground.beco'));
-const BecoMinus = new Beco(path.join(becoPath, 'MinusField.beco'));
-const BecoSky = new Beco(path.join(becoPath, 'Sky.beco'));
-const BecoCave = new Beco(path.join(becoPath, 'Cave.beco'));
const getZsDicPath = (function() {
// Only call these tools when we need to use them, only extract zsdics once
let zsDicPath: string = "";
return function(): string {
if (!zsDicPath) {
zsDicPath = fs.mkdtempSync('zsdicpack');
execSync(`zstd -d "${romfsPath}/Pack/ZsDic.pack.zs" -o "${zsDicPath}/ZsDic.pack"`);
execSync(`sarc x --directory "${zsDicPath}" "${zsDicPath}/ZsDic.pack"`);
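// The extracted pack provides the zstd dictionaries (zs.zsdic, bcett.byml.zsdic) used by the decompression calls below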
}
return zsDicPath;
};
})();

const BecoGround = new Beco(readRawBeco('Ground'));
const BecoMinus = new Beco(readRawBeco('MinusField'));
const BecoSky = new Beco(readRawBeco('Sky'));
const BecoCave = new Beco(readRawBeco('Cave'));

// Should probably be yaml not json for consistency
const Ecosystem = Object.fromEntries(['Cave', 'Ground', 'MinusField', 'Sky'].map(name => {
@@ -192,6 +208,27 @@ function parseHash(hash: string) {
return '0x' + BigInt(hash).toString(16).padStart(16, '0');
}

function readRawBeco(name: string): Buffer {
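// Read the .beco from the romfs; fall back to the .zs variant, decompressing with the shared zs.zsdic dictionary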
let filePath = path.join(becoPath, name + '.beco');
if (fs.existsSync(filePath)) {
return fs.readFileSync(filePath);
} else if (fs.existsSync(filePath + '.zs')) {
return execSync(`zstd -D "${getZsDicPath()}/zs.zsdic" -d ${filePath}.zs -c`, {maxBuffer: 1073741824});
}
throw Error(`No beco file found for ${name}`);
}

function readRawYaml(filePath: string): string {
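// Return YAML text: .yml is read directly, .byml is converted via byml_to_yml, .byml.zs is decompressed with bcett.byml.zsdic first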
if (filePath.endsWith('.yml')) {
return fs.readFileSync(filePath, 'utf-8').toString();
} else if (filePath.endsWith('.byml')) {
return execSync(`byml_to_yml ${filePath} -`, {maxBuffer: 1073741824}).toString();
} else if (filePath.endsWith('.byml.zs')) {
return execSync(`zstd -D "${getZsDicPath()}/bcett.byml.zsdic" -d ${filePath} -c | byml_to_yml - -`, {maxBuffer: 1073741824}).toString();
}
throw Error(`No yml file found at ${filePath}`);
}

function getKorokType(hideType: number | undefined, name: string) {
if (name == 'KorokCarryProgressKeeper') {
return 'Korok Friends';
@@ -215,9 +252,7 @@ function getKorokType(hideType: number | undefined, name: string) {
function processBanc(filePath: string, mapType: string, mapName: string) {
let doc: any = null;
try {
-doc = yaml.load(fs.readFileSync(filePath, 'utf-8'),
-{ schema: schema }
-);
+doc = yaml.load(readRawYaml(filePath), { schema: schema });
} catch (e: any) {
console.log("Error: ", e);
process.exit(1);
@@ -443,13 +478,13 @@ function processBancs() {
const dirPath = path.join(totkData, field);
let files = fs.readdirSync(dirPath);
for (const file of files) {
-if (!file.endsWith('.bcett.yml'))
+if (!file.match(BCETT_YAML_SUFFIXES))
continue;
let filePath = path.join(dirPath, file);

const fieldParts = field.split("/");
let mapName = file
.replace(".bcett.yml", "")
.replace(BCETT_YAML_SUFFIXES, "")
.replace("_Static", "")
.replace("_Dynamic", "");
const mapType = fieldParts[0];
@@ -463,12 +498,12 @@
for (const mapType of ["SmallDungeon", "LargeDungeon", "NormalStage"]) {
const dirPath = path.join(totkData, mapType);
for (const file of fs.readdirSync(dirPath)) {
-if (!file.endsWith('.bcett.yml'))
+if (!file.match(BCETT_YAML_SUFFIXES))
continue;

const filePath = path.join(dirPath, file);
const mapName = file
.replace(".bcett.yml", "")
.replace(BCETT_YAML_SUFFIXES, "")
.replace("_Static", "")
.replace("_Dynamic", "");
processBanc(filePath, mapType, mapName);
@@ -496,8 +531,7 @@ function processRecycleBox() {
console.log("process recyclebox: ", filePath)
let doc: any = null;
try {
-doc = yaml.load(fs.readFileSync(filePath, 'utf-8'),
-{ schema: schema });
+doc = yaml.load(readRawYaml(filePath), { schema: schema });
} catch (e: any) {
console.log("Error: ", e);
process.exit(1);
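
For illustration, a small sketch (file names made up) of how the new BCETT_YAML_SUFFIXES regex drives both the file filter and the map name derivation in processBancs; whether a matched file is plain, binary, or compressed is then resolved by readRawYaml.

const BCETT_YAML_SUFFIXES = /\.bcett\.b?yml(\.zs)?$/;

for (const file of ['A-1_Static.bcett.byml.zs', 'Dungeon001_Dynamic.bcett.yml', 'notes.txt']) {
  if (!file.match(BCETT_YAML_SUFFIXES))
    continue; // notes.txt is skipped
  const mapName = file
    .replace(BCETT_YAML_SUFFIXES, '')
    .replace('_Static', '')
    .replace('_Dynamic', '');
  console.log(mapName); // "A-1", then "Dungeon001"
}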
