
Commit 97cbb71

[#46] Extract documents within adventures into separate files (#61)

Authored by arbron and Fyorl
Co-authored-by: fyorl <kim.mantas@gmail.com>
Parent: c2fe36c

File tree

4 files changed: 158 additions, 22 deletions

CHANGELOG.md
README.md
commands/package.mjs
lib/package.mjs


CHANGELOG.md (1 addition, 0 deletions)

@@ -4,6 +4,7 @@
 - Attempting to unpack a directory that is not a valid LevelDB database now throws an error.
 - (BoltsJ) Added the `--config` command-line flag. Configuration options read from this file will be merged with the global `.fvttrc.yml` configuration options.
 - (Jeff Hitchcock) Added the `--folders` command-line flag, and corresponding `folders` parameter to `extractPack`. When used, this option writes the pack's entries to a directory structure matching the pack's internal Folder document structure.
+- (Jeff Hitchcock) Added the `--expandAdventures` command-line flag, and corresponding `expandAdventures` parameter to `extractPack`. When used, this option writes each Adventure document's embedded documents to their own files.
 
 ### Fixes
 - (Jakob Törmä) Fixed launch command assuming electron directory structure.

README.md (1 addition, 0 deletions)

@@ -213,6 +213,7 @@ Extract the contents of a compendium pack into individual source files for each
 * **transformEntry:** *(entry: object): Promise<false|void>* A function that is called on every entry. Returning *false* indicates that the entry should be discarded.
 * **transformName:** *(entry: object): Promise<string|void>* A function that is called on every entry. The value returned from this will be used as the entry's filename and must include the appropriate file extension. If nothing is returned, an auto-generated name will be used instead.
 * **transformFolderName:** *(entry: object): Promise<string|void>* A function used to generate a directory name for an extracted Folder document when the `folders` option is used.
+* **expandAdventures:** *boolean* Write documents embedded in Adventures to their own files. If the `folders` option is also supplied, the Adventure is treated like a folder, and written to `_Adventure.{yml|json}` instead of `_Folder.{yml|json}`.
 * **jsonOptions:** *object*
   * **replacer:** *(key: string, value: any): any|Array<string|number>* A replacer function or an array of property names in the object to include in the resulting string.
   * **space:** *string|number* A number of spaces or a string to use as indentation.
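For the Node API, the new option rides along with the existing `extractPack` options. A minimal usage sketch, assuming the package is imported as `@foundryvtt/foundryvtt-cli` and using illustrative pack paths that are not part of this commit:

```js
// Sketch: extract a LevelDB pack, mirroring compendium folders on disk and
// splitting each Adventure's embedded documents into their own files.
// The import specifier and directory names are assumptions for illustration.
import { extractPack } from "@foundryvtt/foundryvtt-cli";

await extractPack("packs/adventures", "src/packs/adventures", {
  yaml: true,              // write YAML instead of JSON
  folders: true,           // mirror the pack's internal Folder structure
  expandAdventures: true,  // write each Adventure's embedded documents to separate files
  log: true
});
```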

commands/package.mjs (9 additions, 2 deletions)

@@ -28,6 +28,8 @@ import { compilePack, extractPack, TYPE_COLLECTION_MAP } from "../lib/package.mjs";
  * @property {boolean} [clean] When unpacking, delete the destination directory first.
  * @property {boolean} [folders] When unpacking, create a directory structure that matches the
  *   compendium folders.
+ * @property {boolean} [expandAdventures] When unpacking, extract adventure documents into a folder with
+ *   each contained document as its own entry in a folder.
  */
 
 /**
@@ -132,6 +134,11 @@ export function getCommand() {
         type: "boolean"
       });
 
+      yargs.option("expandAdventures", {
+        describe: "When unpacking, extract documents embedded inside Adventures to their own files. If supplied alongside the --folders option, the Adventure is treated like a folder.",
+        type: "boolean"
+      });
+
       return yargs;
     },
     handler: async argv => {
@@ -381,7 +388,7 @@ async function handleUnpack(argv) {
   }
 
   let documentType;
-  const { nedb, yaml, clean, folders } = argv;
+  const { nedb, yaml, clean, folders, expandAdventures } = argv;
   if ( nedb ) {
     documentType = determineDocumentType(pack, argv);
     if ( !documentType ) {
@@ -401,7 +408,7 @@ async function handleUnpack(argv) {
   console.log(`[${dbMode}] Unpacking "${chalk.blue(pack)}" to "${chalk.blue(source)}"`);
 
   try {
-    await extractPack(pack, source, { nedb, yaml, documentType, clean, folders, log: true });
+    await extractPack(pack, source, { nedb, yaml, documentType, clean, folders, expandAdventures, log: true });
   } catch ( err ) {
     console.error(err);
     process.exitCode = 1;

lib/package.mjs (147 additions, 20 deletions)

@@ -46,6 +46,9 @@ import { ClassicLevel } from "classic-level";
  * @property {DocumentType} [documentType] Required only for NeDB packs in order to generate a correct key.
  * @property {boolean} [clean] Delete the destination directory before unpacking.
  * @property {boolean} [folders] Create a directory structure that matches the compendium folders.
+ * @property {boolean} [expandAdventures] Write documents embedded in Adventures to their own files. If the
+ *   folders option is also supplied, the Adventure is treated like a
+ *   folder.
  * @property {DocumentCollection} [collection] Required only for NeDB packs in order to generate a correct key.
  *   Can be used instead of documentType if known.
  * @property {NameTransformer} [transformName] A function that is used to generate a filename for the extracted
@@ -64,6 +67,12 @@ import { ClassicLevel } from "classic-level";
  * @property {string|number} [space] A number of spaces or a string to use as indentation.
  */
 
+/**
+ * @typedef FolderDescriptor
+ * @property {string} name The folder's filename.
+ * @property {string} [folder] A parent folder ID.
+ */
+
 /**
  * @callback JSONReplacer
  * @param {string} key The key being stringified.
@@ -73,17 +82,25 @@ import { ClassicLevel } from "classic-level";
 
 /**
  * @callback EntryTransformer
- * @param {object} entry The entry data.
- * @returns {Promise<false|void>} Return boolean false to indicate that this entry should be discarded.
+ * @param {object} entry The entry data.
+ * @param {TransformerContext} [context] Optional context information for the document being transformed.
+ * @returns {Promise<false|void>} Return boolean false to indicate that this entry should be discarded.
  */
 
 /**
  * @callback NameTransformer
- * @param {object} entry The entry data.
- * @param {object} [context]
- * @param {string} [context.folder] Folder path if this entry is in a folder and the folders option is enabled.
- * @returns {Promise<string|void>} If a string is returned, it is used as the filename that the entry will be written
- *   to.
+ * @param {object} entry The entry data.
+ * @param {TransformerContext} [context] Optional context information for the document being transformed.
+ * @returns {Promise<string|void>} If a string is returned, it is used as the filename that the entry will
+ *   be written to.
+ */
+
+/**
+ * @typedef TransformerContext
+ * @property {object} [adventure] Data on an adventure if document is stored within an adventure.
+ * @property {object} [adventure.doc] The entire adventure document.
+ * @property {string} [adventure.path] The path where the adventure will be extracted.
+ * @property {string} [folder] Folder path if this entry is in a folder and the folders option is enabled.
  */
 
 /**
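Both callbacks now accept an optional `TransformerContext` as their second argument. A minimal sketch of user-supplied callbacks that make use of it when passed to `extractPack`; the `flags.myModule.skip` flag and the `.yml` naming scheme are hypothetical, purely for illustration:

```js
import path from "node:path";

// Sketch: callbacks for extractPack that inspect the new context argument.
const transformEntry = async (entry, context) => {
  // Discard documents embedded in an Adventure that carry a custom (hypothetical) "skip" flag.
  if ( context?.adventure && entry.flags?.myModule?.skip ) return false;
};

const transformName = async (entry, context) => {
  // Keep documents extracted from an Adventure next to the Adventure file itself.
  if ( context?.adventure?.path ) {
    return path.join(path.dirname(context.adventure.path), `${entry._id}.yml`);
  }
  // Returning nothing falls back to the auto-generated filename.
};
```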
@@ -154,6 +171,14 @@ const HIERARCHY = {
   }
 };
 
+/**
+ * Document collections stored with adventure documents.
+ * @type {string[]}
+ */
+const ADVENTURE_DOCS = [
+  "actors", "cards", "combats", "folders", "items", "journal", "playlists", "scenes", "tables", "macros"
+];
+
 /**
  * A mapping of primary document types to collection names.
  * @type {Record<DocumentType, DocumentCollection>}
@@ -221,7 +246,7 @@ async function compileNedb(pack, files, { log, transformEntry }={}) {
   const seenKeys = new Set();
   const packDoc = applyHierarchy(doc => {
     if ( seenKeys.has(doc._key) ) {
-      throw new Error(`An entry with key '${key}' was already packed and would be overwritten by this entry.`);
+      throw new Error(`An entry with key '${doc._key}' was already packed and would be overwritten by this entry.`);
     }
     seenKeys.add(doc._key);
     delete doc._key;
@@ -234,6 +259,10 @@ async function compileNedb(pack, files, { log, transformEntry }={}) {
     const ext = path.extname(file);
     const isYaml = ext === ".yml" || ext === ".yaml";
     const doc = isYaml ? YAML.load(contents) : JSON.parse(contents);
+    if ( !doc._key ) continue;
+    if ( doc._key.startsWith("!adventures") ) await reconstructAdventure(path.dirname(file), doc, {
+      transformEntry, log
+    });
     const key = doc._key;
     const [, collection] = key.split("!");
     // If the key starts with !folders, we should skip packing it as NeDB doesn't support folders.
@@ -290,6 +319,10 @@ async function compileClassicLevel(pack, files, { log, transformEntry }={}) {
     const ext = path.extname(file);
     const isYaml = ext === ".yml" || ext === ".yaml";
     const doc = isYaml ? YAML.load(contents) : JSON.parse(contents);
+    if ( !doc._key ) continue;
+    if ( doc._key.startsWith("!adventures") ) await reconstructAdventure(path.dirname(file), doc, {
+      transformEntry, log
+    });
     const [, collection] = doc._key.split("!");
     if ( await transformEntry?.(doc) === false ) continue;
     await packDoc(doc, collection);
@@ -315,6 +348,40 @@ async function compileClassicLevel(pack, files, { log, transformEntry }={}) {
 
 /* -------------------------------------------- */
 
+/**
+ * Collect any documents linked within an adventure.
+ * @param {string} src The Adventure document's source directory.
+ * @param {object} doc Adventure document being reconstructed.
+ * @param {Partial<PackageOptions>} [options]
+ * @returns {Promise<void>}
+ */
+async function reconstructAdventure(src, doc, { transformEntry, log }={}) {
+  const context = { adventure: doc };
+  for ( const embeddedCollectionName of ADVENTURE_DOCS ) {
+    const entries = [];
+    for ( let entry of doc[embeddedCollectionName] ?? [] ) {
+      if ( typeof entry === "string" ) {
+        const file = path.join(src, entry);
+        let contents;
+        try {
+          contents = fs.readFileSync(file, "utf8");
+        } catch ( err ) {
+          if ( log ) console.error(`Failed to pack ${chalk.red(file)} as part of Adventure reconstruction.`);
+          throw err;
+        }
+        const ext = path.extname(file);
+        const isYaml = ext === ".yml" || ext === ".yaml";
+        entry = isYaml ? YAML.load(contents) : JSON.parse(contents);
+        if ( await transformEntry?.(entry, context) === false ) continue;
+      }
+      entries.push(entry);
+    }
+    doc[embeddedCollectionName] = entries;
+  }
+}
+
+/* -------------------------------------------- */
+
 /**
  * Flushes the log of the given database to create compressed binary tables.
  * @param {ClassicLevel} db The database to compress.
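On the compile side, `reconstructAdventure` expects the extracted Adventure entry to list its embedded documents as relative filenames, which it reads back from the Adventure's own directory before packing. A hedged illustration of such a stub; every id, name, and filename below is invented:

```js
// Illustrative only: an extracted Adventure entry stores basenames in each
// ADVENTURE_DOCS collection. During compilation, reconstructAdventure replaces
// each string with the parsed contents of the corresponding sibling file.
const extractedAdventureStub = {
  _id: "aaaaaaaaaaaaaaaa",
  _key: "!adventures!aaaaaaaaaaaaaaaa",
  name: "Example Adventure",
  scenes: ["Throne_Room_bbbbbbbbbbbbbbbb.yml"],
  journal: ["Introduction_cccccccccccccccc.yml"],
  actors: []  // collections with no embedded documents remain empty arrays
  // ...the remaining ADVENTURE_DOCS collections follow the same pattern
};
```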
@@ -346,7 +413,7 @@ async function compactClassicLevel(db) {
  */
 export async function extractPack(src, dest, {
   nedb=false, yaml=false, yamlOptions={}, jsonOptions={}, log=false, documentType, collection, clean, folders,
-  transformEntry, transformName, transformFolderName
+  expandAdventures, transformEntry, transformName, transformFolderName
 }={}) {
   if ( nedb && (path.extname(src) !== ".db") ) {
     throw new Error("The nedb option was passed to extractPacks, but the target pack does not have a .db extension.");
@@ -362,7 +429,7 @@ export async function extractPack(src, dest, {
     return extractNedb(src, dest, { yaml, yamlOptions, jsonOptions, log, collection, transformEntry, transformName });
   }
   return extractClassicLevel(src, dest, {
-    yaml, log, yamlOptions, jsonOptions, folders, transformEntry, transformName, transformFolderName
+    yaml, log, yamlOptions, jsonOptions, folders, expandAdventures, transformEntry, transformName, transformFolderName
   });
 }
 
@@ -413,26 +480,26 @@
  * @returns {Promise<void>}
  */
 async function extractClassicLevel(pack, dest, {
-  yaml, yamlOptions, jsonOptions, log, folders, transformEntry, transformName, transformFolderName
-}) {
+  yaml, yamlOptions, jsonOptions, log, folders, expandAdventures, transformEntry, transformName, transformFolderName
+}={}) {
   // Load the directory as a ClassicLevel DB.
   const db = new ClassicLevel(pack, { keyEncoding: "utf8", valueEncoding: "json", createIfMissing: false });
 
   // Build up the folder structure
+  const folderMap = new Map();
   if ( folders ) {
-    folders = new Map();
     for await ( const [key, doc] of db.iterator() ) {
       if ( !key.startsWith("!folders") ) continue;
       let name = await transformFolderName?.(doc);
       if ( !name ) name = doc.name ? `${getSafeFilename(doc.name)}_${doc._id}` : key;
-      folders.set(doc._id, { name, folder: doc.folder });
+      folderMap.set(doc._id, { name, folder: doc.folder });
     }
-    for ( const folder of folders.values() ) {
-      let parent = folders.get(folder.folder);
+    for ( const folder of folderMap.values() ) {
+      let parent = folderMap.get(folder.folder);
       folder.path = folder.name;
       while ( parent ) {
         folder.path = path.join(parent.name, folder.path);
-        parent = folders.get(parent.folder);
+        parent = folderMap.get(parent.folder);
       }
     }
   }
@@ -453,11 +520,17 @@
     if ( collection.includes(".") ) continue; // This is not a primary document, skip it.
     await unpackDoc(doc, collection);
     if ( await transformEntry?.(doc) === false ) continue;
-    const folder = folders?.get(doc.folder)?.path;
+    if ( key.startsWith("!adventures") && expandAdventures ) {
+      await extractAdventure(doc, dest, { folderMap }, {
+        yaml, yamlOptions, jsonOptions, log, folders, transformEntry, transformName
+      });
+      continue;
+    }
+    const folder = folderMap.get(doc.folder)?.path;
     let name = await transformName?.(doc, { folder });
     if ( !name ) {
-      if ( key.startsWith("!folders") && folders?.has(doc._id) ) {
-        const folder = folders.get(doc._id);
+      if ( key.startsWith("!folders") && folderMap.has(doc._id) ) {
+        const folder = folderMap.get(doc._id);
         name = path.join(folder.name, `_Folder.${yaml ? "yml" : "json"}`);
       } else {
         name = `${doc.name ? `${getSafeFilename(doc.name)}_${id}` : key}.${yaml ? "yml" : "json"}`;
@@ -472,6 +545,60 @@
   await db.close();
 }
 
+/* -------------------------------------------- */
+
+/**
+ * Split an adventure document into separate files.
+ * @param {object} doc The Document being operated on.
+ * @param {string} dest The root output directory.
+ * @param {object} [adventureOptions] Options to configure adventure extraction behavior.
+ * @param {Map<string, FolderDescriptor>} [adventureOptions.folderMap] Folder hierarchy.
+ * @param {Partial<ExtractOptions>} [extractOptions] Options to configure serialization behavior.
+ */
+async function extractAdventure(doc, dest, { folderMap }={}, {
+  yaml, yamlOptions, jsonOptions, log, folders, transformEntry, transformName
+}={}) {
+  let adventureFolder;
+
+  // Prepare name for the adventure
+  const folder = folderMap?.get(doc.folder)?.path;
+  let name = await transformName?.(doc, { folder });
+  adventureFolder = folders ? path.join(folder ?? "", `${getSafeFilename(doc.name)}_${doc._id}`) : folder;
+  if ( !name ) {
+    if ( folders ) {
+      name = path.join(adventureFolder, `_Adventure.${yaml ? "yml" : "json"}`);
+    } else {
+      name = `${doc.name ? `${getSafeFilename(doc.name)}_${doc._id}` : doc._id}.${yaml ? "yml" : "json"}`;
+      if ( folder ) name = path.join(folder, name);
+    }
+  }
+
+  // Write all documents contained in the adventure
+  const context = { adventure: { doc, path: name } };
+  for ( const embeddedCollectionName of ADVENTURE_DOCS ) {
+    const paths = [];
+    for ( const embeddedDoc of doc[embeddedCollectionName] ?? [] ) {
+      if ( await transformEntry?.(embeddedDoc, context) === false ) continue;
+      let embeddedName = await transformName?.(embeddedDoc, context);
+      if ( !embeddedName ) {
+        const { name, _id: id } = embeddedDoc;
+        embeddedName = `${name ? `${getSafeFilename(name)}_${id}` : doc._id}.${yaml ? "yml" : "json"}`;
+        if ( adventureFolder ) embeddedName = path.join(adventureFolder, embeddedName);
+      }
+      const filename = path.join(dest, embeddedName);
+      paths.push(path.basename(embeddedName));
+      serializeDocument(embeddedDoc, filename, { yaml, yamlOptions, jsonOptions });
+      if ( log ) console.log(`Wrote ${chalk.blue(embeddedName)}`);
+    }
+    doc[embeddedCollectionName] = paths;
+  }
+
+  // Write the adventure itself
+  const filename = path.join(dest, name);
+  serializeDocument(doc, filename, { yaml, yamlOptions, jsonOptions });
+  if ( log ) console.log(`Wrote ${chalk.blue(name)}`);
+}
+
 /* -------------------------------------------- */
 /* Utilities */
 /* -------------------------------------------- */
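Taken together, extraction with both `folders` and `expandAdventures` enabled produces a layout along these lines. Every directory and file name below is invented for illustration; the `_Adventure.yml` stub is the Adventure document itself, with its embedded collections replaced by the sibling filenames:

```
src/packs/adventures/
  Starter_Content_FOLDERID12345678/        (compendium Folder, from folders)
    Example_Adventure_ADVENTUREID1234/     (the Adventure, treated like a folder)
      _Adventure.yml                       (the Adventure stub)
      Throne_Room_SCENEID1234567890.yml    (embedded Scene)
      Introduction_JOURNALID12345678.yml   (embedded JournalEntry)
```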
