Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add ability to pack objects rather than just files #54

Open
wants to merge 1 commit into
base: main
Choose a base branch
from
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
43 changes: 27 additions & 16 deletions lib/package.mjs
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,8 @@ import { ClassicLevel } from "classic-level";
* @typedef {PackageOptions} CompileOptions
* @property {boolean} [recursive=false] Whether to recurse into child directories to locate source files, otherwise
* only source files located in the root directory will be used.
* @property {boolean} [filesAreObjects] Rather than a list of file names, an iterable object is supplied where each
* item is the object to be packed.
*/

/**
Expand Down Expand Up @@ -175,7 +177,7 @@ export const TYPE_COLLECTION_MAP = {

/**
* Compile source files into a compendium pack.
* @param {string} src The directory containing the source files.
* @param {string|Iterable<Object>} src The directory containing the source files, or an iterable of existing objects.
* @param {string} dest The target compendium pack. This should be a directory for LevelDB packs, or a .db file for
* NeDB packs.
* @param {CompileOptions} [options]
Expand All @@ -187,9 +189,10 @@ export async function compilePack(src, dest, {
if ( nedb && (path.extname(dest) !== ".db") ) {
throw new Error("The nedb option was passed to compilePacks, but the target pack does not have a .db extension.");
}
const files = findSourceFiles(src, { yaml, recursive });
if ( nedb ) return compileNedb(dest, files, { log, transformEntry });
return compileClassicLevel(dest, files, { log, transformEntry });
const isDirectory = typeof (src) === "string" || src instanceof String;
const files = isDirectory ? findSourceFiles(src, { yaml, recursive }) : src;
if ( nedb ) return compileNedb(dest, files, { log, transformEntry, filesAreObjects: !isDirectory });
return compileClassicLevel(dest, files, { log, transformEntry, filesAreObjects: !isDirectory });
}

/* -------------------------------------------- */
Expand All @@ -198,10 +201,10 @@ export async function compilePack(src, dest, {
* Compile a set of files into a NeDB compendium pack.
* @param {string} pack The target compendium pack.
* @param {string[]} files The source files.
* @param {Partial<PackageOptions>} [options]
* @param {Partial<CompileOptions>} [options]
* @returns {Promise<void>}
*/
async function compileNedb(pack, files, { log, transformEntry }={}) {
async function compileNedb(pack, files, { log, transformEntry, filesAreObjects=false }={}) {
// Delete the existing NeDB file if it exists.
try {
fs.unlinkSync(pack);
Expand All @@ -223,10 +226,7 @@ async function compileNedb(pack, files, { log, transformEntry }={}) {
// Iterate over all source files, writing them to the DB.
for ( const file of files ) {
try {
const contents = fs.readFileSync(file, "utf8");
const ext = path.extname(file);
const isYaml = ext === ".yml" || ext === ".yaml";
const doc = isYaml ? YAML.load(contents) : JSON.parse(contents);
const doc = filesAreObjects ? file : loadFile(file);
const key = doc._key;
const [, collection] = key.split("!");
// If the key starts with !folders, we should skip packing it as NeDB doesn't support folders.
Expand All @@ -252,10 +252,10 @@ async function compileNedb(pack, files, { log, transformEntry }={}) {
* Compile a set of files into a LevelDB compendium pack.
* @param {string} pack The target compendium pack.
* @param {string[]} files The source files.
* @param {Partial<PackageOptions>} [options]
* @param {Partial<CompileOptions>} [options]
* @returns {Promise<void>}
*/
async function compileClassicLevel(pack, files, { log, transformEntry }={}) {
async function compileClassicLevel(pack, files, { log, transformEntry, filesAreObjects=false }={}) {
// Create the classic level directory if it doesn't already exist.
fs.mkdirSync(pack, { recursive: true });

Expand All @@ -279,10 +279,7 @@ async function compileClassicLevel(pack, files, { log, transformEntry }={}) {
// Iterate over all files in the input directory, writing them to the DB.
for ( const file of files ) {
try {
const contents = fs.readFileSync(file, "utf8");
const ext = path.extname(file);
const isYaml = ext === ".yml" || ext === ".yaml";
const doc = isYaml ? YAML.load(contents) : JSON.parse(contents);
const doc = filesAreObjects ? file : loadFile(file);
const [, collection] = doc._key.split("!");
if ( await transformEntry?.(doc) === false ) continue;
await packDoc(doc, collection);
Expand Down Expand Up @@ -550,3 +547,17 @@ function keyJoin(...args) {
/**
 * Sanitize a filename so it is safe to write to disk, replacing every character
 * outside the allowed set (ASCII letters, digits, and Cyrillic А-я) with an underscore.
 * @param {string} filename  The candidate filename.
 * @returns {string}         The sanitized filename.
 */
function getSafeFilename(filename) {
  const unsafeChars = /[^a-zA-Z0-9А-я]/g;
  return filename.replace(unsafeChars, "_");
}

/* -------------------------------------------- */

/**
 * Load a JSON or YAML source file, using its file extension to determine the format.
 * Files ending in ".yml" or ".yaml" (any case) are parsed as YAML; everything else
 * is parsed as JSON.
 * @param {string} filename  The path of the file to load.
 * @returns {object}         The parsed file contents.
 */
function loadFile(filename) {
  const contents = fs.readFileSync(filename, "utf8");
  // Compare case-insensitively so ".YML" / ".Yaml" files are recognized as YAML too.
  const ext = path.extname(filename).toLowerCase();
  const isYaml = ext === ".yml" || ext === ".yaml";
  return isYaml ? YAML.load(contents) : JSON.parse(contents);
}