From 2a7049a6ddd26766275ee515d4f55a0301d3acd1 Mon Sep 17 00:00:00 2001
From: Anton Vikulov
Date: Thu, 19 Oct 2023 19:23:19 +0500
Subject: [PATCH] fix(cache): fix

---
 src/models.ts                   |  19 ++--
 src/resolvers/lintPage.ts       |   1 -
 src/resolvers/md2html.ts        |   5 +-
 src/resolvers/md2md.ts          |  49 +++++----
 src/services/cache/cache.ts     |  45 +++++++--
 src/services/cache/cacheFile.ts | 169 ++++++++++++++++++++------------
 src/services/utils.ts           |   2 +-
 src/utils/pluginEnvApi.ts       |  62 ++++++------
 src/utils/presets.ts            |   6 +-
 9 files changed, 224 insertions(+), 134 deletions(-)

diff --git a/src/models.ts b/src/models.ts
index 7d09e209e..afd2bd42a 100644
--- a/src/models.ts
+++ b/src/models.ts
@@ -6,7 +6,7 @@ import {Lang, Stage, IncludeMode, ResourceType} from './constants';
 import {ChangelogItem} from '@diplodoc/transform/lib/plugins/changelog/types';
 import PluginEnvApi from './utils/pluginEnvApi';
 
-export type VarsPreset = 'internal'|'external';
+export type VarsPreset = 'internal' | 'external';
 
 export type YfmPreset = Record;
 // eslint-disable-next-line @typescript-eslint/no-explicit-any
@@ -14,7 +14,10 @@ export type Metadata = Record;
 
 export type ExternalAuthorByPathFunction = (path: string) => Contributor | null;
 export type ContributorsByPathFunction = (path: string) => Promise;
-export type NestedContributorsForPathFunction = (path: string, nestedContributors: Contributors) => void;
+export type NestedContributorsForPathFunction = (
+    path: string,
+    nestedContributors: Contributors,
+) => void;
 export type UserByLoginFunction = (login: string) => Promise;
 export type CollectionOfPluginsFunction = (output: string, options: PluginOptions) => string;
@@ -99,7 +102,7 @@ export type YfmTocIncluder = {
 
 export const includersNames = ['sourcedocs', 'openapi', 'generic', 'unarchive'] as const;
 
-export type YfmTocIncluderName = typeof includersNames[number];
+export type YfmTocIncluderName = (typeof includersNames)[number];
 
 // eslint-disable-next-line @typescript-eslint/no-explicit-any
 export type Includer = {
@@ -107,7 +110,9 @@ export type Includer = {
     includerFunction: IncluderFunction;
 };
 
-export type IncluderFunction = (args: IncluderFunctionParams) => Promise;
+export type IncluderFunction = (
+    args: IncluderFunctionParams,
+) => Promise;
 
 export type IncluderFunctionParams = {
     // item that contains include that uses includer
@@ -151,7 +156,7 @@ export interface LeadingPageLinks extends Filter {
 }
 
 export interface Filter {
-    when?: boolean|string;
+    when?: boolean | string;
     [key: string]: unknown;
 }
 
@@ -243,8 +248,8 @@ export type Resources = {
 };
 
 export type YandexCloudTranslateGlossaryPair = {
-  sourceText: string;
-  translatedText: string;
+    sourceText: string;
+    translatedText: string;
 };
 
 export type CommitInfo = {
diff --git a/src/resolvers/lintPage.ts b/src/resolvers/lintPage.ts
index e223f7163..f34c563ad 100644
--- a/src/resolvers/lintPage.ts
+++ b/src/resolvers/lintPage.ts
@@ -93,7 +93,6 @@ function MdFileLinter(content: string, lintOptions: FileTransformOptions): void
     const pluginOptions: PluginOptions = {
         ...options,
         vars,
-        varsHashList,
         root,
         path: localPath,
         lintMarkdown, // Should pass the function for linting included files
diff --git a/src/resolvers/md2html.ts b/src/resolvers/md2html.ts
index 0e68d60d4..014701379 100644
--- a/src/resolvers/md2html.ts
+++ b/src/resolvers/md2html.ts
@@ -131,7 +131,10 @@ export function liquidMd2Html(input: string, vars: Record, path
     });
 }
 
-async function MdFileTransformer(content: string, transformOptions: FileTransformOptions): Promise {
+async function MdFileTransformer(
+    content: string,
+    transformOptions: FileTransformOptions,
+): Promise {
     const {input, ...options} = ArgvService.getConfig();
 
     const {path: filePath} = transformOptions;
diff --git a/src/resolvers/md2md.ts b/src/resolvers/md2md.ts
index 827adad11..785a2187e 100644
--- a/src/resolvers/md2md.ts
+++ b/src/resolvers/md2md.ts
@@ -22,7 +22,11 @@ export async function resolveMd2Md(options: ResolveMd2MdOptions): Promise
 
     const rawContent = fs.readFileSync(resolvedInputPath, 'utf8');
 
-    const cacheKey = cacheServiceBuildMd.getHashKey({filename: inputPath, content: rawContent, varsHashList});
+    const cacheKey = cacheServiceBuildMd.getHashKey({
+        filename: inputPath,
+        content: rawContent,
+        varsHashList,
+    });
 
     let result: string;
     let changelogs: ChangelogItem[];
@@ -31,15 +35,15 @@ export async function resolveMd2Md(options: ResolveMd2MdOptions): Promise
     if (cachedFile) {
         logger.info(inputPath, CACHE_HIT);
         await cachedFile.extractCacheAsync();
-        const results = cachedFile.getResult<{result: string; changelogs: ChangelogItem[]; logs: Record}>();
+        const results = cachedFile.getResult<{
+            result: string;
+            changelogs: ChangelogItem[];
+            logs: Record;
+        }>();
         result = results.result;
         changelogs = results.changelogs;
     } else {
-        const content = await getContentWithUpdatedMetadata(
-            rawContent,
-            metadata,
-            vars.__system,
-        );
+        const content = await getContentWithUpdatedMetadata(rawContent, metadata, vars.__system);
 
         const cacheFile = cacheServiceBuildMd.createFile(cacheKey);
         const envApi = PluginEnvApi.create({
@@ -90,7 +94,10 @@ export async function resolveMd2Md(options: ResolveMd2MdOptions): Promise
                 changesName = Math.trunc(new Date(changesDate).getTime() / 1000);
             }
             if (!changesName) {
-                changesName = `name-${mdFilename}-${String(changelogs.length - index).padStart(3, '0')}`;
+                changesName = `name-${mdFilename}-${String(changelogs.length - index).padStart(
+                    3,
+                    '0',
+                )}`;
             }
 
             const changesPath = join(outputDir, `changes-${changesName}.json`);
@@ -99,10 +106,13 @@ export async function resolveMd2Md(options: ResolveMd2MdOptions): Promise
             if (fs.existsSync(changesPath)) {
                 throw new Error(`Changelog ${changesPath} already exists!`);
             }
 
-            fs.writeFileSync(changesPath, JSON.stringify({
-                ...changes,
-                source: mdFilename,
-            }));
+            fs.writeFileSync(
+                changesPath,
+                JSON.stringify({
+                    ...changes,
+                    source: mdFilename,
+                }),
+            );
         });
     }
@@ -116,7 +126,10 @@ function copyFile(targetPath: string, targetDestPath: string, options?: PluginOp
         const {envApi} = options;
         let sourceIncludeContent: string;
         if (envApi) {
-            sourceIncludeContent = envApi.readFile(relative(envApi.root, targetPath), 'utf-8') as string;
+            sourceIncludeContent = envApi.readFile(
+                relative(envApi.root, targetPath),
+                'utf-8',
+            ) as string;
         } else {
             sourceIncludeContent = fs.readFileSync(targetPath, 'utf8');
         }
@@ -136,11 +149,7 @@ function copyFile(targetPath: string, targetDestPath: string, options?: PluginOp
 }
 
 export function liquidMd2Md(input: string, vars: Record, path: string) {
-    const {
-        applyPresets,
-        resolveConditions,
-        conditionsInCode,
-    } = ArgvService.getConfig();
+    const {applyPresets, resolveConditions, conditionsInCode} = ArgvService.getConfig();
 
     return liquid(input, vars, path, {
         conditions: resolveConditions,
@@ -152,9 +161,7 @@ export function liquidMd2Md(input: string, vars: Record, path: string) {
 }
 
 function transformMd2Md(input: string, options: PluginOptions) {
-    const {
-        disableLiquid,
-    } = ArgvService.getConfig();
+    const {disableLiquid} = ArgvService.getConfig();
     const {
         vars = {},
         path,
diff --git a/src/services/cache/cache.ts b/src/services/cache/cache.ts
index 35a5c6a80..7969e4a71 100644
--- a/src/services/cache/cache.ts
+++ b/src/services/cache/cache.ts
@@ -51,17 +51,36 @@ export class CacheService {
         if (!argsHash) {
             const args = ArgvService.getConfig();
             const staticArgs = pick(args, [
-                'varsPreset', 'ignore', 'outputFormat', 'allowHTML', 'vars', 'applyPresets',
-                'resolveConditions', 'conditionsInCode', 'disableLiquid', 'strict', 'ignoreStage', 'singlePage',
-                'removeHiddenTocItems', 'connector', 'lang', 'lintConfig', 'resources', 'addSystemMeta',
-                'contributors', 'ignoreAuthorPatterns', 'allowCustomResources',
+                'varsPreset',
+                'ignore',
+                'outputFormat',
+                'allowHTML',
+                'vars',
+                'applyPresets',
+                'resolveConditions',
+                'conditionsInCode',
+                'disableLiquid',
+                'strict',
+                'ignoreStage',
+                'singlePage',
+                'removeHiddenTocItems',
+                'connector',
+                'lang',
+                'lintConfig',
+                'resources',
+                'addSystemMeta',
+                'contributors',
+                'ignoreAuthorPatterns',
+                'allowCustomResources',
             ]);
             argsHash = CacheService.getHash(JSON.stringify(staticArgs));
         }
         const contentHash = CacheService.getHash(content);
 
         return {
             key: this.getHash(JSON.stringify({filename, contentHash, varsHashList, argsHash})),
-            filename, contentHash, varsHashList,
+            filename,
+            contentHash,
+            varsHashList,
         };
     }
@@ -79,7 +98,9 @@ export class CacheService {
     }
 
     checkFile({key}: HashKey) {
-        if (this.disabled) { return; }
+        if (this.disabled) {
+            return;
+        }
 
         const filepath = this.getCacheFilepath(key);
         if (!fs.existsSync(filepath)) {
@@ -97,7 +118,9 @@ export class CacheService {
     }
 
     async checkFileAsync({key}: HashKey) {
-        if (this.disabled) { return; }
+        if (this.disabled) {
+            return;
+        }
 
         const filepath = this.getCacheFilepath(key);
         const exists = await fileExists(filepath);
@@ -121,7 +144,9 @@ export class CacheService {
     }
 
     addFile(file: CacheFile) {
-        if (this.disabled) { return; }
+        if (this.disabled) {
+            return;
+        }
 
         const filepath = this.getCacheFilepath(file.getKey());
         const place = path.dirname(filepath);
@@ -134,7 +159,9 @@ export class CacheService {
     }
 
     async addFileAsync(file: CacheFile) {
-        if (this.disabled) { return; }
+        if (this.disabled) {
+            return;
+        }
 
         const filepath = this.getCacheFilepath(file.getKey());
         const place = path.dirname(filepath);
diff --git a/src/services/cache/cacheFile.ts b/src/services/cache/cacheFile.ts
index 909e7dbb5..21efddfe1 100644
--- a/src/services/cache/cacheFile.ts
+++ b/src/services/cache/cacheFile.ts
@@ -36,7 +36,9 @@ class CacheFile {
     }
 
     use() {
-        if (this.disabled) { return undefined; }
+        if (this.disabled) {
+            return undefined;
+        }
 
         return this;
     }
@@ -56,35 +58,51 @@ class CacheFile {
         const {fileDeps, copiedFiles, existsFiles, fileVarsDeps} = this.data;
 
         for (const filename in fileVarsDeps) {
-            if (!Object.hasOwnProperty.call(fileVarsDeps, filename)) { continue; }
+            if (!Object.hasOwnProperty.call(fileVarsDeps, filename)) {
+                continue;
+            }
             const reqVarsHashList = fileVarsDeps[filename];
             const {varsHashList} = getVarsPerFileWithHash(filename);
-            if (!isEqual(varsHashList, reqVarsHashList)) { return; }
+            if (!isEqual(varsHashList, reqVarsHashList)) {
+                return;
+            }
         }
 
         for (const to in copiedFiles) {
-            if (!Object.hasOwnProperty.call(copiedFiles, to)) { continue; }
+            if (!Object.hasOwnProperty.call(copiedFiles, to)) {
+                continue;
+            }
             const from = copiedFiles[to];
             const filepath = path.join(root, from);
-            if (!fs.existsSync(filepath)) { return; }
+            if (!fs.existsSync(filepath)) {
+                return;
+            }
         }
 
         for (const filename in existsFiles) {
-            if (!Object.hasOwnProperty.call(existsFiles, filename)) { continue; }
+            if (!Object.hasOwnProperty.call(existsFiles, filename)) {
+                continue;
+            }
             const reqState = existsFiles[filename];
             const filepath = path.join(root, filename);
-            if (fs.existsSync(filepath) !== reqState) { return; }
+            if (fs.existsSync(filepath) !== reqState) {
+                return;
+            }
         }
 
         for (const filename in fileDeps) {
-            if (!Object.hasOwnProperty.call(fileDeps, filename)) { continue; }
+            if (!Object.hasOwnProperty.call(fileDeps, filename)) {
+                continue;
+            }
             const reqContentHash = fileDeps[filename];
             const filepath = path.join(root, filename);
-            if (!fs.existsSync(filepath)) { return; }
+            if (!fs.existsSync(filepath)) {
+                return;
+            }
             const contentHash = CacheService.getFileHash(filepath);
             if (contentHash !== reqContentHash) {
                 return;
             }
         }
@@ -102,42 +120,58 @@ class CacheFile {
         const {fileDeps, copiedFiles, existsFiles, fileVarsDeps} = this.data;
 
         for (const filename in fileVarsDeps) {
-            if (!Object.hasOwnProperty.call(fileVarsDeps, filename)) { continue; }
+            if (!Object.hasOwnProperty.call(fileVarsDeps, filename)) {
+                continue;
+            }
             const reqVarsHashList = fileVarsDeps[filename];
             const {varsHashList} = getVarsPerFileWithHash(filename);
-            if (!isEqual(varsHashList, reqVarsHashList)) { return; }
+            if (!isEqual(varsHashList, reqVarsHashList)) {
+                return;
+            }
         }
 
         const tasks: (() => Promise)[] = [];
 
-        Object.entries(copiedFiles).forEach(([, from]) => tasks.push(asyncify(async () => {
-            const filepath = path.join(root, from);
-            const isExists = await fileExists(filepath);
-            if (!isExists) {
-                throw new Error('Aborted');
-            }
-        })));
-
-        Object.entries(existsFiles).forEach(([filename, reqState]) => tasks.push(asyncify(async () => {
-            const filepath = path.join(root, filename as string);
-            const isExists = await fileExists(filepath);
-            if (isExists !== reqState as boolean) {
-                throw new Error('Aborted');
-            }
-        })));
-
-        Object.entries(fileDeps).forEach(([filename, reqContentHash]) => tasks.push(asyncify(async () => {
-            const filepath = path.join(root, filename);
-            const isExists = await fileExists(filepath);
-            if (!isExists) {
-                throw new Error('Aborted');
-            }
-            const contentHash = await CacheService.getFileHashAsync(filepath);
-            if (contentHash !== reqContentHash) {
-                throw new Error('Aborted');
-            }
-        })));
+        Object.entries(copiedFiles).forEach(([, from]) =>
+            tasks.push(
+                asyncify(async () => {
+                    const filepath = path.join(root, from);
+                    const isExists = await fileExists(filepath);
+                    if (!isExists) {
+                        throw new Error('Aborted');
+                    }
+                }),
+            ),
+        );
+
+        Object.entries(existsFiles).forEach(([filename, reqState]) =>
+            tasks.push(
+                asyncify(async () => {
+                    const filepath = path.join(root, filename as string);
+                    const isExists = await fileExists(filepath);
+                    if (isExists !== (reqState as boolean)) {
+                        throw new Error('Aborted');
+                    }
+                }),
+            ),
+        );
+
+        Object.entries(fileDeps).forEach(([filename, reqContentHash]) =>
+            tasks.push(
+                asyncify(async () => {
+                    const filepath = path.join(root, filename);
+                    const isExists = await fileExists(filepath);
+                    if (!isExists) {
+                        throw new Error('Aborted');
+                    }
+                    const contentHash = await CacheService.getFileHashAsync(filepath);
+                    if (contentHash !== reqContentHash) {
+                        throw new Error('Aborted');
+                    }
+                }),
+            ),
+        );
 
         try {
             await parallelLimit(tasks, CUNCURRENCY);
@@ -185,10 +219,7 @@ class CacheFile {
     }
 
     async extractCacheAsync() {
-        await Promise.all([
-            this.writeDataAsync(),
-            this.copyFilesAsync(),
-        ]);
+        await Promise.all([this.writeDataAsync(), this.copyFilesAsync()]);
     }
 
     extractCache() {
@@ -216,14 +247,16 @@ class CacheFile {
     }
 
     async writeAssetsAsync() {
        const {wroteFileData} = this;
-        const tasks = Object.entries(wroteFileData).map(([filename, data]) => asyncify(async () => {
-            const fullFilename = this.getAssetFilepath(filename);
-            const place = path.dirname(fullFilename);
-            if (!existsDir.has(place)) {
-                await fs.promises.mkdir(place, {recursive: true});
-            }
-            await fs.promises.writeFile(fullFilename, data);
-        }));
+        const tasks = Object.entries(wroteFileData).map(([filename, data]) =>
+            asyncify(async () => {
+                const fullFilename = this.getAssetFilepath(filename);
+                const place = path.dirname(fullFilename);
+                if (!existsDir.has(place)) {
+                    await fs.promises.mkdir(place, {recursive: true});
+                }
+                await fs.promises.writeFile(fullFilename, data);
+            }),
+        );
 
         await parallelLimit(tasks, CUNCURRENCY);
     }
@@ -249,13 +282,17 @@ class CacheFile {
 
         const {wroteFiles} = this.data;
 
-        await mapLimit(Object.entries(wroteFiles), CUNCURRENCY, asyncify(async ([to, assetName]: string[]) => {
-            const fullFrom = this.getAssetFilepath(assetName);
-            const fullTo = path.join(distRoot, to);
-
-            await fs.promises.mkdir(path.dirname(fullTo), {recursive: true});
-            await fs.promises.copyFile(fullFrom, fullTo);
-        }));
+        await mapLimit(
+            Object.entries(wroteFiles),
+            CUNCURRENCY,
+            asyncify(async ([to, assetName]: string[]) => {
+                const fullFrom = this.getAssetFilepath(assetName);
+                const fullTo = path.join(distRoot, to);
+
+                await fs.promises.mkdir(path.dirname(fullTo), {recursive: true});
+                await fs.promises.copyFile(fullFrom, fullTo);
+            }),
+        );
     }
 
     private copyFiles() {
@@ -281,13 +318,17 @@ class CacheFile {
 
         const {copiedFiles} = this.data;
 
-        await mapLimit(Object.entries(copiedFiles), CUNCURRENCY, asyncify(async ([to, from]: string[]) => {
-            const fullFrom = path.join(root, from);
-            const fullTo = path.join(distRoot, to);
-
-            await fs.promises.mkdir(path.dirname(fullTo), {recursive: true});
-            await fs.promises.copyFile(fullFrom, fullTo);
-        }));
+        await mapLimit(
+            Object.entries(copiedFiles),
+            CUNCURRENCY,
+            asyncify(async ([to, from]: string[]) => {
+                const fullFrom = path.join(root, from);
+                const fullTo = path.join(distRoot, to);
+
+                await fs.promises.mkdir(path.dirname(fullTo), {recursive: true});
+                await fs.promises.copyFile(fullFrom, fullTo);
+            }),
+        );
     }
 
     private getAssetFilepath(key: string) {
diff --git a/src/services/utils.ts b/src/services/utils.ts
index 7dfa1f7d5..f55271ed5 100644
--- a/src/services/utils.ts
+++ b/src/services/utils.ts
@@ -2,7 +2,7 @@ import evalExp from '@diplodoc/transform/lib/liquid/evaluation';
 import {Filter, TextItems} from '../models';
 import liquid from '@diplodoc/transform/lib/liquid';
 import {ArgvService} from './index';
-import {Logger} from '@doc-tools/transform/src/transform/log';
+import {Logger} from '@diplodoc/transform/src/transform/log';
 
 export interface FilterFilesOptions {
     resolveConditions?: boolean;
diff --git a/src/utils/pluginEnvApi.ts b/src/utils/pluginEnvApi.ts
index 5551fdee2..ee8c080c3 100644
--- a/src/utils/pluginEnvApi.ts
+++ b/src/utils/pluginEnvApi.ts
@@ -18,7 +18,9 @@ type WriteFileAsyncAction = {type: AsyncActionType.Write; to: string; data: stri
 type AsyncAction = CopyFileAsyncAction | WriteFileAsyncAction;
 
 interface PluginEnvApiProps {
-    root: string; distRoot: string; cacheFile?: CacheFile;
+    root: string;
+    distRoot: string;
+    cacheFile?: CacheFile;
 }
 
 class PluginEnvApi {
@@ -26,9 +28,9 @@ class PluginEnvApi {
         return new PluginEnvApi(props);
     }
 
-    public readonly root: string;
-    public readonly distRoot: string;
-    public readonly cacheFile: CacheFile | undefined;
+    readonly root: string;
+    readonly distRoot: string;
+    readonly cacheFile: CacheFile | undefined;
 
     private readonly asyncActionQueue: AsyncAction[] = [];
 
@@ -143,33 +145,37 @@ class PluginEnvApi {
     async executeActionsAsync() {
         const {asyncActionQueue} = this;
 
-        await mapLimit(asyncActionQueue.splice(0), CUNCURRENCY, asyncify(async (action: AsyncAction) => {
-            switch (action.type) {
-                case AsyncActionType.Copy: {
-                    const {from, to} = action;
-                    const fullFrom = path.join(this.root, from);
-                    const fullTo = path.join(this.distRoot, to);
-
-                    await fs.promises.mkdir(path.dirname(fullTo), {recursive: true});
-                    await fs.promises.copyFile(fullFrom, fullTo);
-                    if (this.cacheFile) {
-                        this.cacheFile.addCopyFile({from, to});
+        await mapLimit(
+            asyncActionQueue.splice(0),
+            CUNCURRENCY,
+            asyncify(async (action: AsyncAction) => {
+                switch (action.type) {
+                    case AsyncActionType.Copy: {
+                        const {from, to} = action;
+                        const fullFrom = path.join(this.root, from);
+                        const fullTo = path.join(this.distRoot, to);
+
+                        await fs.promises.mkdir(path.dirname(fullTo), {recursive: true});
+                        await fs.promises.copyFile(fullFrom, fullTo);
+                        if (this.cacheFile) {
+                            this.cacheFile.addCopyFile({from, to});
+                        }
+                        break;
                     }
-                    break;
-                }
-                case AsyncActionType.Write: {
-                    const {to, data} = action;
-                    const fullTo = path.join(this.distRoot, to);
-
-                    await fs.promises.mkdir(path.dirname(fullTo), {recursive: true});
-                    await fs.promises.writeFile(fullTo, data);
-                    if (this.cacheFile) {
-                        this.cacheFile.addWriteFile(to, data);
+                    case AsyncActionType.Write: {
+                        const {to, data} = action;
+                        const fullTo = path.join(this.distRoot, to);
+
+                        await fs.promises.mkdir(path.dirname(fullTo), {recursive: true});
+                        await fs.promises.writeFile(fullTo, data);
+                        if (this.cacheFile) {
+                            this.cacheFile.addWriteFile(to, data);
+                        }
+                        break;
                     }
-                    break;
-                }
                 }
-            }
-        }));
+            }),
+        );
     }
 }
diff --git a/src/utils/presets.ts b/src/utils/presets.ts
index 66e991b3b..5bdde32f9 100644
--- a/src/utils/presets.ts
+++ b/src/utils/presets.ts
@@ -13,7 +13,10 @@ export function getVarsPerFile(filePath: string): Record {
     };
 }
 
-export function getVarsPerFileWithHash(filePath: string): {varsHashList: string[]; vars: Record} {
+export function getVarsPerFileWithHash(filePath: string): {
+    varsHashList: string[];
+    vars: Record;
+} {
     const {vars: argVars} = ArgvService.getConfig();
     const {vars, varsHashList} = PresetService.getWithHash(dirname(filePath));
 
@@ -33,4 +36,3 @@ export function getVarsPerRelativeFile(filePath: string): Record
 
     return getVarsPerFile(relativeFilePath);
 }
-