mirror of https://github.com/Alfresco/alfresco-ng2-components.git
synced 2025-07-24 17:32:15 +00:00

Review Documentation and tools doc (#5600)

* add exclude files property
* remove Unknown
* fix reviewer
* Update reviewChecker.js
* Update docs/README.md

Co-authored-by: Mark Hulbert <39801222+m-hulbert@users.noreply.github.com>
@@ -24,4 +24,10 @@ changes in functionality (and therefore don't need documenting).
 
 The script sends comma-separated text to the command line. You can copy/paste
 this into a spreadsheet or redirect the output to a text file with a ".csv"
 suffix.
+
+To use this tool, you first need to set the graphAuthToken variable to your GitHub access token:
+
+export graphAuthToken=GITHUB_TOKEN
+
+npm run review-checker
@@ -8,6 +8,7 @@ var jsyaml = require("js-yaml");
 var remark = require("remark");
 var frontMatter = require("remark-frontmatter");
 var mdCompact = require("mdast-util-compact");
+var minimatch = require("Minimatch");
 
 var si = require("./sourceInfoClasses");
@@ -20,13 +21,28 @@ var defaultFolder = path.resolve("docs");
 var sourceInfoFolder = path.resolve("docs", "sourceinfo");
 
 
+function filterFiles(filePath) {
+    let isAllowed = true;
+    this.excludedFileList = aggData['config'].exclude;
+
+    if (this.excludedFileList) {
+        isAllowed = this.excludedFileList.filter((pattern) => {
+            return minimatch(filePath, pattern.toString(), {
+                nocase: true
+            });
+        }).length === 0;
+    }
+
+    return isAllowed;
+}
+
 function updatePhase(mdCache, aggData) {
     var errorMessages;
 
     toolList.forEach(toolName => {
         errorMessages = [];
         console.log(`Tool: ${toolName}`);
-        toolModules[toolName].processDocs(mdCache, aggData, errorMessages);
+        toolModules[toolName].processDocs(mdCache, aggData);
     });
 
     var filenames = Object.keys(mdCache);
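The new filterFiles function drops any doc path that matches one of the glob patterns listed under the exclude key added to the config further down in this diff. A minimal standalone sketch of that check, assuming the minimatch npm package and those same four globs (the example paths are illustrative only):

// A file is processed only if it matches none of the exclude globs.
var minimatch = require("minimatch");

var exclude = ["**/vulnerability/**", "**/release-notes/**", "**/tutorials/**", "**/user-guide/**"];

function isAllowed(filePath) {
    return exclude.filter(function (pattern) {
        return minimatch(filePath, pattern, { nocase: true });
    }).length === 0;
}

console.log(isAllowed("docs/user-guide/theming.md"));   // false - excluded
console.log(isAllowed("docs/core/login.component.md")); // true  - kept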
@@ -51,27 +67,20 @@ function updatePhase(mdCache, aggData) {
             console.log(`Modified: ${pathname}`);
         }
 
-        fs.writeFileSync(filenames[i], remark().use(frontMatter, {type: 'yaml', fence: '---'}).data("settings", {paddedTable: false, gfm: false}).stringify(tree));
+        fs.writeFileSync(filenames[i], remark().use(frontMatter, {
+            type: 'yaml',
+            fence: '---'
+        }).data("settings", {paddedTable: false, gfm: false}).stringify(tree));
     }
   }
 }
 
 
-function deepCopy(obj) {
-    // Despite how it looks, this technique is apparently quite efficient
-    // because the JSON routines are implemented in C code and faster
-    // than the equivalent JavaScript loops ;-)
-    return JSON.parse(JSON.stringify(obj));
-}
-
-
 function minimiseTree(tree) {
     let minPropsTree = JSON.parse(JSON.stringify(tree, (key, value) => key === "position" ? undefined : value));
     mdCompact(minPropsTree);
     return minPropsTree;
 }
 
 
 function loadToolModules() {
     var mods = {};
     var toolsFolderPath = path.resolve(__dirname, toolsFolderName);
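minimiseTree (unchanged here) leans on the replacer argument of JSON.stringify: returning undefined for a key omits it, so every position property that remark attaches to syntax-tree nodes is stripped while the tree is deep-copied. A tiny standalone illustration with a made-up node:

// Returning undefined from the replacer drops that key from the serialised copy.
var node = {
    type: "paragraph",
    position: { start: { line: 1, column: 1 }, end: { line: 1, column: 6 } },
    children: [{ type: "text", value: "hello", position: { start: { line: 1, column: 1 } } }]
};

var stripped = JSON.parse(JSON.stringify(node, (key, value) =>
    key === "position" ? undefined : value));

console.log(JSON.stringify(stripped));
// {"type":"paragraph","children":[{"type":"text","value":"hello"}]}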
@@ -103,7 +112,7 @@ function getAllDocFilePaths(docFolder, files) {
         var itemPath = path.resolve(docFolder, items[i]);
         var itemInfo = fs.statSync(itemPath);
 
-        if (itemInfo.isFile()){
+        if (itemInfo.isFile()) {
             files.push(itemPath);
         } else if (itemInfo.isDirectory()) {
             getAllDocFilePaths(itemPath, files);
@@ -145,16 +154,13 @@ function initClassInfo(aggData) {
     });
 }
 
 
 program
     .usage("[options] <source>")
     .option("-p, --profile [profileName]", "Select named config profile", "default")
     .option("-j, --json", "Output JSON data for Markdown syntax tree")
     .option("-v, --verbose", "Log doc files as they are processed")
     .option("-t, --timing", "Output time taken for run")
     .parse(process.argv);
 
 var startTime;
@@ -179,7 +185,7 @@ aggData['config'] = config;
 
 var toolList;
 
-if (config.profiles[program.profile]){
+if (config.profiles[program.profile]) {
     toolList = config.profiles[program.profile];
     var toolListText = toolList.join(", ");
     console.log(`Using '${program.profile}' profile: ${toolListText}`);
@@ -194,16 +200,15 @@ if (sourceInfo.isDirectory()) {
     getAllDocFilePaths(sourcePath, files);
     aggData['rootFolder'] = path.dirname(sourcePath);
 } else if (sourceInfo.isFile()) {
-    files = [ sourcePath ];
+    files = [sourcePath];
 }
 
 files = files.filter(filename =>
     (filename !== undefined) &&
     (path.extname(filename) === ".md") &&
-    (filename !== "README.md")
+    (filename !== "README.md") && filterFiles(filename)
 );
 
 var mdCache = initMdCache(files);
 
 console.log("Loading source data...");
@@ -90,7 +90,10 @@
     "UserInfo": "https://github.com/Alfresco/alfresco-js-api/blob/development/src/api/content-rest-api/docs/UserInfo.md",
     "UserProcessInstanceFilterRepresentation": "https://github.com/Alfresco/alfresco-js-api/blob/development/src/api/activiti-rest-api/docs/UserProcessInstanceFilterRepresentation.md",
     "UserRepresentation": "https://github.com/Alfresco/alfresco-js-api/blob/development/src/api/activiti-rest-api/docs/UserRepresentation.md",
-    "VersionsApi": "https://github.com/Alfresco/alfresco-js-api/blob/development/src/api/content-rest-api/api/versions.api.ts"
+    "VersionsApi": "https://github.com/Alfresco/alfresco-js-api/blob/development/src/api/content-rest-api/api/versions.api.ts",
+    "MenuPositionX": "https://github.com/angular/components/blob/master/src/material/menu/menu-positions.ts",
+    "MenuPositionY": "https://github.com/angular/components/blob/master/src/material/menu/menu-positions.ts",
+    "ThemePalette": "https://github.com/angular/components/blob/master/src/material/core/common-behaviors/color.ts"
   },
   "linkOverrides": [
     "activiti", "auth", "authentication", "comment", "company", "core", "download",
@@ -199,6 +202,12 @@
     "app-details-cloud",
     "dynamic-tab"
   ],
+  "exclude": [
+    "**/vulnerability/**",
+    "**/release-notes/**",
+    "**/tutorials/**",
+    "**/user-guide/**"
+  ],
   "fileCheckerFilter": [
     "README",
     "compatibility",
@@ -208,4 +217,4 @@
     "user-guide",
     "versionIndex"
   ]
 }
@@ -24,6 +24,7 @@ var docsFolderPath = path.resolve('docs');
 var libFolders = ['core', 'content-services', 'extensions', 'insights', 'process-services', 'process-services-cloud'];
 libsearch(srcData, path.resolve(libFolder));
 var authToken = process.env.graphAuthToken;
+
 var client = new graphql_request_1.GraphQLClient('https://api.github.com/graphql', {
     headers: {
         Authorization: 'Bearer ' + authToken
@@ -33,21 +34,25 @@ var query = "query commitHistory($path: String) {\n repository(name: \"alfresco
 var docFiles = getDocFilePaths(docsFolderPath);
 var docNames = rxjs_1.of(docFiles);
 console.log("'Name','Review date','Commits since review','Score'");
-docNames.subscribe(function (x) {
-    var key = path.basename(x, '.md');
-    if (!srcData[key]) {
-        return;
-    }
-    var vars = {
-        'path': 'lib/' + srcData[key].path
-    };
-    client.request(query, vars).then(function (data) {
-        var nodes = data['repository'].ref.target.history.nodes;
-        var lastReviewDate = getDocReviewDate(x); // (key + ".md");
-        var numUsefulCommits = extractCommitInfo(nodes, lastReviewDate, stoplist);
-        var dateString = lastReviewDate.format('YYYY-MM-DD');
-        var score = priorityScore(lastReviewDate, numUsefulCommits).toPrecision(3);
-        console.log("'" + key + "','" + dateString + "','" + numUsefulCommits + "','" + score + "'");
+docNames.subscribe(function (docs) {
+    docs.forEach(function (x) {
+        var key = path.basename(x, '.md');
+        if (!srcData[key]) {
+            return;
+        }
+        var vars = {
+            'path': 'lib/' + srcData[key].path
+        };
+        client.request(query, vars).then(function (data) {
+            var nodes = data['repository'].ref.target.history.nodes;
+            var lastReviewDate = getDocReviewDate(x); // (key + ".md");
+            var numUsefulCommits = extractCommitInfo(nodes, lastReviewDate, stoplist);
+            if (numUsefulCommits > 0) {
+                var dateString = lastReviewDate.format('YYYY-MM-DD');
+                var score = priorityScore(lastReviewDate, numUsefulCommits).toPrecision(3);
+                console.log("'" + key + "','" + dateString + "','" + numUsefulCommits + "','" + score + "'");
+            }
+        });
     });
 });
 function priorityScore(reviewDate, numCommits) {
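This rewrite appears to be the reviewer fix mentioned in the commit message: of(docFiles) emits the whole array as a single value, so the old callback received the array where a single file name was expected. Wrapping the body in docs.forEach processes each path individually, and the new numUsefulCommits > 0 guard keeps already up-to-date docs out of the report. A minimal sketch of the RxJS behaviour involved (file names are placeholders):

var rxjs = require("rxjs");

// of(array) emits the array once as a single value...
rxjs.of(["a.md", "b.md"]).subscribe(function (value) {
    console.log(Array.isArray(value)); // true - the subscriber gets the whole array
});

// ...whereas from(array) would emit each element separately.
rxjs.from(["a.md", "b.md"]).subscribe(function (value) {
    console.log(value); // "a.md", then "b.md"
});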
@@ -29,16 +29,16 @@ const stoplist = new Stoplist(stoplistFilePath);
 
 const docsFolderPath = path.resolve('docs');
 
 const libFolders = ['core', 'content-services', 'extensions', 'insights', 'process-services', 'process-services-cloud'];
 
 libsearch(srcData, path.resolve(libFolder));
 
 const authToken = process.env.graphAuthToken;
 
 const client = new GraphQLClient('https://api.github.com/graphql', {
     headers: {
         Authorization: 'Bearer ' + authToken
     }
 });
 
 const query = `query commitHistory($path: String) {
@@ -64,99 +64,104 @@ const docNames = of(docFiles);
 
 console.log("'Name','Review date','Commits since review','Score'");
 
-docNames.subscribe(x => {
-    const key = path.basename(x, '.md');
+docNames.subscribe(docs => {
 
-    if (!srcData[key]) {
-        return;
-    }
+    docs.forEach(x => {
+        const key = path.basename(x, '.md');
 
-    const vars = {
-        'path': 'lib/' + srcData[key].path
-    };
+        if (!srcData[key]) {
+            return;
+        }
 
-    client.request(query, vars).then(data => {
-        const nodes = data['repository'].ref.target.history.nodes;
+        const vars = {
+            'path': 'lib/' + srcData[key].path
+        };
 
-        const lastReviewDate = getDocReviewDate(x); // (key + ".md");
+        client.request(query, vars).then(data => {
+            const nodes = data['repository'].ref.target.history.nodes;
 
-        const numUsefulCommits = extractCommitInfo(nodes, lastReviewDate, stoplist);
-        const dateString = lastReviewDate.format('YYYY-MM-DD');
-        const score = priorityScore(lastReviewDate, numUsefulCommits).toPrecision(3);
+            const lastReviewDate = getDocReviewDate(x); // (key + ".md");
 
-        console.log(`'${key}','${dateString}','${numUsefulCommits}','${score}'`);
-    });
+            const numUsefulCommits = extractCommitInfo(nodes, lastReviewDate, stoplist);
+            if (numUsefulCommits > 0) {
+                const dateString = lastReviewDate.format('YYYY-MM-DD');
+                const score = priorityScore(lastReviewDate, numUsefulCommits).toPrecision(3);
+
+                console.log(`'${key}','${dateString}','${numUsefulCommits}','${score}'`);
+            }
+        });
+    });
 });
 function priorityScore(reviewDate, numCommits) {
     const daysSinceReview = moment().diff(reviewDate, 'days');
     const commitScore = 2 + numCommits * commitWeight;
     return Math.pow(commitScore, daysSinceReview / scoreTimeBase);
 }
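priorityScore grows exponentially with the time since the last review, and the number of useful commits raises the base. The commitWeight and scoreTimeBase constants are defined earlier in reviewChecker and are not part of this hunk, so the values below are placeholders chosen only to show the shape of the curve; the sketch also takes the elapsed days directly instead of a moment date:

// Placeholder constants - the real commitWeight and scoreTimeBase may differ.
const commitWeight = 0.1;
const scoreTimeBase = 60;

function priorityScoreSketch(daysSinceReview, numCommits) {
    const commitScore = 2 + numCommits * commitWeight;
    return Math.pow(commitScore, daysSinceReview / scoreTimeBase);
}

console.log(priorityScoreSketch(30, 0).toPrecision(3));  // "1.41" - half a time base, no commits
console.log(priorityScoreSketch(120, 5).toPrecision(3)); // "6.25" - two time bases, base 2.5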
 function getDocReviewDate(docFileName) {
     const mdFilePath = path.resolve(docsFolderPath, docFileName);
 
     const mdText = fs.readFileSync(mdFilePath);
     const tree = remark().use(frontMatter, ['yaml']).parse(mdText);
 
     let lastReviewDate = moment(adf20StartDate);
 
     if (tree.children[0].type === 'yaml') {
         const metadata = yaml.load(tree.children[0].value);
 
         if (metadata['Last reviewed']) {
             lastReviewDate = moment(metadata['Last reviewed']);
         }
     }
 
     return lastReviewDate;
 }
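getDocReviewDate reads the YAML front matter that sits between --- fences at the top of each doc file; the Last reviewed field drives the whole report, and files without it fall back to the adf20StartDate constant. The field names below follow the usual ADF doc header, but the values are illustrative; js-yaml parses the bare date into a Date object:

// Roughly what the function sees once remark has split off the front matter, e.g.
//   ---
//   Title: Login component
//   Added: v2.0.0
//   Status: Active
//   Last reviewed: 2019-03-20
//   ---
const yaml = require('js-yaml');

const metadata = yaml.load('Title: Login component\nLast reviewed: 2019-03-20\n');
console.log(metadata['Last reviewed'] instanceof Date); // true - ready for moment()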
 function extractCommitInfo(commitNodes, cutOffDate, stoplist) {
     let numUsefulCommits = 0;
 
     commitNodes.forEach(element => {
         if (!stoplist.isRejected(element.message)) {
             // const abbr = element.message.substr(0, 15);
 
             const commitDate = moment(element.pushedDate);
 
             if (commitDate.isAfter(cutOffDate)) {
                 numUsefulCommits++;
             }
         }
     });
 
     return numUsefulCommits;
 }
 
 function getDocFilePaths(folderPath) {
     const result = [];
 
     libFolders.forEach(element => {
         const libPath = path.resolve(folderPath, element);
         addItemsRecursively(libPath, result);
     });
 
     return result;
 
     function addItemsRecursively(elementPath: string, resultList: string[]) {
         const items = fs.readdirSync(elementPath);
 
         items.forEach(item => {
             const fullItemPath = path.resolve(elementPath, item);
             const itemInfo = fs.statSync(fullItemPath);
 
             if (itemInfo.isDirectory()) {
                 addItemsRecursively(fullItemPath, resultList);
             } else if (
                 (path.extname(fullItemPath) === '.md') &&
                 (item !== 'README.md') &&
                 (item.match(angFilePattern))
             ) {
                 resultList.push(fullItemPath);
             }
         });
     }
 }
@@ -16,7 +16,7 @@ var PropInfo = /** @class */ (function () {
         this.defaultValue = tempDefaultVal ? tempDefaultVal.toString() : '';
         this.defaultValue = this.defaultValue.replace(/\|/, '\\|');
         this.type = sourceData.syntax['return'].type || '';
-        this.type = this.type.toString().replace(/\|/, '\\|');
+        this.type = this.type.toString().replace(/\|/, '\\|').replace('unknown', '');
         if (sourceData.tags) {
             var depTag = sourceData.tags.find(function (tag) { return tag.name === 'deprecated'; });
             if (depTag) {
|
||||
this.defaultValue = tempDefaultVal ? tempDefaultVal.toString() : '';
|
||||
this.defaultValue = this.defaultValue.replace(/\|/, '\\|');
|
||||
this.type = sourceData.syntax['return'].type || '';
|
||||
this.type = this.type.toString().replace(/\|/, '\\|');
|
||||
this.type = this.type.toString().replace(/\|/, '\\|').replace('unknown', '');
|
||||
|
||||
if (sourceData.tags) {
|
||||
const depTag = sourceData.tags.find(tag => tag.name === 'deprecated');
|
||||
|
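The added .replace('unknown', '') keeps the literal word unknown, presumably reported by the source metadata when a property type cannot be resolved, out of the generated property tables; the existing replace escapes the | character so union types do not break the Markdown table layout. A small sketch of the combined effect on sample type strings:

// Escape "|" for Markdown tables, then drop a literal "unknown" if present.
function cleanTypeString(type) {
    return type.toString().replace(/\|/, '\\|').replace('unknown', '');
}

console.log(cleanTypeString('EventEmitter<any>')); // "EventEmitter<any>"
console.log(cleanTypeString('Date | unknown'));    // "Date \| "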
@@ -11,7 +11,7 @@ var imageFolderPath = path.resolve('docs', 'docassets', 'images');
 // and filenames is enough to trap errors like missing out the 'images'
 // folder in the path. Keeping it low avoids crazy suggestions.
 var maxImagePathLevDistance = 7;
-function processDocs(mdCache, aggData, errorMessages) {
+function processDocs(mdCache, aggData) {
     var pathnames = Object.keys(mdCache);
     var classlessDocs = [];
     var linkRefs = {};
@@ -13,7 +13,7 @@ const imageFolderPath = path.resolve('docs', 'docassets', 'images');
 // folder in the path. Keeping it low avoids crazy suggestions.
 const maxImagePathLevDistance = 7;
 
-export function processDocs(mdCache, aggData, errorMessages) {
+export function processDocs(mdCache, aggData) {
     const pathnames = Object.keys(mdCache);
 
     const classlessDocs = [];
@@ -5,7 +5,7 @@ var path = require("path");
 var fs = require("fs");
 var unist_util_select_1 = require("unist-util-select");
 var suffixesNotToCheck = /\.ts/;
-function processDocs(mdCache, aggData, errorMessages) {
+function processDocs(mdCache, aggData) {
     var pathnames = Object.keys(mdCache);
     var linkSet = new LinkSet(pathnames);
     var imageFolderPath = path.resolve(aggData['rootFolder'], 'docs', 'docassets', 'images');
@@ -7,7 +7,7 @@ import { MDAST } from 'mdast';
 
 const suffixesNotToCheck = /\.ts/;
 
-export function processDocs(mdCache, aggData, errorMessages) {
+export function processDocs(mdCache, aggData) {
     const pathnames = Object.keys(mdCache);
 
     const linkSet = new LinkSet(pathnames);
@@ -10,7 +10,7 @@ var mdNav_1 = require("../mdNav");
 var ngHelpers_1 = require("../ngHelpers");
 var templateFolder = path.resolve('tools', 'doc', 'templates');
 var nameExceptions;
-function processDocs(mdCache, aggData, _errorMessages) {
+function processDocs(mdCache, aggData) {
     nameExceptions = aggData.config.typeNameExceptions;
     var pathnames = Object.keys(mdCache);
     var internalErrors;
@@ -13,7 +13,7 @@ const templateFolder = path.resolve('tools', 'doc', 'templates');
 
 let nameExceptions;
 
-export function processDocs(mdCache, aggData, _errorMessages) {
+export function processDocs(mdCache, aggData) {
     nameExceptions = aggData.config.typeNameExceptions;
 
     const pathnames = Object.keys(mdCache);
@@ -51,6 +51,9 @@ function updateFile(tree, pathname, aggData, errorMessages) {
     // Copy docs back from the .md file when the JSDocs are empty.
     const inputMD = getPropDocsFromMD(tree, 'Properties', 3);
     const outputMD = getPropDocsFromMD(tree, 'Events', 2);
 
     updatePropDocsFromMD(compData, inputMD, outputMD, errorMessages);
 
     if (classType === 'service') {
@@ -26,7 +26,7 @@ var initialVersion = "v2.0.0";
 var templateFolder = path.resolve("tools", "doc", "templates");
 
 
-function processDocs(mdCache, aggData, errorMessages) {
+function processDocs(mdCache, aggData) {
     initPhase(aggData);
     readPhase(mdCache, aggData);
     aggPhase(aggData);
@@ -18,7 +18,7 @@
     "tools/sourceLinker.ts",
     "tools/tsInfo.ts",
     "tools/tutorialIndex.ts",
-    "tools/typeLinker.ts"
-    // "reviewChecker.ts"
+    "tools/typeLinker.ts",
+    "reviewChecker.ts"
   ]
 }