diff --git a/.github/workflows/pull-request.yml b/.github/workflows/pull-request.yml index 414a289d..9734c854 100644 --- a/.github/workflows/pull-request.yml +++ b/.github/workflows/pull-request.yml @@ -68,4 +68,4 @@ jobs: continue-on-error: true - name: Start - run: docker run -t -v wikiGDriveExample:/data -v /home/wikigdrive/service_account.json:/service_account.json -v "/var/www/dev.wikigdrive.com:/usr/src/app/dist/hugo" wikigdrive-test wikigdrive --service_account /service_account.json --share_email mie-docs-wikigdrive@wikigdrive.iam.gserviceaccount.com --workdir /data pull 0AIkOKXbzWCtSUk9PVA + run: docker run -t -v wikiGDriveExample:/data -v /home/wikigdrive/service_account.json:/service_account.json -v "/var/www/dev.wikigdrive.com:/usr/src/app/dist/hugo" wikigdrive-test wikigdrive --service_account /service_account.json --share_email mie-docs-wikigdrive@wikigdrive.iam.gserviceaccount.com --transform_subdir / --workdir /data pull 0AIkOKXbzWCtSUk9PVA diff --git a/.gitignore b/.gitignore index 4311cca3..1f0dc005 100644 --- a/.gitignore +++ b/.gitignore @@ -9,3 +9,7 @@ content hugo/resources hugo_stats.json +.wgd-directory.yaml +.tree.json +.wgd-local-links.csv +.wgd-local-log.csv diff --git a/apps/wgd-action-runner/site/navigation2menu.mjs b/apps/wgd-action-runner/site/navigation2menu.mjs new file mode 100755 index 00000000..e83e5245 --- /dev/null +++ b/apps/wgd-action-runner/site/navigation2menu.mjs @@ -0,0 +1,59 @@ +#!/usr/bin/node + +/** + * In order to generate menu.en.json from markdown file run: + * + * cat content/navigation.md | ./navigation2menu.js > config/_default/menu.en.json + */ + +// Work on POSIX and Windows +import fs from 'node:fs'; +const stdinBuffer = fs.readFileSync(0); // STDIN_FILENO = 0 + +const markdown = stdinBuffer.toString(); + +let weight = 10; + +const parentStack = []; +const menu = []; + +let lastContent = 'First line'; + +for (const line of markdown.split('\n')) { + if (!line.match(/^ *\* /)) { + continue; + } + const indentPart 
= line.replace(/(^ *\* ).*/, '$1'); + const markdownLink = line.substring(indentPart.length); + const matched = markdownLink.match(/\[([^\]]+)]\(([^)]+)\)/); + if (!matched) { + console.warn(`Warning: navigation.md menu has "${markdownLink}" without url near: "${lastContent}"`); + continue; + } + const [_, name, pageRef] = matched; + const level = (indentPart.length - 2)/3; + + while (parentStack.length > level) { + parentStack.pop(); + } + + const identifier = pageRef; + + if (pageRef.startsWith('http://') || pageRef.startsWith('https://') || fs.existsSync('./content/' + pageRef)) { + menu.push({ + identifier, + name, + pageRef, + parent: parentStack[parentStack.length - 1], + weight + }); + } else { + console.warn(`Warning: navigation.md menu has "${markdownLink}" without file: "${pageRef}"`); + } + + weight += 10; + parentStack.push(identifier); + lastContent = markdownLink; +} + +console.log(JSON.stringify({ main: menu }, null, 4)); diff --git a/package-lock.json b/package-lock.json index 2bf131c7..610fbeb3 100644 --- a/package-lock.json +++ b/package-lock.json @@ -32,7 +32,7 @@ "compression": "1.7.4", "cookie-parser": "1.4.6", "dayjs": "1.11.1", - "dockerode": "3.3.2", + "dockerode": "4.0.2", "dom-serializer": "2.0.0", "domutils": "3.1.0", "dotenv": "^8.6.0", @@ -50,6 +50,7 @@ "lunr-languages": "1.10.0", "marked": "9.0.2", "mathjs": "10.5.0", + "mathml-to-latex": "1.4.0", "minimist": "1.2.6", "mitt": "^3.0.0", "open": "^7.4.2", @@ -78,7 +79,7 @@ "@swc/helpers": "0.5.3", "@types/async": "3.2.12", "@types/compression": "1.7.3", - "@types/dockerode": "3.3.9", + "@types/dockerode": "3.3.28", "@types/express": "4.17.13", "@types/lunr": "2.3.4", "@types/mocha": "10.0.2", @@ -162,6 +163,11 @@ "node": ">=6.9.0" } }, + "node_modules/@balena/dockerignore": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@balena/dockerignore/-/dockerignore-1.0.2.tgz", + "integrity": 
"sha512-wMue2Sy4GAVTk6Ic4tJVcnfdau+gx2EnG7S+uAEe+TWJFqE4YoWN4/H8MSLj4eYJKxGg26lZwboEniNiNwZQ6Q==" + }, "node_modules/@colors/colors": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/@colors/colors/-/colors-1.5.0.tgz", @@ -1282,13 +1288,14 @@ } }, "node_modules/@types/dockerode": { - "version": "3.3.9", - "resolved": "https://registry.npmjs.org/@types/dockerode/-/dockerode-3.3.9.tgz", - "integrity": "sha512-SYRN5FF/qmwpxUT6snJP5D8k0wgoUKOGVs625XvpRJOOUi6s//UYI4F0tbyE3OmzpI70Fo1+aqpzX27zCrInww==", + "version": "3.3.28", + "resolved": "https://registry.npmjs.org/@types/dockerode/-/dockerode-3.3.28.tgz", + "integrity": "sha512-RjY96chW88t2QvSebCsec+mQYo3/nyOr+/tVcE+0ynlOg2m/i9wPE52DhptzF75QDlhv2uDYVPqKfHKeGTn6Fg==", "dev": true, "dependencies": { "@types/docker-modem": "*", - "@types/node": "*" + "@types/node": "*", + "@types/ssh2": "*" } }, "node_modules/@types/express": { @@ -1746,6 +1753,14 @@ "resolved": "https://registry.npmjs.org/@vue/shared/-/shared-3.2.45.tgz", "integrity": "sha512-Ewzq5Yhimg7pSztDV+RH1UDKBzmtqieXQlpTVm2AwraoRL/Rks96mvd8Vgi7Lj+h+TH8dv7mXD3FRZR3TUvbSg==" }, + "node_modules/@xmldom/xmldom": { + "version": "0.8.10", + "resolved": "https://registry.npmjs.org/@xmldom/xmldom/-/xmldom-0.8.10.tgz", + "integrity": "sha512-2WALfTl4xo2SkGCYRt6rDTFfk9R1czmBvUQy12gK2KuRKIpWEhcbbzy8EZXtz/jkRqHX8bFEc6FC1HjX4TUWYw==", + "engines": { + "node": ">=10.0.0" + } + }, "node_modules/accepts": { "version": "1.3.8", "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", @@ -1963,7 +1978,7 @@ "node_modules/bcrypt-pbkdf": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", - "integrity": "sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4=", + "integrity": "sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w==", "dependencies": { "tweetnacl": "^0.14.3" } @@ -2117,9 +2132,9 @@ "integrity": 
"sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==" }, "node_modules/buildcheck": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/buildcheck/-/buildcheck-0.0.3.tgz", - "integrity": "sha512-pziaA+p/wdVImfcbsZLNF32EiWyujlQLwolMqUQE8xpKNOH7KmZQaY8sXN7DGOEzPAElo9QTaeNRfGnf3iOJbA==", + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/buildcheck/-/buildcheck-0.0.6.tgz", + "integrity": "sha512-8f9ZJCUXyT1M35Jx7MkBgmBMo3oHTTBIPLiY9xyL0pl3T5RwcPEY8cUHr5LBNfu/fk6c2T4DJZuVM/8ZZT2D2A==", "optional": true, "engines": { "node": ">=10.0.0" @@ -2520,14 +2535,14 @@ "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" }, "node_modules/cpu-features": { - "version": "0.0.4", - "resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.4.tgz", - "integrity": "sha512-fKiZ/zp1mUwQbnzb9IghXtHtDoTMtNeb8oYGx6kX2SYfhnG0HNdBEBIzB9b5KlXu5DQPhfy3mInbBxFcgwAr3A==", + "version": "0.0.9", + "resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.9.tgz", + "integrity": "sha512-AKjgn2rP2yJyfbepsmLfiYcmtNn/2eUvocUyM/09yB0YDiz39HteK/5/T4Onf0pmdYDMgkBoGvRLvEguzyL7wQ==", "hasInstallScript": true, "optional": true, "dependencies": { - "buildcheck": "0.0.3", - "nan": "^2.15.0" + "buildcheck": "~0.0.6", + "nan": "^2.17.0" }, "engines": { "node": ">=10.0.0" @@ -2713,23 +2728,23 @@ } }, "node_modules/docker-modem": { - "version": "3.0.5", - "resolved": "https://registry.npmjs.org/docker-modem/-/docker-modem-3.0.5.tgz", - "integrity": "sha512-x1E6jxWdtoK3+ifAUWj4w5egPdTDGBpesSCErm+aKET5BnnEOvDtTP6GxcnMB1zZiv2iQ0qJZvJie+1wfIRg6Q==", + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/docker-modem/-/docker-modem-5.0.3.tgz", + "integrity": "sha512-89zhop5YVhcPEt5FpUFGr3cDyceGhq/F9J+ZndQ4KfqNvfbJpPMfgeixFgUj5OjCYAboElqODxY5Z1EBsSa6sg==", "dependencies": { "debug": "^4.1.1", "readable-stream": "^3.5.0", "split-ca": "^1.0.1", - "ssh2": "^1.4.0" + "ssh2": "^1.15.0" }, "engines": { "node": ">= 8.0" } }, 
"node_modules/docker-modem/node_modules/readable-stream": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", - "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", @@ -2740,11 +2755,12 @@ } }, "node_modules/dockerode": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/dockerode/-/dockerode-3.3.2.tgz", - "integrity": "sha512-oXN+1XVH2TeyE0Jj9Ci6Fim8ZIDxyqeJrkx9qhEOaRiA+nhLihKfd3M2L+Aqrj5C2ObPw8RVN2zPWvvk0x2dwg==", + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/dockerode/-/dockerode-4.0.2.tgz", + "integrity": "sha512-9wM1BVpVMFr2Pw3eJNXrYYt6DT9k0xMcsSCjtPvyQ+xa1iPg/Mo3T/gUcwI0B2cczqCeCYRPF8yFYDwtFXT0+w==", "dependencies": { - "docker-modem": "^3.0.0", + "@balena/dockerignore": "^1.0.2", + "docker-modem": "^5.0.3", "tar-fs": "~2.0.1" }, "engines": { @@ -4516,6 +4532,14 @@ "node": ">= 12" } }, + "node_modules/mathml-to-latex": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/mathml-to-latex/-/mathml-to-latex-1.4.0.tgz", + "integrity": "sha512-dRVr2hCh/dwM8Cn1ZlKtb1Rw48z4fsUuZIWoOdMZ3Tct0v+QMSgxrO2nV69UIgySF51VW8qPEskNzhLLBrl5QQ==", + "dependencies": { + "@xmldom/xmldom": "^0.8.10" + } + }, "node_modules/media-typer": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", @@ -4750,9 +4774,9 @@ "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" }, "node_modules/nan": { - "version": "2.16.0", - "resolved": "https://registry.npmjs.org/nan/-/nan-2.16.0.tgz", - "integrity": 
"sha512-UdAqHyFngu7TfQKsCBgAA6pWDkT8MAO7d0jyOecVhN5354xbLqdn8mV9Tat9gepAupm0bt2DbeaSC8vS52MuFA==", + "version": "2.19.0", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.19.0.tgz", + "integrity": "sha512-nO1xXxfh/RWNxfd/XPfbIfFk5vgLsAxUR9y5O0cHMJu/AW9U95JLXqthYHjEp+8gQ5p96K9jUp8nbVOxCdRbtw==", "optional": true }, "node_modules/nanoid": { @@ -5863,20 +5887,20 @@ "integrity": "sha512-Q5thBSxp5t8WPTTJQS59LrGqOZqOsrhDGDVm8azCqIBjSBd7nd9o2PM+mDulQQkh8h//4U6hFZnc/mul8t5pWQ==" }, "node_modules/ssh2": { - "version": "1.11.0", - "resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.11.0.tgz", - "integrity": "sha512-nfg0wZWGSsfUe/IBJkXVll3PEZ//YH2guww+mP88gTpuSU4FtZN7zu9JoeTGOyCNx2dTDtT9fOpWwlzyj4uOOw==", + "version": "1.15.0", + "resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.15.0.tgz", + "integrity": "sha512-C0PHgX4h6lBxYx7hcXwu3QWdh4tg6tZZsTfXcdvc5caW/EMxaB4H9dWsl7qk+F7LAW762hp8VbXOX7x4xUYvEw==", "hasInstallScript": true, "dependencies": { - "asn1": "^0.2.4", + "asn1": "^0.2.6", "bcrypt-pbkdf": "^1.0.2" }, "engines": { "node": ">=10.16.0" }, "optionalDependencies": { - "cpu-features": "~0.0.4", - "nan": "^2.16.0" + "cpu-features": "~0.0.9", + "nan": "^2.18.0" } }, "node_modules/stack-trace": { @@ -6158,7 +6182,7 @@ "node_modules/tweetnacl": { "version": "0.14.5", "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", - "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=" + "integrity": "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==" }, "node_modules/type-check": { "version": "0.4.0", @@ -6634,6 +6658,11 @@ "regenerator-runtime": "^0.13.4" } }, + "@balena/dockerignore": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@balena/dockerignore/-/dockerignore-1.0.2.tgz", + "integrity": "sha512-wMue2Sy4GAVTk6Ic4tJVcnfdau+gx2EnG7S+uAEe+TWJFqE4YoWN4/H8MSLj4eYJKxGg26lZwboEniNiNwZQ6Q==" + }, "@colors/colors": { "version": "1.5.0", "resolved": 
"https://registry.npmjs.org/@colors/colors/-/colors-1.5.0.tgz", @@ -7316,13 +7345,14 @@ } }, "@types/dockerode": { - "version": "3.3.9", - "resolved": "https://registry.npmjs.org/@types/dockerode/-/dockerode-3.3.9.tgz", - "integrity": "sha512-SYRN5FF/qmwpxUT6snJP5D8k0wgoUKOGVs625XvpRJOOUi6s//UYI4F0tbyE3OmzpI70Fo1+aqpzX27zCrInww==", + "version": "3.3.28", + "resolved": "https://registry.npmjs.org/@types/dockerode/-/dockerode-3.3.28.tgz", + "integrity": "sha512-RjY96chW88t2QvSebCsec+mQYo3/nyOr+/tVcE+0ynlOg2m/i9wPE52DhptzF75QDlhv2uDYVPqKfHKeGTn6Fg==", "dev": true, "requires": { "@types/docker-modem": "*", - "@types/node": "*" + "@types/node": "*", + "@types/ssh2": "*" } }, "@types/express": { @@ -7682,6 +7712,11 @@ "resolved": "https://registry.npmjs.org/@vue/shared/-/shared-3.2.45.tgz", "integrity": "sha512-Ewzq5Yhimg7pSztDV+RH1UDKBzmtqieXQlpTVm2AwraoRL/Rks96mvd8Vgi7Lj+h+TH8dv7mXD3FRZR3TUvbSg==" }, + "@xmldom/xmldom": { + "version": "0.8.10", + "resolved": "https://registry.npmjs.org/@xmldom/xmldom/-/xmldom-0.8.10.tgz", + "integrity": "sha512-2WALfTl4xo2SkGCYRt6rDTFfk9R1czmBvUQy12gK2KuRKIpWEhcbbzy8EZXtz/jkRqHX8bFEc6FC1HjX4TUWYw==" + }, "accepts": { "version": "1.3.8", "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", @@ -7846,7 +7881,7 @@ "bcrypt-pbkdf": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", - "integrity": "sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4=", + "integrity": "sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w==", "requires": { "tweetnacl": "^0.14.3" } @@ -7965,9 +8000,9 @@ "integrity": "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==" }, "buildcheck": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/buildcheck/-/buildcheck-0.0.3.tgz", - "integrity": "sha512-pziaA+p/wdVImfcbsZLNF32EiWyujlQLwolMqUQE8xpKNOH7KmZQaY8sXN7DGOEzPAElo9QTaeNRfGnf3iOJbA==", + "version": "0.0.6", + 
"resolved": "https://registry.npmjs.org/buildcheck/-/buildcheck-0.0.6.tgz", + "integrity": "sha512-8f9ZJCUXyT1M35Jx7MkBgmBMo3oHTTBIPLiY9xyL0pl3T5RwcPEY8cUHr5LBNfu/fk6c2T4DJZuVM/8ZZT2D2A==", "optional": true }, "bytes": { @@ -8278,13 +8313,13 @@ "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" }, "cpu-features": { - "version": "0.0.4", - "resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.4.tgz", - "integrity": "sha512-fKiZ/zp1mUwQbnzb9IghXtHtDoTMtNeb8oYGx6kX2SYfhnG0HNdBEBIzB9b5KlXu5DQPhfy3mInbBxFcgwAr3A==", + "version": "0.0.9", + "resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.9.tgz", + "integrity": "sha512-AKjgn2rP2yJyfbepsmLfiYcmtNn/2eUvocUyM/09yB0YDiz39HteK/5/T4Onf0pmdYDMgkBoGvRLvEguzyL7wQ==", "optional": true, "requires": { - "buildcheck": "0.0.3", - "nan": "^2.15.0" + "buildcheck": "~0.0.6", + "nan": "^2.17.0" } }, "create-require": { @@ -8410,20 +8445,20 @@ } }, "docker-modem": { - "version": "3.0.5", - "resolved": "https://registry.npmjs.org/docker-modem/-/docker-modem-3.0.5.tgz", - "integrity": "sha512-x1E6jxWdtoK3+ifAUWj4w5egPdTDGBpesSCErm+aKET5BnnEOvDtTP6GxcnMB1zZiv2iQ0qJZvJie+1wfIRg6Q==", + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/docker-modem/-/docker-modem-5.0.3.tgz", + "integrity": "sha512-89zhop5YVhcPEt5FpUFGr3cDyceGhq/F9J+ZndQ4KfqNvfbJpPMfgeixFgUj5OjCYAboElqODxY5Z1EBsSa6sg==", "requires": { "debug": "^4.1.1", "readable-stream": "^3.5.0", "split-ca": "^1.0.1", - "ssh2": "^1.4.0" + "ssh2": "^1.15.0" }, "dependencies": { "readable-stream": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", - "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", 
"requires": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", @@ -8433,11 +8468,12 @@ } }, "dockerode": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/dockerode/-/dockerode-3.3.2.tgz", - "integrity": "sha512-oXN+1XVH2TeyE0Jj9Ci6Fim8ZIDxyqeJrkx9qhEOaRiA+nhLihKfd3M2L+Aqrj5C2ObPw8RVN2zPWvvk0x2dwg==", + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/dockerode/-/dockerode-4.0.2.tgz", + "integrity": "sha512-9wM1BVpVMFr2Pw3eJNXrYYt6DT9k0xMcsSCjtPvyQ+xa1iPg/Mo3T/gUcwI0B2cczqCeCYRPF8yFYDwtFXT0+w==", "requires": { - "docker-modem": "^3.0.0", + "@balena/dockerignore": "^1.0.2", + "docker-modem": "^5.0.3", "tar-fs": "~2.0.1" }, "dependencies": { @@ -9785,6 +9821,14 @@ "typed-function": "^2.1.0" } }, + "mathml-to-latex": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/mathml-to-latex/-/mathml-to-latex-1.4.0.tgz", + "integrity": "sha512-dRVr2hCh/dwM8Cn1ZlKtb1Rw48z4fsUuZIWoOdMZ3Tct0v+QMSgxrO2nV69UIgySF51VW8qPEskNzhLLBrl5QQ==", + "requires": { + "@xmldom/xmldom": "^0.8.10" + } + }, "media-typer": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", @@ -9959,9 +10003,9 @@ "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" }, "nan": { - "version": "2.16.0", - "resolved": "https://registry.npmjs.org/nan/-/nan-2.16.0.tgz", - "integrity": "sha512-UdAqHyFngu7TfQKsCBgAA6pWDkT8MAO7d0jyOecVhN5354xbLqdn8mV9Tat9gepAupm0bt2DbeaSC8vS52MuFA==", + "version": "2.19.0", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.19.0.tgz", + "integrity": "sha512-nO1xXxfh/RWNxfd/XPfbIfFk5vgLsAxUR9y5O0cHMJu/AW9U95JLXqthYHjEp+8gQ5p96K9jUp8nbVOxCdRbtw==", "optional": true }, "nanoid": { @@ -10762,14 +10806,14 @@ "integrity": "sha512-Q5thBSxp5t8WPTTJQS59LrGqOZqOsrhDGDVm8azCqIBjSBd7nd9o2PM+mDulQQkh8h//4U6hFZnc/mul8t5pWQ==" }, "ssh2": { - "version": "1.11.0", - "resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.11.0.tgz", - "integrity": 
"sha512-nfg0wZWGSsfUe/IBJkXVll3PEZ//YH2guww+mP88gTpuSU4FtZN7zu9JoeTGOyCNx2dTDtT9fOpWwlzyj4uOOw==", + "version": "1.15.0", + "resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.15.0.tgz", + "integrity": "sha512-C0PHgX4h6lBxYx7hcXwu3QWdh4tg6tZZsTfXcdvc5caW/EMxaB4H9dWsl7qk+F7LAW762hp8VbXOX7x4xUYvEw==", "requires": { - "asn1": "^0.2.4", + "asn1": "^0.2.6", "bcrypt-pbkdf": "^1.0.2", - "cpu-features": "~0.0.4", - "nan": "^2.16.0" + "cpu-features": "~0.0.9", + "nan": "^2.18.0" } }, "stack-trace": { @@ -10984,7 +11028,7 @@ "tweetnacl": { "version": "0.14.5", "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", - "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=" + "integrity": "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==" }, "type-check": { "version": "0.4.0", diff --git a/package.json b/package.json index 278dc6d7..6566f0a8 100644 --- a/package.json +++ b/package.json @@ -17,7 +17,8 @@ "bin": { "wgd": "src/wikigdrive.sh", "wikigdrive": "src/wikigdrive.sh", - "wikigdrivectl": "src/wikigdrivectl.sh" + "wikigdrivectl": "src/wikigdrivectl.sh", + "odt2md": "src/odt2md.sh" }, "main": "src/cli/wikigdrive.ts", "scripts": { @@ -84,7 +85,7 @@ "compression": "1.7.4", "cookie-parser": "1.4.6", "dayjs": "1.11.1", - "dockerode": "3.3.2", + "dockerode": "4.0.2", "dom-serializer": "2.0.0", "domutils": "3.1.0", "dotenv": "^8.6.0", @@ -102,6 +103,7 @@ "lunr-languages": "1.10.0", "marked": "9.0.2", "mathjs": "10.5.0", + "mathml-to-latex": "1.4.0", "minimist": "1.2.6", "mitt": "^3.0.0", "open": "^7.4.2", @@ -125,7 +127,7 @@ "@swc/helpers": "0.5.3", "@types/async": "3.2.12", "@types/compression": "1.7.3", - "@types/dockerode": "3.3.9", + "@types/dockerode": "3.3.28", "@types/express": "4.17.13", "@types/lunr": "2.3.4", "@types/mocha": "10.0.2", diff --git a/src/LinkTranslator.ts b/src/LinkTranslator.ts index 60bb7380..310ce343 100644 --- a/src/LinkTranslator.ts +++ b/src/LinkTranslator.ts @@ -2,7 +2,7 @@ import 
RelateUrl from 'relateurl'; -import {LinkMode} from './model/model'; +import {LinkMode} from './model/model.ts'; export function convertExtension(localPath: string, mode?: LinkMode) { const lastSlash = localPath.lastIndexOf('/'); diff --git a/src/cli/odt2md.ts b/src/cli/odt2md.ts new file mode 100644 index 00000000..24f7c9ed --- /dev/null +++ b/src/cli/odt2md.ts @@ -0,0 +1,89 @@ +'use strict'; + +import path from 'path'; +import minimist from 'minimist'; +import {fileURLToPath} from 'url'; +import {Buffer} from 'buffer'; +import fs from 'fs'; + +import {OdtProcessor} from '../odt/OdtProcessor.ts'; +import {UnMarshaller} from '../odt/UnMarshaller.ts'; +import {DocumentContent, DocumentStyles, LIBREOFFICE_CLASSES} from '../odt/LibreOffice.ts'; +import {OdtToMarkdown} from '../odt/OdtToMarkdown.ts'; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); + +process.env.GIT_SHA = process.env.GIT_SHA || 'dev'; + +async function usage() { + const pkg = JSON.parse(new TextDecoder().decode(fs.readFileSync(path.resolve(__dirname, '..', '..', 'package.json')))); + + const commandUsage = 'echo "test" | odt2md\n\nor\n\nodt2md filename.odt'; + + console.log( + `${pkg.name} version: ${pkg.version}, ${process.env.GIT_SHA}\n\nUsage:\n${commandUsage.trim()}\n`); +} + +async function main() { + const inputArr = []; + + process.stdin.on( 'data', function(data) { inputArr.push(data); } ); + + await new Promise(resolve => { + setTimeout(() => { + process.stdin.destroy(); + resolve(null); + }, 50); + process.stdin.on( 'end', resolve); + }); + + const argv = minimist(process.argv.slice(2)); + + if (inputArr.length === 0) { + if (argv._.length < 1 || argv.h || argv.help) { + await usage(); + process.exit(1); + } + + inputArr.push(fs.readFileSync(path.resolve(process.cwd(), argv._[0]))); + } + + if (inputArr.length === 0) { + console.error('No input'); + process.exit(1); + } + + const processor = new OdtProcessor(); + await 
processor.loadFromBuffer(Buffer.concat(inputArr)); + if (!processor.getContentXml()) { + throw Error('No odt processed'); + } + + const parser = new UnMarshaller(LIBREOFFICE_CLASSES, 'DocumentContent'); + const document: DocumentContent = parser.unmarshal(processor.getContentXml()); + if (!document) { + throw Error('No document unmarshalled'); + } + const parserStyles = new UnMarshaller(LIBREOFFICE_CLASSES, 'DocumentStyles'); + const styles: DocumentStyles = parserStyles.unmarshal(processor.getStylesXml()); + if (!styles) { + throw Error('No styles unmarshalled'); + } + const converter = new OdtToMarkdown(document, styles, processor.getFileNameMap(), processor.getXmlMap()); + const markdown = await converter.convert(); + console.log(markdown); +} + +try { + await main(); + process.exit(0); +} catch (err) { + if (err.isUsageError) { + console.error(err.message); + await usage(); + } else { + console.error(err); + } + process.exit(1); +} diff --git a/src/cli/wikigdrive-config.ts b/src/cli/wikigdrive-config.ts index 16d0624f..460cf6d6 100644 --- a/src/cli/wikigdrive-config.ts +++ b/src/cli/wikigdrive-config.ts @@ -5,10 +5,10 @@ import minimist from 'minimist'; import dotenv from 'dotenv'; import {fileURLToPath} from 'url'; -import {addTelemetry} from '../telemetry'; -import {FileContentService} from '../utils/FileContentService'; -import {getAuthConfig} from './getAuthConfig'; -import {usage} from './usage'; +import {addTelemetry} from '../telemetry.ts'; +import {FileContentService} from '../utils/FileContentService.ts'; +import {getAuthConfig} from './getAuthConfig.ts'; +import {usage} from './usage.ts'; const __filename = fileURLToPath(import.meta.url); const __dirname = path.dirname(__filename); diff --git a/src/cli/wikigdrive-pull.ts b/src/cli/wikigdrive-pull.ts index bb223d5e..9e76701e 100644 --- a/src/cli/wikigdrive-pull.ts +++ b/src/cli/wikigdrive-pull.ts @@ -5,15 +5,17 @@ import minimist from 'minimist'; import dotenv from 'dotenv'; import {fileURLToPath} from 
'url'; -import {addTelemetry} from '../telemetry'; -import {GoogleApiContainer} from '../containers/google_api/GoogleApiContainer'; -import {getAuthConfig} from './getAuthConfig'; -import {urlToFolderId} from '../utils/idParsers'; -import {GoogleFolderContainer} from '../containers/google_folder/GoogleFolderContainer'; -import {TransformContainer} from '../containers/transform/TransformContainer'; -import {FolderRegistryContainer} from '../containers/folder_registry/FolderRegistryContainer'; -import {usage} from './usage'; -import {initEngine} from './initEngine'; +import {addTelemetry} from '../telemetry.ts'; +import {GoogleApiContainer} from '../containers/google_api/GoogleApiContainer.ts'; +import {getAuthConfig} from './getAuthConfig.ts'; +import {urlToFolderId} from '../utils/idParsers.ts'; +import {GoogleFolderContainer} from '../containers/google_folder/GoogleFolderContainer.ts'; +import {TransformContainer} from '../containers/transform/TransformContainer.ts'; +import {FolderRegistryContainer} from '../containers/folder_registry/FolderRegistryContainer.ts'; +import {usage, UsageError} from './usage.ts'; +import {initEngine} from './initEngine.ts'; +import {JobManagerContainer} from '../containers/job/JobManagerContainer.ts'; +import {UserConfigService} from '../containers/google_folder/UserConfigService.ts'; const __filename = fileURLToPath(import.meta.url); const __dirname = path.dirname(__filename); @@ -52,7 +54,6 @@ async function main() { service_account: argv['service_account'] || null, }; const authConfig = await getAuthConfig(params, mainFileService); - const apiContainer = new GoogleApiContainer({ name: 'google_api' }, authConfig); await apiContainer.mount(await mainFileService); await containerEngine.registerContainer(apiContainer); @@ -63,9 +64,26 @@ async function main() { await containerEngine.registerContainer(folderRegistryContainer); await folderRegistryContainer.run(); + const jobManagerContainer = new JobManagerContainer({ name: 
'job_manager' }); + await jobManagerContainer.mount(await mainFileService); + await containerEngine.registerContainer(jobManagerContainer); + await jobManagerContainer.run(); + const googleFileSystem = await mainFileService.getSubFileService(folderId, '/'); const transformFileSystem = await mainFileService.getSubFileService(folderId + '_transform', '/'); + const userConfigService = new UserConfigService(googleFileSystem); + await userConfigService.load(); + + if (argv['transform_subdir']) { + userConfigService.config.transform_subdir = argv['transform_subdir']; + await userConfigService.save(); + } + + if (!userConfigService.config?.transform_subdir || !userConfigService.config?.transform_subdir.startsWith('/')) { + throw new UsageError('No markdown destination dir specified use --transform_subdir, must start with /'); + } + logger.info('Downloading'); const downloadContainer = new GoogleFolderContainer({ cmd: 'pull', diff --git a/src/containers/action/ActionRunnerContainer.ts b/src/containers/action/ActionRunnerContainer.ts index 8bb3f66b..b3f0ef57 100644 --- a/src/containers/action/ActionRunnerContainer.ts +++ b/src/containers/action/ActionRunnerContainer.ts @@ -1,14 +1,15 @@ -import {Container, ContainerEngine} from '../../ContainerEngine'; -import {FileId} from '../../model/model'; +import * as path from 'path'; +import {fileURLToPath} from 'url'; import winston from 'winston'; import Docker from 'dockerode'; -import {fileURLToPath} from 'url'; -import {BufferWritable} from '../../utils/BufferWritable'; -import {UserConfigService} from '../google_folder/UserConfigService'; import yaml from 'js-yaml'; -import {GitScanner} from '../../git/GitScanner'; -import {FileContentService} from '../../utils/FileContentService'; -import * as path from 'path'; + +import {Container, ContainerEngine} from '../../ContainerEngine.ts'; +import {FileId} from '../../model/model.ts'; +import {BufferWritable} from '../../utils/BufferWritable.ts'; +import {UserConfigService} from 
'../google_folder/UserConfigService.ts'; +import {GitScanner} from '../../git/GitScanner.ts'; +import {FileContentService} from '../../utils/FileContentService.ts'; const __filename = fileURLToPath(import.meta.url); @@ -20,12 +21,14 @@ export interface ActionStep { export interface ActionDefinition { on: string; + 'run-name'?: string; steps: Array; } export const DEFAULT_ACTIONS: ActionDefinition[] = [ { on: 'transform', + 'run-name': 'AutoCommit and Render', steps: [ { name: 'auto_commit', @@ -39,6 +42,7 @@ export const DEFAULT_ACTIONS: ActionDefinition[] = [ }, { on: 'branch', + 'run-name': 'Commit and Push branch', steps: [ { uses: 'commit_branch' @@ -50,6 +54,7 @@ export const DEFAULT_ACTIONS: ActionDefinition[] = [ }, { on: 'git_reset', + 'run-name': 'Render', steps: [ { name: 'render_hugo', @@ -59,6 +64,7 @@ export const DEFAULT_ACTIONS: ActionDefinition[] = [ }, { on: 'git_pull', + 'run-name': 'Render', steps: [ { name: 'render_hugo', @@ -68,6 +74,12 @@ export const DEFAULT_ACTIONS: ActionDefinition[] = [ } ]; +export async function convertActionYaml(actionYaml: string): Promise { + const actionDefs: ActionDefinition[] = actionYaml ? yaml.load(actionYaml) : DEFAULT_ACTIONS; + return actionDefs; +} + + export class ActionRunnerContainer extends Container { private logger: winston.Logger; private generatedFileService: FileContentService; @@ -80,11 +92,6 @@ export class ActionRunnerContainer extends Container { this.logger = engine.logger.child({ filename: __filename, driveId: this.params.name, jobId: this.params.jobId }); } - async convertActionYaml(actionYaml: string): Promise { - const actionDefs: ActionDefinition[] = actionYaml ? 
yaml.load(actionYaml) : DEFAULT_ACTIONS; - return actionDefs; - } - async mount3(fileService: FileContentService, destFileService: FileContentService, tempFileService: FileContentService): Promise { this.filesService = fileService; this.generatedFileService = destFileService; @@ -148,6 +155,8 @@ export class ActionRunnerContainer extends Container { let result; + await this.generatedFileService.remove('resources'); + if (themeId) { const env = ['render_hugo', 'exec', 'commit_branch'].includes(step.uses) ? Object.assign({ CONFIG_TOML: '/site/tmp_dir/config.toml', @@ -167,19 +176,44 @@ export class ActionRunnerContainer extends Container { -v "${process.env.VOLUME_DATA}${contentDir}:/site/content" \\ -v "${process.env.VOLUME_PREVIEW}/${driveId}/${themeId}:/site/public" \\ -v "${process.env.VOLUME_DATA}/${driveId}/tmp_dir:/site/tmp_dir" \\ - -v "${process.env.VOLUME_DATA}/${driveId}/tmp_dir:/site/resources" \\ + --mount type=tmpfs,destination=/site/resources" \ ${Object.keys(env).map(key => `--env ${key}="${env[key]}"`).join(' ')} \\ ${process.env.ACTION_IMAGE} /steps/step_${step.uses} `); result = await docker.run(process.env.ACTION_IMAGE, [`/steps/step_${step.uses}`], writable, { HostConfig: { - Binds: [ - `${process.env.VOLUME_DATA}/${driveId}_transform:/repo`, - `${process.env.VOLUME_DATA}${contentDir}:/site/content`, - `${process.env.VOLUME_PREVIEW}/${driveId}/${themeId}:/site/public`, - `${process.env.VOLUME_DATA}/${driveId}/tmp_dir:/site/tmp_dir`, - `${process.env.VOLUME_DATA}/${driveId}/tmp_dir:/site/resources` + Mounts: [ + { + Source: `${process.env.VOLUME_DATA}/${driveId}_transform`, + Target: '/repo', + Type: 'bind' + }, + { + Source: `${process.env.VOLUME_DATA}${contentDir}`, + Target: '/site/content', + Type: 'bind' + }, + { + Source: `${process.env.VOLUME_PREVIEW}/${driveId}/${themeId}`, + Target: '/site/public', + Type: 'bind' + }, + { + Source: `${process.env.VOLUME_DATA}/${driveId}/tmp_dir`, + Target: '/site/tmp_dir', + Type: 'bind' + }, + { + 
Source: '', + Target: '/site/resources', + Type: 'tmpfs', + ReadOnly: false, + TmpfsOptions: { + SizeBytes: undefined, + Mode: 0o777 + } + } ] }, Env: Object.keys(env).map(key => `${key}=${env[key]}`), @@ -202,20 +236,49 @@ export class ActionRunnerContainer extends Container { -v "${process.env.VOLUME_DATA}${contentDir}:/site/content" \\ -v "${process.env.VOLUME_PREVIEW}/${driveId}/_manual:/site/public" \\ -v "${process.env.VOLUME_DATA}/${driveId}/tmp_dir:/site/tmp_dir" \\ - -v "${process.env.VOLUME_DATA}/${driveId}/tmp_dir:/site/resources" \\ + --mount type=tmpfs,destination=/site/resources" \\ ${Object.keys(env).map(key => `--env ${key}="${env[key]}"`).join(' ')} \\ ${process.env.ACTION_IMAGE} /steps/step_${step.uses} `); result = await docker.run(process.env.ACTION_IMAGE, [`/steps/step_${step.uses}`], writable, { HostConfig: { - Binds: [ - `${process.env.VOLUME_DATA}/${driveId}_transform:/repo`, - `${process.env.VOLUME_DATA}/${driveIdTransform}:/site`, - `${process.env.VOLUME_DATA}${contentDir}:/site/content`, - `${process.env.VOLUME_PREVIEW}/${driveId}/_manual:/site/public`, - `${process.env.VOLUME_DATA}/${driveId}/tmp_dir:/site/tmp_dir`, - `${process.env.VOLUME_DATA}/${driveId}/tmp_dir:/site/resources` + Mounts: [ + { + Source: `${process.env.VOLUME_DATA}/${driveId}_transform`, + Target: '/repo', + Type: 'bind' + }, + { + Source: `${process.env.VOLUME_DATA}/${driveIdTransform}`, + Target: '/site', + Type: 'bind', + }, + { + Source: `${process.env.VOLUME_DATA}${contentDir}`, + Target: '/site/content', + Type: 'bind' + }, + { + Source: `${process.env.VOLUME_PREVIEW}/${driveId}/_manual`, + Target: '/site/public', + Type: 'bind' + }, + { + Source: `${process.env.VOLUME_DATA}/${driveId}/tmp_dir`, + Target: '/site/tmp_dir', + Type: 'bind' + }, + { + Source: '', + Target: '/site/resources', + Type: 'tmpfs', + ReadOnly: false, + TmpfsOptions: { + SizeBytes: undefined, + Mode: 0o777 + } + } ] }, Env: Object.keys(env).map(key => `${key}=${env[key]}`), @@ -246,7 +309,7 
@@ export class ActionRunnerContainer extends Container { this.isErr = false; - const actionDefs = await this.convertActionYaml(config.actions_yaml); + const actionDefs = await convertActionYaml(config.actions_yaml); for (const actionDef of actionDefs) { if (actionDef.on !== this.params['trigger']) { continue; diff --git a/src/containers/folder_registry/FolderRegistryContainer.ts b/src/containers/folder_registry/FolderRegistryContainer.ts index 89c7c81e..0354bba2 100644 --- a/src/containers/folder_registry/FolderRegistryContainer.ts +++ b/src/containers/folder_registry/FolderRegistryContainer.ts @@ -1,10 +1,11 @@ -import {Container, ContainerConfig, ContainerEngine} from '../../ContainerEngine'; import winston from 'winston'; -import {FileId} from '../../model/model'; -import {GoogleApiContainer} from '../google_api/GoogleApiContainer'; - import { fileURLToPath } from 'url'; -import {GoogleDriveServiceError} from '../../google/driveFetch'; + +import {Container, ContainerConfig, ContainerEngine} from '../../ContainerEngine.ts'; +import {FileId} from '../../model/model.ts'; +import {GoogleApiContainer} from '../google_api/GoogleApiContainer.ts'; +import {GoogleDriveServiceError} from '../../google/driveFetch.ts'; + const __filename = fileURLToPath(import.meta.url); export interface Drive { diff --git a/src/containers/google_api/GoogleApiContainer.ts b/src/containers/google_api/GoogleApiContainer.ts index a9e09d3b..1718606d 100644 --- a/src/containers/google_api/GoogleApiContainer.ts +++ b/src/containers/google_api/GoogleApiContainer.ts @@ -1,14 +1,15 @@ -import {Container, ContainerConfig, ContainerEngine} from '../../ContainerEngine'; import winston from 'winston'; -import {QuotaLimiter} from '../../google/QuotaLimiter'; -import {GoogleDriveService} from '../../google/GoogleDriveService'; -import {AuthConfig} from '../../model/AccountJson'; -import {Drive, Permission} from '../folder_registry/FolderRegistryContainer'; -import {FileId} from '../../model/model'; 
-import {GoogleFile} from '../../model/GoogleFile'; + +import {Container, ContainerConfig, ContainerEngine} from '../../ContainerEngine.ts'; +import {QuotaLimiter} from '../../google/QuotaLimiter.ts'; +import {GoogleDriveService} from '../../google/GoogleDriveService.ts'; +import {AuthConfig} from '../../model/AccountJson.ts'; +import {Drive, Permission} from '../folder_registry/FolderRegistryContainer.ts'; +import {FileId} from '../../model/model.ts'; +import {GoogleFile} from '../../model/GoogleFile.ts'; +import {GoogleAuth, HasAccessToken, UserAuthClient, ServiceAuthClient, getCliCode} from '../../google/AuthClient.ts'; import {fileURLToPath} from 'url'; -import {GoogleAuth, HasAccessToken, UserAuthClient, ServiceAuthClient, getCliCode} from '../../google/AuthClient'; const __filename = fileURLToPath(import.meta.url); diff --git a/src/containers/google_folder/GoogleFolderContainer.ts b/src/containers/google_folder/GoogleFolderContainer.ts index f8afab32..15f80a07 100644 --- a/src/containers/google_folder/GoogleFolderContainer.ts +++ b/src/containers/google_folder/GoogleFolderContainer.ts @@ -1,15 +1,16 @@ -import {Container, ContainerConfig, ContainerConfigArr, ContainerEngine} from '../../ContainerEngine'; import winston from 'winston'; -import {GoogleDriveService} from '../../google/GoogleDriveService'; -import {GoogleApiContainer} from '../google_api/GoogleApiContainer'; -import {QueueDownloader} from './QueueDownloader'; -import {TaskFetchFolder} from './TaskFetchFolder'; -import {MimeTypes} from '../../model/GoogleFile'; -import {DateISO, FileId} from '../../model/model'; import {fileURLToPath} from 'url'; -import {FolderRegistryContainer} from '../folder_registry/FolderRegistryContainer'; -import {GoogleTreeProcessor} from './GoogleTreeProcessor'; -import {HasAccessToken} from '../../google/AuthClient'; + +import {Container, ContainerConfig, ContainerConfigArr, ContainerEngine} from '../../ContainerEngine.ts'; +import {GoogleDriveService} from 
'../../google/GoogleDriveService.ts'; +import {GoogleApiContainer} from '../google_api/GoogleApiContainer.ts'; +import {QueueDownloader} from './QueueDownloader.ts'; +import {TaskFetchFolder} from './TaskFetchFolder.ts'; +import {MimeTypes} from '../../model/GoogleFile.ts'; +import {DateISO, FileId} from '../../model/model.ts'; +import {FolderRegistryContainer} from '../folder_registry/FolderRegistryContainer.ts'; +import {GoogleTreeProcessor} from './GoogleTreeProcessor.ts'; +import {HasAccessToken} from '../../google/AuthClient.ts'; const __filename = fileURLToPath(import.meta.url); diff --git a/src/containers/google_folder/UploadContainer.ts b/src/containers/google_folder/UploadContainer.ts index c6bfd460..ba966c04 100644 --- a/src/containers/google_folder/UploadContainer.ts +++ b/src/containers/google_folder/UploadContainer.ts @@ -197,6 +197,10 @@ export class UploadContainer extends Container { for (const file of Object.values(files)) { const fullPath = parentPath + '/' + file.fileName; + if (fullPath === '/toc.md') { + continue; + } + if (!file.title) { this.logger.warn(`Skipping upload: ${fullPath}. 
No title, check frontmatter.`); continue; diff --git a/src/containers/job/JobManagerContainer.ts b/src/containers/job/JobManagerContainer.ts index 6663dac1..05e134d0 100644 --- a/src/containers/job/JobManagerContainer.ts +++ b/src/containers/job/JobManagerContainer.ts @@ -1,23 +1,23 @@ -import {Container, ContainerConfig, ContainerEngine} from '../../ContainerEngine'; -import {FileId} from '../../model/model'; -import {GoogleFolderContainer} from '../google_folder/GoogleFolderContainer'; -import {TransformContainer} from '../transform/TransformContainer'; - -import {fileURLToPath} from 'url'; -import {UserConfigService} from '../google_folder/UserConfigService'; -import {MarkdownTreeProcessor} from '../transform/MarkdownTreeProcessor'; -import {WorkerPool} from './WorkerPool'; import os from 'os'; -import {GitScanner} from '../../git/GitScanner'; -import {FileContentService} from '../../utils/FileContentService'; -import {CACHE_PATH} from '../server/routes/FolderController'; -import {FolderRegistryContainer} from '../folder_registry/FolderRegistryContainer'; -import {ActionRunnerContainer} from '../action/ActionRunnerContainer'; +import {fileURLToPath} from 'url'; import fs from 'fs'; import path from 'path'; import { randomUUID } from 'crypto'; -import {getContentFileService} from '../transform/utils'; -import {UploadContainer} from '../google_folder/UploadContainer'; + +import {Container, ContainerConfig, ContainerEngine} from '../../ContainerEngine.ts'; +import {FileId} from '../../model/model.ts'; +import {GoogleFolderContainer} from '../google_folder/GoogleFolderContainer.ts'; +import {TransformContainer} from '../transform/TransformContainer.ts'; +import {UserConfigService} from '../google_folder/UserConfigService.ts'; +import {MarkdownTreeProcessor} from '../transform/MarkdownTreeProcessor.ts'; +import {WorkerPool} from './WorkerPool.ts'; +import {GitScanner} from '../../git/GitScanner.ts'; +import {FileContentService} from 
'../../utils/FileContentService.ts'; +import {CACHE_PATH} from '../server/routes/FolderController.ts'; +import {FolderRegistryContainer} from '../folder_registry/FolderRegistryContainer.ts'; +import {ActionRunnerContainer, convertActionYaml} from '../action/ActionRunnerContainer.ts'; +import {getContentFileService} from '../transform/utils.ts'; +import {UploadContainer} from '../google_folder/UploadContainer.ts'; const __filename = fileURLToPath(import.meta.url); @@ -34,7 +34,7 @@ export function initJob(): { id: string, state: JobState } { export interface Job { id: string; state: JobState; - progress?: { total: number; completed: number; warnings: number }; + progress?: { total: number; completed: number; warnings: number; failed?: number }; type: JobType; title: string; trigger?: string; @@ -187,6 +187,17 @@ export class JobManagerContainer extends Container { if (driveJobs.jobs.find(subJob => subJob.type === 'run_action' && notCompletedJob(subJob))) { return; } + { + const googleFileSystem = await this.filesService.getSubFileService(driveId, '/'); + const userConfigService = new UserConfigService(googleFileSystem); + await userConfigService.load(); + const config = userConfigService.config; + const actionDefs = await convertActionYaml(config.actions_yaml); + const action = actionDefs.find(action => action.on === job.trigger); + if (action && action['run-name']) { + job.title = action['run-name']; + } + } driveJobs.jobs.push(job); break; case 'transform': @@ -700,11 +711,7 @@ export class JobManagerContainer extends Container { for (const fileToRemove of removeFilePaths .filter(path => path.endsWith('.md')) .map(path => path.substring(0, path.length - 3) + '.assets')) { - - if (!await transformedFileSystem.exists(fileToRemove)) { - continue; - } - removeFileAssetsPaths.push(fileToRemove + '/' + fileToRemove); + removeFileAssetsPaths.push(fileToRemove); } filePaths.push(...fileAssetsPaths); diff --git a/src/containers/job/worker.ts b/src/containers/job/worker.ts 
index 13071079..a596acc7 100644 --- a/src/containers/job/worker.ts +++ b/src/containers/job/worker.ts @@ -1,5 +1,6 @@ import { parentPort } from 'worker_threads'; -import {executeOdtToMarkdown} from '../../odt/executeOdtToMarkdown'; + +import {executeOdtToMarkdown} from '../../odt/executeOdtToMarkdown.ts'; parentPort.on('message', async (msg) => { try { diff --git a/src/containers/server/routes/PreviewController.ts b/src/containers/server/routes/PreviewController.ts index 62d0e71a..cfed38e1 100644 --- a/src/containers/server/routes/PreviewController.ts +++ b/src/containers/server/routes/PreviewController.ts @@ -1,13 +1,13 @@ +import {Logger} from 'winston'; + import { Controller, RouteErrorHandler, - RouteParamPath, RouteResponse, RouteUse -} from './Controller'; -import {Logger, QueryOptions} from 'winston'; -import {extToMime, ShareErrorHandler} from './FolderController'; -import {FileContentService} from '../../../utils/FileContentService'; +} from './Controller.ts'; +import {extToMime, ShareErrorHandler} from './FolderController.ts'; +import {FileContentService} from '../../../utils/FileContentService.ts'; export class PreviewController extends Controller { private fileSystem: FileContentService; @@ -21,10 +21,10 @@ export class PreviewController extends Controller { @RouteUse('/:driveId') @RouteResponse('stream') @RouteErrorHandler(new ShareErrorHandler()) - async getFolder(@RouteParamPath('driveId') driveId: string) { - let filePath = this.req.originalUrl.replace('/preview', '') || '/'; + async getFolder() { + const relativeUrl = this.req.originalUrl || '/'; - filePath = filePath.replace(/\?.*$/, ''); + let filePath = relativeUrl.replace('/preview', '').replace(/\?.*$/, ''); if (!await this.fileSystem.exists(filePath)) { this.queryLogger.warn(`Not found: ${filePath}`); @@ -33,6 +33,14 @@ export class PreviewController extends Controller { } if (await this.fileSystem.isDirectory(filePath)) { + if (!relativeUrl.endsWith('/')) { + this.res + .status(301) + 
.setHeader('location', relativeUrl + '/') + .send(); + return; + } + filePath = filePath + '/index.html'; } diff --git a/src/containers/transform/TaskLocalFileTransform.ts b/src/containers/transform/TaskLocalFileTransform.ts index 58634f1b..23214da0 100644 --- a/src/containers/transform/TaskLocalFileTransform.ts +++ b/src/containers/transform/TaskLocalFileTransform.ts @@ -136,13 +136,16 @@ export class TaskLocalFileTransform extends QueueTask { const rewriteRules = this.userConfig.rewrite_rules || []; + const picturesDirAbsolute = destinationPath + '/' + this.realFileName.replace(/.md$/, '.assets/'); + if (SINGLE_THREADED_TRANSFORM) { - const processor = new OdtProcessor(odtPath, true); - await processor.load(); + const processor = new OdtProcessor(true); + await processor.load(odtPath); await processor.unzipAssets(destinationPath, this.realFileName); const content = processor.getContentXml(); const stylesXml = processor.getStylesXml(); const fileNameMap = processor.getFileNameMap(); + const xmlMap = processor.getXmlMap(); const parser = new UnMarshaller(LIBREOFFICE_CLASSES, 'DocumentContent'); const document = parser.unmarshal(content); @@ -153,12 +156,12 @@ export class TaskLocalFileTransform extends QueueTask { throw Error('No styles unmarshalled'); } - const converter = new OdtToMarkdown(document, styles, fileNameMap); + const converter = new OdtToMarkdown(document, styles, fileNameMap, xmlMap); converter.setRewriteRules(rewriteRules); if (this.realFileName === '_index.md') { - converter.setPicturesDir('./' + this.realFileName.replace(/.md$/, '.assets/')); + converter.setPicturesDir('./' + this.realFileName.replace(/.md$/, '.assets/'), picturesDirAbsolute); } else { - converter.setPicturesDir('../' + this.realFileName.replace(/.md$/, '.assets/')); + converter.setPicturesDir('../' + this.realFileName.replace(/.md$/, '.assets/'), picturesDirAbsolute); } markdown = await converter.convert(); links = Array.from(converter.links); @@ -173,9 +176,10 @@ export class 
TaskLocalFileTransform extends QueueTask { errors: Array; } - const workerResult: WorkerResult = await this.jobManagerContainer.scheduleWorker('OdtToMarkdown', { + const workerResult: WorkerResult = await this.jobManagerContainer.scheduleWorker('OdtToMarkdown', { localFile, realFileName: this.realFileName, + picturesDirAbsolute, odtPath, destinationPath, rewriteRules, diff --git a/src/containers/transform/TransformContainer.ts b/src/containers/transform/TransformContainer.ts index bc989d95..8f82d516 100644 --- a/src/containers/transform/TransformContainer.ts +++ b/src/containers/transform/TransformContainer.ts @@ -1,32 +1,30 @@ -import winston from 'winston'; -import {Container, ContainerConfig, ContainerConfigArr, ContainerEngine} from '../../ContainerEngine'; -import {FileContentService} from '../../utils/FileContentService'; -import {appendConflict, DirectoryScanner, stripConflict} from './DirectoryScanner'; -import {GoogleFilesScanner} from './GoogleFilesScanner'; -import {convertToRelativeMarkDownPath, convertToRelativeSvgPath} from '../../LinkTranslator'; -import {LocalFilesGenerator} from './LocalFilesGenerator'; -import {QueueTransformer} from './QueueTransformer'; -import {generateNavigationHierarchy, NavigationHierarchy} from './generateNavigationHierarchy'; -import {ConflictFile, LocalFile, LocalFileMap, RedirFile} from '../../model/LocalFile'; -import {TaskLocalFileTransform} from './TaskLocalFileTransform'; -import {GoogleFile, MimeTypes} from '../../model/GoogleFile'; -import {generateDirectoryYaml, parseDirectoryYaml} from './frontmatters/generateDirectoryYaml'; -import {getContentFileService, removeMarkDownsAndImages} from './utils'; -import {LocalLog} from './LocalLog'; -import {LocalLinks} from './LocalLinks'; -import {OdtProcessor} from '../../odt/OdtProcessor'; -import {UnMarshaller} from '../../odt/UnMarshaller'; -import {DocumentContent, LIBREOFFICE_CLASSES} from '../../odt/LibreOffice'; -import {TaskRedirFileTransform} from 
'./TaskRedirFileTransform'; -import {TocGenerator} from './frontmatters/TocGenerator'; -import {FileId} from '../../model/model'; import {fileURLToPath} from 'url'; -import {MarkdownTreeProcessor} from './MarkdownTreeProcessor'; -import {LunrIndexer} from '../search/LunrIndexer'; -import {JobManagerContainer} from '../job/JobManagerContainer'; -import {UserConfigService} from '../google_folder/UserConfigService'; +import winston from 'winston'; import Transport from 'winston-transport'; +import {Container, ContainerConfig, ContainerConfigArr, ContainerEngine} from '../../ContainerEngine.ts'; +import {FileContentService} from '../../utils/FileContentService.ts'; +import {appendConflict, DirectoryScanner, stripConflict} from './DirectoryScanner.ts'; +import {GoogleFilesScanner} from './GoogleFilesScanner.ts'; +import {convertToRelativeMarkDownPath, convertToRelativeSvgPath} from '../../LinkTranslator.ts'; +import {LocalFilesGenerator} from './LocalFilesGenerator.ts'; +import {QueueTransformer} from './QueueTransformer.ts'; +import {NavigationHierarchy} from './generateNavigationHierarchy.ts'; +import {ConflictFile, LocalFile, RedirFile} from '../../model/LocalFile.ts'; +import {TaskLocalFileTransform} from './TaskLocalFileTransform.ts'; +import {MimeTypes} from '../../model/GoogleFile.ts'; +import {generateDirectoryYaml, parseDirectoryYaml} from './frontmatters/generateDirectoryYaml.ts'; +import {getContentFileService, removeMarkDownsAndImages} from './utils.ts'; +import {LocalLog} from './LocalLog.ts'; +import {LocalLinks} from './LocalLinks.ts'; +import {TaskRedirFileTransform} from './TaskRedirFileTransform.ts'; +import {TocGenerator} from './frontmatters/TocGenerator.ts'; +import {FileId} from '../../model/model.ts'; +import {MarkdownTreeProcessor} from './MarkdownTreeProcessor.ts'; +import {LunrIndexer} from '../search/LunrIndexer.ts'; +import {JobManagerContainer} from '../job/JobManagerContainer.ts'; +import {UserConfigService} from 
'../google_folder/UserConfigService.ts'; + const __filename = fileURLToPath(import.meta.url); function doesExistIn(googleFolderFiles: LocalFile[], localFile: LocalFile) { @@ -177,7 +175,7 @@ export class TransformLog extends Transport { super(options); } - log(info, callback) { + log(info: { level: string, errorMdFile: string, errorMdMsg: string }, next: () => void) { switch (info.level) { case 'error': case 'warn': @@ -191,8 +189,8 @@ export class TransformLog extends Transport { } } - if (callback) { - callback(null, true); + if (next) { + next(); } } } @@ -404,16 +402,6 @@ export class TransformContainer extends Container { await markdownTreeProcessor.regenerateTree(rootFolderId); await markdownTreeProcessor.save(); - this.hierarchy = await this.loadNavigationHierarchy(); - for (const k in this.hierarchy) { - const item = this.hierarchy[k]; - if (item.identifier) { - const [, path] = await markdownTreeProcessor.findById(item.identifier); - item.pageRef = path; - } - } - await this.writeHugoMenu(this.hierarchy); - const indexer = new LunrIndexer(); await markdownTreeProcessor.walkTree((page) => { indexer.addPage(page); @@ -526,35 +514,6 @@ export class TransformContainer extends Container { async destroy(): Promise { } - async writeHugoMenu(hierarchy: NavigationHierarchy) { - const menus = { - main: Object.values(hierarchy) - }; - - await this.generatedFileService.mkdir('config/_default'); - await this.generatedFileService.writeJson('config/_default/menu.en.json', menus); - } - - async loadNavigationHierarchy(): Promise { - const googleFiles: GoogleFile[] = await this.filesService.readJson('.folder-files.json') || []; - - const navigationFile = googleFiles.find(googleFile => googleFile.name === '.navigation' || googleFile.name === 'navigation'); - if (navigationFile) { - const odtPath = this.filesService.getRealPath() + '/' + navigationFile.id + '.odt'; - const processor = new OdtProcessor(odtPath); - await processor.load(); - const content = 
processor.getContentXml(); - const parser = new UnMarshaller(LIBREOFFICE_CLASSES, 'DocumentContent'); - const navDoc: DocumentContent = parser.unmarshal(content); - - if (navDoc) { - return await generateNavigationHierarchy(navDoc, this.logger); - } - } - - return {}; - } - onProgressNotify(callback: ({total, completed, warnings, failed}: { total?: number; completed?: number, warnings?: number, failed?: number }) => void) { this.progressNotifyCallback = callback; } diff --git a/src/containers/transform/generateNavigationHierarchy.ts b/src/containers/transform/generateNavigationHierarchy.ts index 54f1e72a..b8f87b64 100644 --- a/src/containers/transform/generateNavigationHierarchy.ts +++ b/src/containers/transform/generateNavigationHierarchy.ts @@ -1,8 +1,10 @@ 'use strict'; -import {urlToFolderId} from '../../utils/idParsers'; -import {FileId} from '../../model/model'; -import {DocumentContent, TextLink, TextList, TextParagraph, TextSpace, TextSpan, TextTab} from '../../odt/LibreOffice'; +// Legacy menu generator, use: navigation2menu.mjs + +import {urlToFolderId} from '../../utils/idParsers.ts'; +import {FileId} from '../../model/model.ts'; +import {DocumentContent, TextLink, TextList, TextParagraph, TextSpace, TextSpan, TextTab} from '../../odt/LibreOffice.ts'; export interface NavigationHierarchyNode { name: string; @@ -107,17 +109,15 @@ function processPara(para: TextParagraph, ctx: NavigationProcessContext, level: function processList(textList: TextList, ctx: NavigationProcessContext, level = 0) { for (const textListItem of textList.list) { for (const paraOrList of textListItem.list) { - if (paraOrList.type === 'list') { - processList(paraOrList, ctx, level + 1); - continue; - } - - if (paraOrList.type === 'paragraph') { - processPara(paraOrList, ctx, level); - continue; + switch (paraOrList.type) { + case 'list': + processList(paraOrList, ctx, level + 1); + break; + case 'paragraph': + processPara(paraOrList, ctx, level); + break; } } - } } diff --git 
a/src/git/GitScanner.ts b/src/git/GitScanner.ts index b71889f5..a7289ae2 100644 --- a/src/git/GitScanner.ts +++ b/src/git/GitScanner.ts @@ -87,7 +87,7 @@ export class GitScanner { return fs.existsSync(path.join(this.rootPath, '.git')); } - async changes(): Promise { + async changes(opts: { includeAssets: boolean } = { includeAssets: false }): Promise { const retVal: { [path: string]: GitChange & { cnt: number } } = {}; const skipOthers = false; @@ -114,7 +114,11 @@ export class GitScanner { } try { - const result = await this.exec('git --no-pager diff HEAD --name-status -- \':!**/*.assets/*.png\'', { skipLogger: true }); + const cmd = !opts.includeAssets ? + 'git --no-pager diff HEAD --name-status -- \':!**/*.assets/*.png\'' : + 'git --no-pager diff HEAD --name-status --'; + + const result = await this.exec(cmd, { skipLogger: true }); for (const line of result.stdout.split('\n')) { const parts = line.split(/\s/); const path = parts[parts.length - 1]; @@ -149,7 +153,7 @@ export class GitScanner { .replace(/^"/, '') .replace(/"$/, ''); - if (path.indexOf('.assets/') > -1) { + if (path.indexOf('.assets/') > -1 && !opts.includeAssets) { const idx = path.indexOf('.assets/'); const mdPath = path.substring(0, idx) + '.md'; addEntry(mdPath, { isModified: true }, 1); @@ -184,7 +188,7 @@ export class GitScanner { const chunk = removedFiles.splice(0, 400); const rmParam = chunk.map(fileName => `"${sanitize(fileName)}"`).join(' '); if (rmParam) { - await this.exec(`git rm ${rmParam}`); + await this.exec(`git rm -r ${rmParam}`); } } @@ -207,7 +211,7 @@ export class GitScanner { remoteBranch = 'master'; } - await this.exec(`git pull --rebase origin ${remoteBranch}:master`, { + await this.exec(`git pull --autostash --rebase origin ${remoteBranch}:master`, { env: { GIT_SSH_COMMAND: sshParams?.privateKeyFile ? 
`ssh -i ${sanitize(sshParams.privateKeyFile)} -o StrictHostKeyChecking=no -o IdentitiesOnly=yes` : undefined } diff --git a/src/google/markdownToHtml.ts b/src/google/markdownToHtml.ts index 8bd43bbe..cbbdb32f 100644 --- a/src/google/markdownToHtml.ts +++ b/src/google/markdownToHtml.ts @@ -1,10 +1,11 @@ import {marked} from 'marked'; +import {frontmatter} from '../containers/transform/frontmatters/frontmatter.ts'; export async function markdownToHtml(buffer: Buffer): Promise { const renderer = { paragraph(text: string) { return `

${text}


\n`; - } + }, // code(code: string, infostring: string | undefined, escaped: boolean) { // if (code.endsWith('\n')) { // code = code + '\n'; @@ -17,18 +18,20 @@ export async function markdownToHtml(buffer: Buffer): Promise { // image(href: string, title: string, text: string) { // return `${title}`; // }, - // heading(text, level) { - // const escapedText = text.toLowerCase().replace(/[^\w]+/g, '-'); - // return ` - // ${text} - // `; - // } + heading(text, level) { + const escapedText = text.toLowerCase().replace(/[^\w]+/g, ' ').trim().replaceAll(' ', '-'); + return `${text}\n`; + // return ` + // ${text} + // `; + } }; marked.use({ renderer }); const md = new TextDecoder().decode(buffer); - const html = marked.parse(md, { pedantic: false, hooks: { + const parsed = frontmatter(md); + const html = marked.parse(parsed.content, { pedantic: false, hooks: { preprocess: (markdown: string) => markdown, postprocess(html: string) { const style = '\n'; diff --git a/src/model/CliParams.ts b/src/model/CliParams.ts index bc86269f..483d3230 100644 --- a/src/model/CliParams.ts +++ b/src/model/CliParams.ts @@ -1,9 +1,5 @@ -import {LinkMode} from './model'; - export interface CliParams { - // link_mode: LinkMode; // TODO: remove ??? workdir: string; - // drive: string; // TODO: remove ??? 
args: string[]; debug: string[]; diff --git a/src/model/GoogleFile.ts b/src/model/GoogleFile.ts index b70845c8..02c18d38 100644 --- a/src/model/GoogleFile.ts +++ b/src/model/GoogleFile.ts @@ -1,4 +1,4 @@ -import {DateISO, FileId} from './model'; +import {DateISO, FileId} from './model.ts'; export interface SimpleFile { id: FileId; diff --git a/src/model/LocalFile.ts b/src/model/LocalFile.ts index c0134668..6281141f 100644 --- a/src/model/LocalFile.ts +++ b/src/model/LocalFile.ts @@ -1,4 +1,4 @@ -import {DateISO, FileId} from './model'; +import {DateISO, FileId} from './model.ts'; export interface CommonFileData { title: string; diff --git a/src/model/TreeItem.ts b/src/model/TreeItem.ts index 636bfe2e..2865669f 100644 --- a/src/model/TreeItem.ts +++ b/src/model/TreeItem.ts @@ -1,5 +1,5 @@ -import {CommonFileData, ConflictData} from './LocalFile'; -import {FileId} from './model'; +import {CommonFileData, ConflictData} from './LocalFile.ts'; +import {FileId} from './model.ts'; export interface TreeItem extends CommonFileData { parentId: FileId; diff --git a/src/odt/LibreOffice.ts b/src/odt/LibreOffice.ts index 507a0b03..b38243c7 100644 --- a/src/odt/LibreOffice.ts +++ b/src/odt/LibreOffice.ts @@ -1,4 +1,4 @@ -import {XmlAttribute, XmlElement, XmlElementChild, XmlRootElement, XmlText} from './UnMarshaller'; +import {XmlAttribute, XmlElement, XmlElementChild, XmlRootElement, XmlText} from './UnMarshaller.ts'; // TODO https://git.libreoffice.org/core/+/refs/heads/master/schema/libreoffice/OpenDocument-v1.3+libreoffice-schema.rng @@ -357,8 +357,10 @@ export class ListLevelStyleBullet { @XmlElement() @XmlAttribute('text:level', 'level') +@XmlAttribute('text:start-value', 'startValue') export class ListLevelStyleNumber { level = 0; + startValue = 0; } @XmlElement() diff --git a/src/odt/MarkdownNodes.ts b/src/odt/MarkdownNodes.ts index 736195b5..fe8679b1 100644 --- a/src/odt/MarkdownNodes.ts +++ b/src/odt/MarkdownNodes.ts @@ -15,6 +15,7 @@ export type TAG = 'BODY' | 'HR/' | 
'B' | 'I' | 'BI' | 'BLANK/' | // | '/B' | '/I 'TOC' | 'SVG/' | 'IMG/' | // | '/TOC' 'EMB_SVG' | 'EMB_SVG_G' | 'EMB_SVG_P/' | 'EMB_SVG_TEXT' | // | '/EMB_SVG' | '/EMB_SVG_G' | '/EMB_SVG_TEXT' 'EMB_SVG_TSPAN' | // | '/EMB_SVG_TSPAN' + 'MATHML' | 'CHANGE_START' | 'CHANGE_END' | 'RAW_MODE/' | 'HTML_MODE/' | 'MD_MODE/' | 'MACRO_MODE/' | 'COMMENT'; export interface TagPayload { diff --git a/src/odt/OdtProcessor.ts b/src/odt/OdtProcessor.ts index e25adcb0..61e04123 100644 --- a/src/odt/OdtProcessor.ts +++ b/src/odt/OdtProcessor.ts @@ -16,17 +16,19 @@ export class OdtProcessor { private stylesXml: string; private files: { [p: string]: JSZip.JSZipObject }; private fileNameMap: { [name: string]: string }; + private xmlMap: { [name: string]: string }; - constructor(private odtPath: string, private contentAddressable = false) { + constructor(private contentAddressable = false) { this.fileNameMap = {}; + this.xmlMap = {}; } - async load() { - if (!fs.existsSync(this.odtPath)) { + async load(odtPath: string) { + if (!fs.existsSync(odtPath)) { return; } const jsZip = new JSZip(); - const input: Buffer = fs.readFileSync(this.odtPath); + const input: Buffer = fs.readFileSync(odtPath); const zip = await jsZip.loadAsync(input); this.files = zip.folder('').files; @@ -37,6 +39,43 @@ export class OdtProcessor { if (this.files['styles.xml']) { this.stylesXml = await this.files['styles.xml'].async('string'); } + + await this.processMathMl(); + } + + async loadFromBuffer(input: Buffer): Promise { + const jsZip = new JSZip(); + const zip = await jsZip.loadAsync(input); + + this.files = zip.folder('').files; + + if (this.files['content.xml']) { + this.contentXml = await this.files['content.xml'].async('string'); + } + if (this.files['styles.xml']) { + this.stylesXml = await this.files['styles.xml'].async('string'); + } + + await this.processMathMl(); + } + + async processMathMl() { + for (const relativePath in this.files) { + if (!relativePath.endsWith('/content.xml')) { + continue; + } + + 
const fileName = relativePath.replace('/content.xml', '.xml').replace(/\s/g, '_'); + if (fileName.indexOf('/') === -1) { + const entry = this.files[relativePath]; + const buffer = await entry.async('nodebuffer'); + + const mathMl = new TextDecoder().decode(buffer); + if (mathMl.indexOf(' -1) { + this.xmlMap[fileName] = mathMl; + } + } + } } async unzipAssets(destinationPath: string, destinationName: string) { @@ -99,4 +138,8 @@ export class OdtProcessor { return this.fileNameMap; } + getXmlMap() { + return this.xmlMap; + } + } diff --git a/src/odt/OdtToMarkdown.ts b/src/odt/OdtToMarkdown.ts index ba886876..4c2c633e 100644 --- a/src/odt/OdtToMarkdown.ts +++ b/src/odt/OdtToMarkdown.ts @@ -1,3 +1,5 @@ +import { MathMLToLaTeX } from 'mathml-to-latex'; + import { DocumentContent, DocumentStyles, DrawCustomShape, DrawEnhancedGeometry, DrawFrame, DrawG, @@ -31,7 +33,7 @@ function getBaseFileName(fileName) { const COURIER_FONTS = ['Courier New', 'Courier']; -interface FileNameMap { +interface StringToStringMap { [name: string]: string } @@ -64,9 +66,10 @@ export class OdtToMarkdown { public readonly links: Set = new Set(); private readonly chunks: MarkdownNodes = new MarkdownNodes(); private picturesDir = ''; + private picturesDirAbsolute = ''; private rewriteRules: RewriteRule[] = []; - constructor(private document: DocumentContent, private documentStyles: DocumentStyles, private fileNameMap: FileNameMap = {}) { + constructor(private document: DocumentContent, private documentStyles: DocumentStyles, private fileNameMap: StringToStringMap = {}, private xmlMap: StringToStringMap = {}) { } getStyle(styleName: string): Style { @@ -378,7 +381,21 @@ export class OdtToMarkdown { } async drawFrameToText(currentTagNode: MarkdownTagNode, drawFrame: DrawFrame) { - if (drawFrame.object) { // TODO: MathML + if (drawFrame.object) { + if (drawFrame.object.href) { + const fileName= drawFrame.object.href.replace(/\s/g, '_').replace(/^\.\//, '') + '.xml'; + try { + const mathMl = 
this.xmlMap[fileName]; + if (mathMl && mathMl.indexOf(' -1) { + const node = this.chunks.createNode('MATHML'); + const latex = MathMLToLaTeX.convert(mathMl); + this.chunks.appendText(node, latex); + this.chunks.append(currentTagNode, node); + } + } catch (err) { + console.warn(err); + } + } return; } if (drawFrame.image) { @@ -666,8 +683,9 @@ export class OdtToMarkdown { } } - setPicturesDir(picturesDir: string) { + setPicturesDir(picturesDir: string, picturesDirAbsolute?: string) { this.picturesDir = picturesDir; + this.picturesDirAbsolute = picturesDirAbsolute || picturesDir; } setRewriteRules(rewriteRules: RewriteRule[]) { diff --git a/src/odt/executeOdtToMarkdown.ts b/src/odt/executeOdtToMarkdown.ts index d86f1cd9..2d6c71c4 100644 --- a/src/odt/executeOdtToMarkdown.ts +++ b/src/odt/executeOdtToMarkdown.ts @@ -1,18 +1,20 @@ -import {OdtToMarkdown} from './OdtToMarkdown'; -import {UnMarshaller} from './UnMarshaller'; -import {DocumentStyles, LIBREOFFICE_CLASSES} from './LibreOffice'; -import {generateDocumentFrontMatter} from '../containers/transform/frontmatters/generateDocumentFrontMatter'; -import {OdtProcessor} from './OdtProcessor'; import fs from 'fs'; import path from 'path'; +import {OdtToMarkdown} from './OdtToMarkdown.ts'; +import {UnMarshaller} from './UnMarshaller.ts'; +import {DocumentStyles, LIBREOFFICE_CLASSES} from './LibreOffice.ts'; +import {generateDocumentFrontMatter} from '../containers/transform/frontmatters/generateDocumentFrontMatter.ts'; +import {OdtProcessor} from './OdtProcessor.ts'; + export async function executeOdtToMarkdown(workerData) { - const processor = new OdtProcessor(workerData.odtPath, true); - await processor.load(); + const processor = new OdtProcessor(true); + await processor.load(workerData.odtPath); await processor.unzipAssets(workerData.destinationPath, workerData.realFileName); const content = processor.getContentXml(); const stylesXml = processor.getStylesXml(); const fileNameMap = processor.getFileNameMap(); + const 
xmlMap = processor.getXmlMap(); const parser = new UnMarshaller(LIBREOFFICE_CLASSES, 'DocumentContent'); const document = parser.unmarshal(content); @@ -23,12 +25,12 @@ export async function executeOdtToMarkdown(workerData) { throw Error('No styles unmarshalled'); } - const converter = new OdtToMarkdown(document, styles, fileNameMap); + const converter = new OdtToMarkdown(document, styles, fileNameMap, xmlMap); converter.setRewriteRules(workerData.rewriteRules); if (workerData.realFileName === '_index.md') { - converter.setPicturesDir('./' + workerData.realFileName.replace(/.md$/, '.assets/')); + converter.setPicturesDir('./' + workerData.realFileName.replace(/.md$/, '.assets/'), workerData.picturesDirAbsolute); } else { - converter.setPicturesDir('../' + workerData.realFileName.replace(/.md$/, '.assets/')); + converter.setPicturesDir('../' + workerData.realFileName.replace(/.md$/, '.assets/'), workerData.picturesDirAbsolute); } const markdown = await converter.convert(); const links = Array.from(converter.links); diff --git a/src/odt/postprocess/addEmptyLinesAfterParas.ts b/src/odt/postprocess/addEmptyLinesAfterParas.ts index 0231d231..a7a27754 100644 --- a/src/odt/postprocess/addEmptyLinesAfterParas.ts +++ b/src/odt/postprocess/addEmptyLinesAfterParas.ts @@ -34,6 +34,13 @@ export function addEmptyLinesAfterParas(markdownChunks: MarkdownNodes) { // return; } + if (chunk.children.length > 0) { + const lastChild = chunk.children[chunk.children.length - 1]; + if (lastChild.isTag && lastChild.tag === 'BR/') { + chunk.children.splice(chunk.children.length - 1, 1); + } + } + chunk.children.splice(chunk.children.length, 0, { ...markdownChunks.createNode('EOL/'), comment: 'addEmptyLinesAfterParas.ts: break after ' + chunk.tag, diff --git a/src/odt/postprocess/convertCodeBlockParagraphs.ts b/src/odt/postprocess/convertCodeBlockParagraphs.ts new file mode 100644 index 00000000..f67b42ac --- /dev/null +++ b/src/odt/postprocess/convertCodeBlockParagraphs.ts @@ -0,0 +1,57 @@ 
+import {MarkdownNode, MarkdownNodes} from '../MarkdownNodes.ts'; +import {walkRecursiveSync} from '../markdownNodesUtils.ts'; + +const CODEBLOCK_START = ''; +const CODEBLOCK_END = ''; + +function isCodeBlockPara(chunk: MarkdownNode, type: string) { + if (chunk.isTag === true && ['P'].includes(chunk.tag)) { + if (chunk.children.length !== 1) { + return false; + } + + const firstChunk = chunk.children[0]; + + if ('text' in firstChunk) { + const txt = firstChunk.text; + return (txt === type); + } + + } + return false; +} + +export function convertCodeBlockParagraphs(markdownChunks: MarkdownNodes) { + walkRecursiveSync(markdownChunks.body, async (node, ctx: { nodeIdx: number }) => { + if (isCodeBlockPara(node, CODEBLOCK_START)) { + for (let nodeIdx2 = ctx.nodeIdx + 1; nodeIdx2 < node.parent.children.length; nodeIdx2++) { + const node2 = node.parent.children[nodeIdx2]; + if (isCodeBlockPara(node2, CODEBLOCK_END)) { + console.log('hhh', ctx.nodeIdx, nodeIdx2); + const inner = node.parent.children.splice(ctx.nodeIdx + 1, nodeIdx2 - (ctx.nodeIdx + 1)); + + const toInsert = inner.map((part, idx) => { + if (!part.isTag) { + const pre = markdownChunks.createNode('PRE'); + pre.children.splice(0, 0, ...inner); + pre.payload.lang = (idx < inner.length - 1) ? 'codeblock' : 'codeblockend'; + return pre; + } + if (part.isTag && part.tag === 'P') { + part.tag = 'PRE'; + part.payload.lang = (idx < inner.length - 1) ? 
'codeblock' : 'codeblockend'; + return part; + } + return part; + }); + + const emptyLine = markdownChunks.createNode('EMPTY_LINE/'); + emptyLine.comment = 'addEmptyLines.ts: after codeblock'; + toInsert.push(emptyLine); + node.parent.children.splice(ctx.nodeIdx, 2, ...toInsert); + break; + } + } + } + }); +} diff --git a/src/odt/postprocess/convertGoogleUrls.ts b/src/odt/postprocess/convertGoogleUrls.ts new file mode 100644 index 00000000..fbeb446a --- /dev/null +++ b/src/odt/postprocess/convertGoogleUrls.ts @@ -0,0 +1,11 @@ +import {MarkdownNodes} from '../MarkdownNodes.ts'; +import {walkRecursiveSync} from '../markdownNodesUtils.ts'; +import {replaceUrlsWithIds} from '../../utils/idParsers.ts'; + +export function convertGoogleUrls(markdownChunks: MarkdownNodes) { + walkRecursiveSync(markdownChunks.body, (chunk) => { + if ('text' in chunk) { + chunk.text = replaceUrlsWithIds(chunk.text); + } + }); +} diff --git a/src/odt/postprocess/convertMathMl.ts b/src/odt/postprocess/convertMathMl.ts new file mode 100644 index 00000000..7688669a --- /dev/null +++ b/src/odt/postprocess/convertMathMl.ts @@ -0,0 +1,27 @@ +import {MarkdownNodes} from '../MarkdownNodes.ts'; +import {walkRecursiveSync} from '../markdownNodesUtils.ts'; + +export function convertMathMl(markdownChunks: MarkdownNodes) { + walkRecursiveSync(markdownChunks.body, (chunk, ctx: { nodeIdx: number }) => { + if (!(chunk.isTag && chunk.tag === 'MATHML')) { + return; + } + + const prevChunk = chunk.parent.children[ctx.nodeIdx - 1]; + const nextChunk = chunk.parent.children[ctx.nodeIdx + 1]; + + if (prevChunk?.isTag === false || nextChunk?.isTag === false) { + const text = chunk.children.filter(c => c.isTag === false).map(c => c['text']).join('\n'); + chunk.parent.children.splice(ctx.nodeIdx, 1, { + isTag: false, + text: '$$' + text + '$$' + }); + return; + } + + chunk.tag = 'PRE'; + chunk.payload.lang = 'math'; + const brNode = markdownChunks.createNode('EMPTY_LINE/'); + chunk.parent.children.splice(ctx.nodeIdx 
+ 1, 0, brNode); + }); +} diff --git a/src/odt/postprocess/fixIdLinks.ts b/src/odt/postprocess/fixIdLinks.ts new file mode 100644 index 00000000..d876c743 --- /dev/null +++ b/src/odt/postprocess/fixIdLinks.ts @@ -0,0 +1,31 @@ +import {MarkdownNodes} from '../MarkdownNodes.ts'; +import {extractText, walkRecursiveSync} from '../markdownNodesUtils.ts'; + +export function fixIdLinks(markdownChunks: MarkdownNodes) { + let inHtml = false; + walkRecursiveSync(markdownChunks.body, (chunk) => { + if (chunk.isTag && chunk.tag === 'HTML_MODE/') { + inHtml = true; + return; + } + + if (inHtml) { + return; + } + + if (chunk.isTag && 'A' === chunk.tag) { + if (chunk.payload?.href && chunk.payload?.href.startsWith('#')) { + const innerTxt = extractText(chunk); + const escapedText = innerTxt.toLowerCase().replace(/[^\w]+/g, ' ').trim().replaceAll(' ', '-'); + if (escapedText) { + chunk.payload.href = '#' + escapedText; + } + } + } + }, {}, (chunk) => { + if (chunk.isTag && chunk.tag === 'HTML_MODE/') { + inHtml = false; + return; + } + }); +} diff --git a/src/odt/postprocess/mergeParagraphs.ts b/src/odt/postprocess/mergeParagraphs.ts index b2966779..9fb1812a 100644 --- a/src/odt/postprocess/mergeParagraphs.ts +++ b/src/odt/postprocess/mergeParagraphs.ts @@ -15,6 +15,14 @@ export function mergeParagraphs(markdownChunks: MarkdownNodes) { } if (chunk.isTag && ['P', 'PRE'].includes(chunk.tag)) { + if (chunk.tag === 'PRE' && chunk.payload?.lang === 'math') { + return; + } + if (chunk.tag === 'PRE' && chunk.payload?.lang === 'codeblockend') { + chunk.payload.lang = ''; + return; + } + const nextChunk = chunk.parent.children[ctx.nodeIdx + 1]; if (nextChunk?.isTag && nextChunk.tag === chunk.tag) { const children = nextChunk.children.splice(0, nextChunk.children.length); diff --git a/src/odt/postprocess/postProcess.ts b/src/odt/postprocess/postProcess.ts index 439756b9..6b6d97ea 100644 --- a/src/odt/postprocess/postProcess.ts +++ b/src/odt/postprocess/postProcess.ts @@ -23,6 +23,11 @@ 
import {removeEmptyTags} from './removeEmptyTags.ts'; import {removeExcessiveLines} from './removeExcessiveLines.ts'; import {applyRewriteRules} from './applyRewriteRules.ts'; import {RewriteRule} from '../applyRewriteRule.ts'; +import {convertMathMl} from './convertMathMl.ts'; +import {unwrapEmptyPre} from './unwrapEmptyPre.ts'; +import {convertGoogleUrls} from './convertGoogleUrls.ts'; +import {fixIdLinks} from './fixIdLinks.ts'; +import {convertCodeBlockParagraphs} from './convertCodeBlockParagraphs.ts'; export async function postProcess(chunks: MarkdownNodes, rewriteRules: RewriteRule[]) { convertToc(chunks); @@ -31,6 +36,8 @@ export async function postProcess(chunks: MarkdownNodes, rewriteRules: RewriteRu fixSpacesInsideInlineFormatting(chunks); await fixBoldItalic(chunks); hideSuggestedChanges(chunks); + convertCodeBlockParagraphs(chunks); + convertMathMl(chunks); trimParagraphs(chunks); addEmptyLinesAfterParas(chunks); @@ -40,6 +47,7 @@ export async function postProcess(chunks: MarkdownNodes, rewriteRules: RewriteRu await rewriteHeaders(chunks); mergeParagraphs(chunks); + unwrapEmptyPre(chunks); removePreWrappingAroundMacros(chunks); await removeMarkdownMacro(chunks); postProcessPreMacros(chunks); @@ -48,8 +56,11 @@ export async function postProcess(chunks: MarkdownNodes, rewriteRules: RewriteRu removeEmptyTags(chunks); addEmptyLines(chunks); + fixIdLinks(chunks); removeExcessiveLines(chunks); + convertGoogleUrls(chunks); + applyRewriteRules(chunks, rewriteRules); if (process.env.DEBUG_COLORS) { diff --git a/src/odt/postprocess/processListsAndNumbering.ts b/src/odt/postprocess/processListsAndNumbering.ts index 51a85be5..98da6c6e 100644 --- a/src/odt/postprocess/processListsAndNumbering.ts +++ b/src/odt/postprocess/processListsAndNumbering.ts @@ -143,6 +143,10 @@ export function processListsAndNumbering(markdownChunks: MarkdownNodes) { if (isNumeric) { currentElement.payload.number = parentLevel.payload.number; + if (listStyle?.listLevelStyleNumber?.length > 
0 && listStyle?.listLevelStyleNumber[0].startValue) { + currentElement.payload.number = +listStyle?.listLevelStyleNumber[0].startValue; + parentLevel.payload.number = +listStyle?.listLevelStyleNumber[0].startValue; + } } else { currentElement.payload.bullet = true; parentLevel.payload.bullet = true; @@ -151,7 +155,7 @@ export function processListsAndNumbering(markdownChunks: MarkdownNodes) { } return { ...ctx, level: ctx.level + 1 }; - }, { level: 0 }, (chunk, ctx: { level: number }) => { + }, { level: 0 }, (chunk) => { if (!chunk.isTag) { return; } diff --git a/src/odt/postprocess/unwrapEmptyPre.ts b/src/odt/postprocess/unwrapEmptyPre.ts new file mode 100644 index 00000000..b3e4aa78 --- /dev/null +++ b/src/odt/postprocess/unwrapEmptyPre.ts @@ -0,0 +1,74 @@ +import {MarkdownNodes} from '../MarkdownNodes.ts'; +import {walkRecursiveSync} from '../markdownNodesUtils.ts'; + +export function unwrapEmptyPre(markdownChunks: MarkdownNodes) { + + let inHtml = false; + walkRecursiveSync(markdownChunks.body, (chunk, ctx: { nodeIdx: number }) => { + if (chunk.isTag && chunk.tag === 'HTML_MODE/') { + inHtml = true; + return; + } + + if (inHtml) { + return; + } + + if (!chunk.isTag) { + return; + } + + if (chunk.isTag === true && ['PRE'].includes(chunk.tag)) { + let changed = false; + for (let i = 0; i < chunk.children.length; i++) { + const child = chunk.children[i]; + if (child.isTag && child.tag === 'EOL/') { + child.tag = 'EMPTY_LINE/'; + chunk.children.splice(i, 1); + chunk.parent.children.splice(ctx.nodeIdx - 1, 0, child); + i--; + changed = true; + continue; + } + break; + } + if (changed) { + return { + nodeIdx: ctx.nodeIdx + 1 + }; + } + } + }, {}, (chunk) => { + if (chunk.isTag && chunk.tag === 'HTML_MODE/') { + inHtml = false; + return; + } + }); + + inHtml = false; + walkRecursiveSync(markdownChunks.body, (chunk, ctx: { nodeIdx: number }) => { + if (chunk.isTag && chunk.tag === 'HTML_MODE/') { + inHtml = true; + return; + } + + if (inHtml) { + return; + } + + if 
(!chunk.isTag) { + return; + } + + if (chunk.isTag === true && ['PRE'].includes(chunk.tag)) { + if (chunk.children.length === 0) { + chunk.parent.children.splice(ctx.nodeIdx, 1); + } + } + }, {}, (chunk) => { + if (chunk.isTag && chunk.tag === 'HTML_MODE/') { + inHtml = false; + return; + } + }); +} diff --git a/src/odt2md.sh b/src/odt2md.sh new file mode 100755 index 00000000..fa36eaea --- /dev/null +++ b/src/odt2md.sh @@ -0,0 +1,32 @@ +#!/usr/bin/env bash + +FULL_PATH="$(readlink -f ${BASH_SOURCE[0]})" +MAIN_DIR=$(dirname "$FULL_PATH")/.. +NODE_MODULES=$MAIN_DIR/node_modules + +POSITIONAL_ARGS=() +INSPECT="" + +ORIG_ARGS=$@ + +while [[ $# -gt 0 ]]; do + case $1 in + --inspect) + INSPECT="$1" + shift # past argument + ;; + *) + if [[ -z "$CMD" ]]; then + CMD=$1 + fi + POSITIONAL_ARGS+=("$1") # save positional arg + shift # past argument + ;; + esac +done + +if test "$INSPECT" = "--inspect"; then + /usr/bin/env node --inspect --no-warnings --enable-source-maps --experimental-specifier-resolution=node --loader ts-node/esm $MAIN_DIR/src/cli/odt2md.ts $ORIG_ARGS +else + /usr/bin/env node --no-warnings --enable-source-maps --experimental-specifier-resolution=node --loader ts-node/esm $MAIN_DIR/src/cli/odt2md.ts $ORIG_ARGS +fi diff --git a/src/utils/idParsers.ts b/src/utils/idParsers.ts index 53407892..9a7ab0f2 100644 --- a/src/utils/idParsers.ts +++ b/src/utils/idParsers.ts @@ -15,6 +15,12 @@ function stripUrlSuffix(id) { return null; } +export function replaceUrlsWithIds(text: string): string { + text = text.replaceAll('https://drive.google.com/open?id%3D', 'https://drive.google.com/open?id='); + text = text.replaceAll('https://drive.google.com/open?id=', 'gdoc:'); + return text; +} + export function urlToFolderId(url: string): string | null { if (!url) { return null; diff --git a/src/wikigdrive.sh b/src/wikigdrive.sh index 32a5ed2d..448aa299 100755 --- a/src/wikigdrive.sh +++ b/src/wikigdrive.sh @@ -19,7 +19,7 @@ while [[ $# -gt 0 ]]; do INSPECT="$1" shift # past 
argument ;; - --link_mode | --workdir | --drive | --debug | --client_id | --client_secret | --service_account | --share_email | --server_port) + --link_mode | --workdir | --drive | --debug | --client_id | --client_secret | --service_account | --share_email | --server_port | --transform_subdir) POSITIONAL_ARGS+=("$1") # save positional arg1 POSITIONAL_ARGS+=("$2") # save positional arg2 shift # past argument diff --git a/test/git/GitTest.ts b/test/git/GitTest.ts index ef43efc3..82330383 100644 --- a/test/git/GitTest.ts +++ b/test/git/GitTest.ts @@ -1,12 +1,14 @@ import {assert} from 'chai'; import winston from 'winston'; -import {instrumentLogger} from '../../src/utils/logger/logger'; -import {GitScanner} from '../../src/git/GitScanner'; -import {createTmpDir} from '../utils'; import fs from 'fs'; +import {rmSync, unlinkSync} from 'node:fs'; import path from 'path'; import {execSync} from 'child_process'; +import {instrumentLogger} from '../../src/utils/logger/logger.ts'; +import {GitScanner} from '../../src/git/GitScanner.ts'; +import {createTmpDir} from '../utils.ts'; + const COMMITER1 = { name: 'John', email: 'john@example.tld' }; @@ -609,4 +611,78 @@ describe('GitTest', function () { } }); + it('test remove assets', async () => { + const localRepoDir: string = createTmpDir(); + + try { + const scannerLocal = new GitScanner(logger, localRepoDir, COMMITER1.email); + await scannerLocal.initialize(); + + const commit = async (filePaths = [], removeFilePaths = []) => { + const fileAssetsPaths = []; + for (const filePath of filePaths.filter(path => path.endsWith('.md'))) { + const assetsPath = filePath.substring(0, filePath.length - 3) + '.assets'; + if (fs.existsSync(path.join(scannerLocal.rootPath, assetsPath))) { + fileAssetsPaths.push(assetsPath); + } + } + const removeFileAssetsPaths = []; + for (const fileToRemove of removeFilePaths + .filter(filePath => filePath.endsWith('.md')) + .map(filePath => filePath.substring(0, filePath.length - 3) + '.assets')) { + + 
removeFileAssetsPaths.push(fileToRemove); + } + + filePaths.push(...fileAssetsPaths); + removeFilePaths.push(...removeFileAssetsPaths); + + await scannerLocal.commit('initial commit', filePaths, removeFilePaths, COMMITER1); + }; + + fs.writeFileSync(path.join(scannerLocal.rootPath, 'test.md'), 'test'); + fs.mkdirSync(path.join(scannerLocal.rootPath, 'test.assets')); + + fs.writeFileSync(path.join(scannerLocal.rootPath, 'test.assets', '1.png'), '1'); + fs.writeFileSync(path.join(scannerLocal.rootPath, 'test.assets', '2.png'), '2'); + + { + const changes = await scannerLocal.changes({ includeAssets: true }); + assert.equal(changes.length, 4); + assert.ok(!!changes.find(item => item.path === '.gitignore')); + assert.ok(!!changes.find(item => item.path === 'test.md')); + assert.ok(!!changes.find(item => item.path === 'test.assets/1.png')); + assert.ok(!!changes.find(item => item.path === 'test.assets/2.png')); + } + + await commit(['.gitignore', 'test.md'], []); + + { + const changes = await scannerLocal.changes({ includeAssets: true }); + assert.equal(changes.length, 0); + } + + unlinkSync(path.join(scannerLocal.rootPath, 'test.md')); + rmSync(path.join(scannerLocal.rootPath, 'test.assets'), { recursive: true, force: true }); + + { + const changes = await scannerLocal.changes({ includeAssets: true }); + assert.equal(changes.length, 3); + assert.ok(!!changes.find(item => item.path === 'test.md')); + assert.ok(!!changes.find(item => item.path === 'test.assets/1.png')); + assert.ok(!!changes.find(item => item.path === 'test.assets/2.png')); + } + + await commit([], ['test.md']); + + { + const changes = await scannerLocal.changes({ includeAssets: true }); + assert.equal(changes.length, 0); + } + + } finally { + fs.rmSync(localRepoDir, { recursive: true, force: true }); + } + }); + }); diff --git a/test/md_html/header_links.html b/test/md_html/header_links.html new file mode 100644 index 00000000..d4a48fdf --- /dev/null +++ b/test/md_html/header_links.html @@ -0,0 +1,15 @@ 
+ + + + + + +

Developer Guide

+

See Node setup on the system for prereq.


+

Example Google Drive Shared Folder


+

Node setup on the system

+

using OS

+ + diff --git a/test/md_html/header_links.md b/test/md_html/header_links.md new file mode 100644 index 00000000..02b7cace --- /dev/null +++ b/test/md_html/header_links.md @@ -0,0 +1,9 @@ +# Developer Guide + +See [Node setup on the system](#node-setup-on-the-system) for prereq. + +[Example Google Drive Shared Folder](https://drive.google.com/open?id=0AIkOKXbzWCtSUk9PVA) + +# Node setup on the system + +## using OS diff --git a/test/md_html/markdownToHtmlTest.ts b/test/md_html/markdownToHtmlTest.ts index 1d7b2935..33222208 100644 --- a/test/md_html/markdownToHtmlTest.ts +++ b/test/md_html/markdownToHtmlTest.ts @@ -48,7 +48,7 @@ describe('markdownToHtml', () => { assert.equal(map['../subdir/diagram.svg'], '/aaa/subdir/diagram.svg'); const serilzd = render(dom); - console.log(serilzd); + assert.ok(!!serilzd); }); it('test markdownToHtml pre', async () => { @@ -59,10 +59,8 @@ describe('markdownToHtml', () => { const dom = htmlparser2.parseDocument(html); - console.log(markdown); - const serilzd = render(dom); - console.log(serilzd); + assert.ok(!!serilzd); }); it('test markdownToHtml link_to_image', async () => { @@ -73,8 +71,6 @@ describe('markdownToHtml', () => { const dom = htmlparser2.parseDocument(html); - console.log(markdown); - const serilzd = render(dom); console.log(serilzd); }); @@ -85,4 +81,10 @@ describe('markdownToHtml', () => { assert.ok(compareTexts(testHtml, markdown)); }); + it('test ./header_links', async () => { + const testHtml = fs.readFileSync(__dirname + '/header_links.html').toString(); + const markdown = await transformMd('header_links'); + assert.ok(compareTexts(testHtml, markdown)); + }); + }); diff --git a/test/md_html/paras.html b/test/md_html/paras.html index 01ac273f..bc7f92cd 100644 --- a/test/md_html/paras.html +++ b/test/md_html/paras.html @@ -6,7 +6,7 @@ -

Why not make a website front end to a Google shared drive?

+

Why not make a website front end to a Google shared drive?

Our goals are to be able to take versions of the content and commit them along with a version of the code at a point in time. By just making a website, it would allow for real-time viewing of the content but no way to go to a specific version of the documentation at a given time.


A website front end is a goal for real-time testing of the viewing experience, but initially, we want to make markdown that can be committed.


diff --git a/test/odt/OdtLoad.test.ts b/test/odt/OdtLoad.test.ts index c88ab509..b1c55386 100644 --- a/test/odt/OdtLoad.test.ts +++ b/test/odt/OdtLoad.test.ts @@ -1,13 +1,14 @@ -import {UnMarshaller} from '../../src/odt/UnMarshaller'; -import {DocumentContent, DocumentStyles, LIBREOFFICE_CLASSES} from '../../src/odt/LibreOffice'; -import {OdtProcessor} from '../../src/odt/OdtProcessor'; -import {FileContentService} from '../../src/utils/FileContentService'; -import {OdtToMarkdown} from '../../src/odt/OdtToMarkdown'; import {assert} from 'chai'; import path from 'path'; import { fileURLToPath } from 'url'; +import {UnMarshaller} from '../../src/odt/UnMarshaller.ts'; +import {DocumentStyles, LIBREOFFICE_CLASSES} from '../../src/odt/LibreOffice.ts'; +import {OdtProcessor} from '../../src/odt/OdtProcessor.ts'; +import {FileContentService} from '../../src/utils/FileContentService.ts'; +import {OdtToMarkdown} from '../../src/odt/OdtToMarkdown.ts'; + const __filename = fileURLToPath(import.meta.url); const __dirname = path.dirname(__filename); @@ -15,8 +16,8 @@ describe('OdtLoad', () => { it('test content.xml transform to object', async () => { const fileSystem = new FileContentService(__dirname); const odtPath = fileSystem.getRealPath() + '/' + 'example_document.odt'; - const processor = new OdtProcessor(odtPath); - await processor.load(); + const processor = new OdtProcessor(); + await processor.load(odtPath); const content = processor.getContentXml(); @@ -25,7 +26,7 @@ describe('OdtLoad', () => { // console.log(JSON.stringify(document, null, 2)); - const converter = new OdtToMarkdown(document, new DocumentStyles(), processor.getFileNameMap()); + const converter = new OdtToMarkdown(document, new DocumentStyles(), processor.getFileNameMap(), processor.getXmlMap()); const md = await converter.convert(); assert.ok(md); }); diff --git a/test/odt_md/Issues.test.ts b/test/odt_md/Issues.test.ts index 696eae47..7d7c6c9a 100644 --- a/test/odt_md/Issues.test.ts +++ 
b/test/odt_md/Issues.test.ts @@ -59,13 +59,27 @@ describe('MarkDownTransformTest', () => { assert.ok(compareTexts(testMarkdown, markdown, false, 'issue-443.md')); }); + it('test ./our-docs', async () => { + // https://github.com/mieweb/wikiGDrive/issues/443 + const testMarkdown = fs.readFileSync(__dirname + '/our-docs.md').toString(); + const markdown = await transformOdt('our-docs'); + assert.ok(compareTexts(testMarkdown, markdown, false, 'our-docs.md')); + }); + + it('test ./header-link', async () => { + // https://github.com/mieweb/wikiGDrive/issues/443 + const testMarkdown = fs.readFileSync(__dirname + '/header-link.md').toString(); + const markdown = await transformOdt('header-link'); + assert.ok(compareTexts(testMarkdown, markdown, false, 'header-link.md')); + }); + }); async function transformOdt(id: string) { const folder = new FileContentService(__dirname); const odtPath = folder.getRealPath() + '/' + id + '.odt'; - const processor = new OdtProcessor(odtPath); - await processor.load(); + const processor = new OdtProcessor(); + await processor.load(odtPath); if (!processor.getContentXml()) { throw Error('No odt processed'); } diff --git a/test/odt_md/MarkDownTransform.test.ts b/test/odt_md/MarkDownTransform.test.ts index 4cf18ac1..5f8b87c9 100644 --- a/test/odt_md/MarkDownTransform.test.ts +++ b/test/odt_md/MarkDownTransform.test.ts @@ -130,20 +130,26 @@ describe('MarkDownTransformTest', () => { assert.ok(compareTexts(testMarkdown, markdown, false)); }); + it('test ./code-blocks.md', async () => { + const testMarkdown = fs.readFileSync(__dirname + '/code-blocks.md').toString(); + const markdown = await transformOdt('code-blocks'); + assert.ok(compareTexts(testMarkdown, markdown, false)); + }); + }); async function transformOdt(id: string) { const folder = new FileContentService(__dirname); const odtPath = folder.getRealPath() + '/' + id + '.odt'; - const processor = new OdtProcessor(odtPath); - await processor.load(); + const processor = new OdtProcessor(); 
+ await processor.load(odtPath); if (!processor.getContentXml()) { throw Error('No odt processed'); } - return transform(processor.getContentXml(), processor.getStylesXml()); + return await transform(processor.getContentXml(), processor.getStylesXml(), processor); } -async function transform(contentXml: string, stylesXml: string) { +async function transform(contentXml: string, stylesXml: string, processor: OdtProcessor) { const parser = new UnMarshaller(LIBREOFFICE_CLASSES, 'DocumentContent'); const document: DocumentContent = parser.unmarshal(contentXml); if (!document) { @@ -154,6 +160,6 @@ async function transform(contentXml: string, stylesXml: string) { if (!styles) { throw Error('No styles unmarshalled'); } - const converter = new OdtToMarkdown(document, styles); + const converter = new OdtToMarkdown(document, styles, processor.getFileNameMap(), processor.getXmlMap()); return await converter.convert(); } diff --git a/test/odt_md/RewriteRules.test.ts b/test/odt_md/RewriteRules.test.ts index 1ae71d24..6aa80374 100644 --- a/test/odt_md/RewriteRules.test.ts +++ b/test/odt_md/RewriteRules.test.ts @@ -42,8 +42,8 @@ describe('RewriteRulesTest', () => { async function transformOdt(id: string) { const folder = new FileContentService(__dirname); const odtPath = folder.getRealPath() + '/' + id + '.odt'; - const processor = new OdtProcessor(odtPath); - await processor.load(); + const processor = new OdtProcessor(); + await processor.load(odtPath); if (!processor.getContentXml()) { throw Error('No odt processed'); } diff --git a/test/odt_md/code-blocks.md b/test/odt_md/code-blocks.md new file mode 100644 index 00000000..d5af1583 --- /dev/null +++ b/test/odt_md/code-blocks.md @@ -0,0 +1,30 @@ +**This** is a line + +**This** is line two + +* This is a bulleted test +* Second line + +``` +void hi() { +} + +hi(); +``` + +``` +function hi2() { +} + +hi2(); +``` + +This is a cool new wikiGDrive! With a change. 
+ +![](100000000000080100000600B855EF66465B18B1.jpg) + +![](1000000000000801000006008E6541DADAF62125.jpg) + +![](10000000000009C400000753DA4A57339A651C1F.jpg) + +![](10000000000009C40000075322E66BBDAA567D45.jpg) diff --git a/test/odt_md/code-blocks.odt b/test/odt_md/code-blocks.odt new file mode 100644 index 00000000..893874ce Binary files /dev/null and b/test/odt_md/code-blocks.odt differ diff --git a/test/odt_md/example-document.md b/test/odt_md/example-document.md index 07b1c4fd..8e7a0d81 100644 --- a/test/odt_md/example-document.md +++ b/test/odt_md/example-document.md @@ -64,7 +64,9 @@ After subtable ## Image -![](1000000000000200000001804F9AAE46CD6D0DF2.gif) +![](1000000000000640000001CF60FB0243CA95EC14.jpg) + +![](10000000000003F0000003F092F85671239C65F9.jpg) ## Preformatted Text @@ -80,7 +82,7 @@ Code blocks are part of the Markdown spec, but syntax highlighting isn't. Howeve ### Typescript / Javascript -{{% markdown %}} +{{markdown}} ```javascript class MyClass { @@ -98,7 +100,7 @@ module MyModule { declare magicNumber number; myArray.forEach(() => { }); // fat arrow syntax ``` -{{% /markdown %}} +{{/markdown}} ## Video @@ -136,10 +138,20 @@ Some **bold** **_boldanditalic_*** italic* text ### Using the actual equation object +```math +E = m c^{2} +``` + +```math +e^{i \pi} - 1 = 0 +``` + ### Text equivalent *E=mc**2* +Inline $$E = m c^{2}$$ math + ## Footnotes 1Footnotes should display as a footnote, and should always display at the very end of the document (page)**?** This is some sample text with a footnote. 
diff --git a/test/odt_md/example-document.odt b/test/odt_md/example-document.odt index 3ac88a37..88fd51d8 100644 Binary files a/test/odt_md/example-document.odt and b/test/odt_md/example-document.odt differ diff --git a/test/odt_md/header-link.md b/test/odt_md/header-link.md new file mode 100644 index 00000000..2dda8f47 --- /dev/null +++ b/test/odt_md/header-link.md @@ -0,0 +1,14 @@ +# Developer Guide + +See [Node setup on the system](#node-setup-on-the-system) for prereq. + +[Example Google Drive Shared Folder](gdoc:0AIkOKXbzWCtSUk9PVA) + +# Node setup on the system + +## using OS + +``` +curl -sL https://deb.nodesource.com/setup_16.x | sudo bash - +sudo apt install nodejs +``` diff --git a/test/odt_md/header-link.odt b/test/odt_md/header-link.odt new file mode 100644 index 00000000..42a70628 Binary files /dev/null and b/test/odt_md/header-link.odt differ diff --git a/test/odt_md/list-indent.md b/test/odt_md/list-indent.md index 22c15989..5771a526 100644 --- a/test/odt_md/list-indent.md +++ b/test/odt_md/list-indent.md @@ -3,7 +3,7 @@ ![](10000201000004BE0000011D30E30AE192655040.png) -3. When adding action items to panels, the [Representative Event panel action](#tyjcwt) is usually added to the panel first. Fill out all of the necessary fields according to the information acquired in the Health Surveillance matrix, and click Submit to save the panel action to the panel. +3. When adding action items to panels, the [Representative Event panel action](#representative-event-panel-action) is usually added to the panel first. Fill out all of the necessary fields according to the information acquired in the Health Surveillance matrix, and click Submit to save the panel action to the panel. 1. Action Name: Required field. The Action Name is usually the name of a test/procedure that is the component/action of the panel. The name will be displayed listings and dialogues throughout the system. 2. 
Lead Time: The Lead Time translates to the number of days prior to the Trigger Date the panel action becomes visible and is created within the system. This defines how many days before the Trigger Date that the panel/orders will populate on the Due List. Keep Lead Times consistent when setting multiple action items in a panel; otherwise, each component of the panel will have different Due Dates if there are different Lead Times on each. Emails can be configured to send email notifications, as needed, with a list of associated charts/employees that will be due. The recipient has the time between receiving the email and the panel action Trigger Date to notify Health Services of any issues or mistakes with the list. Emails to the member/chart will not be sent until the actual Trigger Date. (Email reminders are separately configured on a per client basis. Email notification may not apply to all clients). @@ -13,7 +13,7 @@ If the panel action is for a type of exposure, users will not want to set any Le {{% /tip %}} 3. Required for Certification: Select this to indicate the panel action is required for members of the panel. Leave unchecked if the panel action is voluntary. If checked, a panel member failing or becoming overdue for the action will become de-certified from the panel. - 4. Indication Rule: Users can select any action rule found in the Action Rules editor, using the drop-down. For more information on the Action Rules, see the [Health Surveillance Action Rules](gdoc:10wTqIF8gtUDBbJmbk_LjlUeNmtU_vvbVFoVWTZnuMqc) documentation. The action rule must evaluate to True in order for this panel action to trigger for a panel member. [Action Rules](#1fob9te) are usually configured by an MIE Developer after an MIE Implementer has collected all of the necessary details for the configuration. + 4. Indication Rule: Users can select any action rule found in the Action Rules editor, using the drop-down. 
For more information on the Action Rules, see the [Health Surveillance Action Rules](gdoc:10wTqIF8gtUDBbJmbk_LjlUeNmtU_vvbVFoVWTZnuMqc) documentation. The action rule must evaluate to True in order for this panel action to trigger for a panel member. [Action Rules](#action-rules) are usually configured by an MIE Developer after an MIE Implementer has collected all of the necessary details for the configuration. 1. Indication Rules can be used to only trigger the panel action for a member of the panel, if they are part of a specific department, for example. Or another more complex example would be a panel action configured to trigger a Hep3rd injection, only if the member of the panel had the second Hepatitis injection given within the last 8 weeks. 5. Contraindication Rule: Users can select any action rule found in the Action Rules editor, using the drop-down. The action rule must evaluate to False in order for this panel action to trigger for a panel member. For more information on the Action Rules, see the [Health Surveillance Action Rules](gdoc:10wTqIF8gtUDBbJmbk_LjlUeNmtU_vvbVFoVWTZnuMqc) documentation. 6. Trigger Type: Entry, Routine, Exit. Select the type of trigger, to define at what point in the panel member's current role/job status, the regulating agency or company requires the panel action to be completed. Entry will trigger when a panel member is first put in the panel. The Panel Evaluator scheduled job will run every day, triggering panels as appropriate, based on the the configured panel actions and the trigger type selected. 
diff --git a/test/odt_md/our-docs.md b/test/odt_md/our-docs.md new file mode 100644 index 00000000..e52c75cb --- /dev/null +++ b/test/odt_md/our-docs.md @@ -0,0 +1,171 @@ +# wikiGDrive + +Google Drive to MarkDown synchronization + +[![Develop Server Deploy](https://github.com/mieweb/wikiGDrive/actions/workflows/DevelopServerDeploy.yml/badge.svg?branch=develop&event=push)](https://github.com/mieweb/wikiGDrive/actions/workflows/DevelopServerDeploy.yml) [![Prod Server Deploy](https://github.com/mieweb/wikiGDrive/actions/workflows/ProdServerDeploy.yml/badge.svg?branch=master&event=push)](https://github.com/mieweb/wikiGDrive/actions/workflows/ProdServerDeploy.yml) [![CodeQL](https://github.com/mieweb/wikiGDrive/actions/workflows/codeql-analysis.yml/badge.svg?branch=master&event=push)](https://github.com/mieweb/wikiGDrive/actions/workflows/codeql-analysis.yml?query=event%3Apush+branch%3Amaster+) + +WikiGDrive is a node app that uses the [Google Drive API](https://developers.google.com/drive/api/v3/quickstart/nodejs) to transform Google Docs and Drawings into markdown. + +![Diagram](gdoc:1TjB_v1gcBy23MtBQKvh5FZ3fFUp520QZ) + +[Google Drive Notes](gdoc:1H6vwfQXIexdg4ldfaoPUjhOZPnSkNn6h29WD6Fi-SBY) | [Github Project](https://github.com/mieweb/wikiGDrive/projects) | [Github Developer Notes](gdoc:1NJUxTnJHgkMO3JV1v_DFPsVsl8nDhweyDvNW0y98TGs) + +With a "Shared Drive" as the key, WikiGDrive: + +* Reads all the files from a Google "Shared Drive" +* Builds a map of the driveId (URL) to the pathname in the "Shared Drive" +* For each Google Document: + * Converts to a Markdown file with the path (instead of the driveId for the file) + * Changes driveId to the path (eg: 12lvdxKgGsD.../edit would be changed to /filename) + * Support diagrams as SVG (and map the URLs in the diagram) + +WikiGDrive scans for changes in the drive and then refreshes the local converted files. 
+ +## Developer Documentation + +* [Developer README](gdoc:1NJUxTnJHgkMO3JV1v_DFPsVsl8nDhweyDvNW0y98TGs) +* [Internals](gdoc:1ug9ASkGlkwJHyYRoFB4OY3GJCDFqQikz2UN0HTQFs88) + +## Install from NPM + +*Not currently working* + +See [https://github.com/mieweb/wikiGDrive/issues/297](https://github.com/mieweb/wikiGDrive/issues/297) for status. + +[![View this project on NPM](https://img.shields.io/npm/v/@mieweb/wikigdrive.svg)](https://www.npmjs.com/package/@mieweb/wikigdrive) [![NPM downloads](https://img.shields.io/npm/dm/@mieweb/wikigdrive.svg)](https://www.npmjs.com/package/@mieweb/wikigdrive) ![Publish wikigdrive to NPM](https://github.com/mieweb/wikiGDrive/workflows/Publish%20wikigdrive%20to%20NPM/badge.svg) +``` +npm i -g @mieweb/wikigdrive +``` + +## App setup + +1. Go to [console](https://console.developers.google.com/) +2. Create New Project +3. Enable Apis -> add Google Drive API +4. Enable Apis -> Add Google Docs API +5. Credentials -> Create Credentials (OAuth Client ID) -> Other ( see authorization section ) + +## Usage and options + +Init workdir with (creates internal .wgd directory): + +``` +wikigdrive init --drive "https://drive.google.com/drive/folders/FOLDER_ID" + +--service_account=wikigdrive.json +--config /location/of/.wgd - Location of config file +--dest /location/of/downloaded/content - Destination for downloaded and converted markdown files + +--client_id - ID of google app, alternatively can be passed in .env or through environment variable CLIENT_ID; +--client_secret - Secret of google app, alternatively can be passed in .env or through environment variable CLIENT_SECRET; + +--link_mode - Style of internal markdown links +--link_mode mdURLs - `/filename.md` +--link_mode dirURLs - `/filename/` +--link_mode uglyURLs - `/filename.html` - see https://gohugo.io/getting-started/configuration/ +``` + +List available drive ids that wikigdrive has access to on Google: + +``` +wikigdrive drives +``` + +Run one time documents pull + +``` +wikigdrive pull 
+``` + +Run server mode + +``` +wikigdrive server +``` + +## Example usage with Hugo Generator + +1. Install [hugo](https://gohugo.io/getting-started/quick-start/) +2. Create a New Site +``` +hugo new site quickstart +``` + +3. Add a Theme +``` +cd quickstart +git init +git submodule add https://github.com/budparr/gohugo-theme-ananke.git themes/ananke +echo 'theme = "ananke"' >> config.toml +``` + +4. Install wikigdrive +``` +npm i -g @mieweb/wikigdrive +``` + +5. Sync GDrive +``` +wikigdrive init --drive "https://drive.google.com/drive/folders/FOLDER_ID" --dest ./content --link_mode uglyURLs +wikigdrive pull +``` + +Note that by default you need to use [uglyURLs](https://gohugo.io/content-management/urls/#ugly-urls) with Hugo. + +6. Generate HTML +``` +hugo +``` + +or start server for development: + +``` +hugo server +``` + +## Example usage with Hexo Generator + +1. Install [hexo](https://hexo.io/docs/main.html) +``` +npm i -g hexo-cli +``` + +2. Create a New Site +``` +hexo init quickstart +``` + +3. Add a Theme + +By default, hexo installs landscape theme. If you need another one check [hexo themes](https://hexo.io/docs/themes) + +4. Install wikigdrive +``` +npm i -g @mieweb/wikigdrive +``` + +5. Sync GDrive +``` +wikigdrive init --drive "https://drive.google.com/drive/folders/FOLDER_ID" --dest ./source --link_mode uglyURLs +wikigdrive pull +``` + +6. Generate HTML +``` +hexo generate +``` + +or start server for development: + +``` +hexo serve +``` + +## Authorization + +There are two methods: individual credentials or a service account. + +* [Individual](https://cloud.google.com/docs/authentication/end-user#creating_your_client_credentials) +* [Service Account](https://developers.google.com/identity/protocols/oauth2/service-account#delegatingauthority) + +Note: If the authentication is successful, but the account does not have access to documents in gdrive, there is currently no way to know if the directory is empty or just not possible to see. 
diff --git a/test/odt_md/our-docs.odt b/test/odt_md/our-docs.odt new file mode 100644 index 00000000..25bf27af Binary files /dev/null and b/test/odt_md/our-docs.odt differ diff --git a/test/odt_md/project-overview.md b/test/odt_md/project-overview.md index 6b5f4155..10736ba2 100644 --- a/test/odt_md/project-overview.md +++ b/test/odt_md/project-overview.md @@ -6,9 +6,9 @@ * [Collisions with Filenames](#collisions-with-filenames) * [Table of Contents and Index](#table-of-contents-and-index) * [Table of Contents](#table-of-contents) - * [Index](#_25nwvh7c83vs) + * [Index](#index) * [Markdown Cleanup](#markdown-cleanup) - * [Macros to Hugo Shortcodes (issue)](#_m5135xwpqj94) + * [Macros to Hugo Shortcodes (issue)](#macros-to-hugo-shortcodes-issue) * [Images](#images) * [FAQ](#faq) @@ -22,9 +22,9 @@ * [Collisions with Filenames](#collisions-with-filenames) * [Table of Contents and Index](#table-of-contents-and-index) * [Table of Contents](#table-of-contents) - * [Index](#_25nwvh7c83vs) + * [Index](#index) * [Markdown Cleanup](#markdown-cleanup) - * [Macros to Hugo Shortcodes (issue)](#_m5135xwpqj94) + * [Macros to Hugo Shortcodes (issue)](#macros-to-hugo-shortcodes-issue) * [Images](#images) * [FAQ](#faq) @@ -66,16 +66,16 @@ The app must: 1. be able to be run once or run as a daemon (looking for changes in near real-time) 2. Take changes from gdrive and propagate them down to the local file system (likely a git repo) -3. Detect file [moves and renames](#renames-and-redirecting) from prior runs and leave redirects in the local file system to the new file or directory. +3. Detect file [moves and renames](#moves-and-renames) from prior runs and leave redirects in the local file system to the new file or directory. 4. Convert google docs to markdown while preserving as much of the meaning of the document. (Headings, images, drawings, tables, etc). 1. Each generated file should have parsable comments embedded in the original source google doc is known. 2. 
Embedded images (not originally stored on the shared folder will have to be extracted to the filesystem with a hashing system to prevent duplicate copies files in cases where images are pasted into multiple documents. 5. Convert google drawings to svg and fix up urls as well. 6. Download images and place them in the proper folder. Embed metadata in the image pointing to the source on the google drive. It could be a .md file with the same name as the image. 7. Translate hyperlinks to the filesystem relative paths if they exist in the shared drive (both within Docs and Drawings). Must support both document urls and heading URLs. -8. Construct a [table of contents and an index](#table-of-contents-and-index) from all of the documents in the shared drive. +8. Construct a [table of contents and an index](#table-of-contents-and-an-index) from all of the documents in the shared drive. 1. It should be parsable so Javascript on the client could search and build navigation - 2. There should be generated markdown file ([toc.md](#table-of-contents) and [index.md](#_25nwvh7c83vs)) + 2. There should be generated markdown file ([toc.md](#toc-md) and [index.md](#index-md)) Later phase: diff --git a/test/odt_md/strong-headers.md b/test/odt_md/strong-headers.md index 8a6f3db3..b21393a9 100644 --- a/test/odt_md/strong-headers.md +++ b/test/odt_md/strong-headers.md @@ -22,7 +22,7 @@ Some clients require the migration of historical, or non-active HS memberships. #### Open Orders -In some cases, open or pending orders are required for migration for employees with overdue but active panel memberships. This use case is discussed further in the section on [How to determine the Next Due Date on a HS Panel](#3rdcrjn). +In some cases, open or pending orders are required for migration for employees with overdue but active panel memberships. This use case is discussed further in the section on [How to determine the Next Due Date on a HS Panel](#how-to-determine-the-next-due-date-on-a-hs-panel). 
#### Historical Orders diff --git a/test/odt_md/suggest.md b/test/odt_md/suggest.md index 90f6c682..39cb45e4 100644 --- a/test/odt_md/suggest.md +++ b/test/odt_md/suggest.md @@ -3,7 +3,6 @@ This is a readme ``` - Code block testing diff --git a/test/utils.ts b/test/utils.ts index 8e1145cc..336d9f37 100644 --- a/test/utils.ts +++ b/test/utils.ts @@ -4,8 +4,8 @@ import path from 'path'; import {createPatch} from 'diff'; import {ansi_colors} from '../src/utils/logger/colors.ts'; -export function createTmpDir() { - return fs.mkdtempSync(path.join(os.tmpdir(), 'wg-')); +export function createTmpDir(prefix = 'wg-') { + return fs.mkdtempSync(path.join(os.tmpdir(), prefix)); } // eslint-disable-next-line @typescript-eslint/no-unused-vars diff --git a/website/docs/developer-guide.md b/website/docs/developer-guide.md index f3a3f030..9f3b191e 100644 --- a/website/docs/developer-guide.md +++ b/website/docs/developer-guide.md @@ -4,7 +4,7 @@ navWeight: -15 --- # Developer Guide -See [Node setup on the system](#Node-setup-on-the-system) for prereq. +See [Node setup on the system](#node-setup-on-the-system) for prereq. [Example Google Drive Shared Folder](https://drive.google.com/open?id=0AIkOKXbzWCtSUk9PVA) diff --git a/website/docs/usage/wikigdrive-usage.md b/website/docs/usage/wikigdrive-usage.md index 38ed8bde..c5879960 100644 --- a/website/docs/usage/wikigdrive-usage.md +++ b/website/docs/usage/wikigdrive-usage.md @@ -16,6 +16,7 @@ wikigdrive config --client_id --client_secret --service_account=./private_key.json + --transform_subdir=/content wikigdrive server --link_mode [mdURLs|dirURLs|uglyURLs] @@ -60,6 +61,7 @@ Options: --client_id GOOGLE_DRIVE_API CLIENT_ID --client_secret GOOGLE_DRIVE_API CLIENT_SECRET --service_account GOOGLE_DRIVE_API SERVICE_ACCOUNT_JSON file location +--transform_subdir markdown destination subdirectory ``` ## wikigdrive drives usage