const fs = require("fs"); const path = require("path"); const http = require("http"); const https = require("https"); const { execSync } = require("child_process"); const srcDir = "src"; const outDir = "build"; const downloads = [ { url: "https://coomer.su/api/v1/creators.txt", out: path.join(srcDir, "coomer.su.json"), dont_redownload: true, after: function (pathd) { let data = fs.readFileSync(pathd, "utf8"); let creators = JSON.parse(data); let newCreators = {}; for (let i = 0; i < creators.length; i++) { /* {"id":"carbcollector","name":"carbcollector","service":"onlyfans","indexed":1703401003,"updated":1705076060,"favorited":19} */ let creator = creators[i]; let id = creator.id; let service = creator.service; let name = creator.name; let creatorObj = { id: id, name: name, }; if (newCreators[service] === undefined) { newCreators[service] = []; } newCreators[service].push(creatorObj); } let ret = JSON.stringify(newCreators); fs.writeFileSync(path.join(srcDir, "coomer.su-modded.json"), ret); console.log( "coomer.su-modded.json is " + (data.length - ret.length) + " smaller, roughly " + Math.round(((data.length - ret.length) / data.length) * 100) + "%" ); }, }, { url: "https://kemono.su/api/v1/creators.txt", out: path.join(srcDir, "kemono.su.json"), dont_redownload: true, after: function (pathd) { let data = fs.readFileSync(pathd, "utf8"); let creators = JSON.parse(data); let newCreators = {}; for (let i = 0; i < creators.length; i++) { /* {"id":"carbcollector","name":"carbcollector","service":"onlyfans","indexed":1703401003,"updated":1705076060,"favorited":19} */ let creator = creators[i]; let id = creator.id; let service = creator.service; if (service === "discord") continue; let name = creator.name; let creatorObj = { id: id, name: name, }; if (newCreators[service] === undefined) { newCreators[service] = []; } newCreators[service].push(creatorObj); } let ret = JSON.stringify(newCreators); fs.writeFileSync(path.join(srcDir, "kemono.su-modded.json"), ret); console.log( "kemono.su-modded.json is " + (data.length - ret.length) + " smaller, roughly " + Math.round(((data.length - ret.length) / data.length) * 100) + "%" ); }, }, ]; function getRemoteFile(file, url) { return new Promise((resolve, reject) => { let localFile = fs.createWriteStream(file); const client = url.startsWith("https") ? 
https : http; const request = client.get(url, function (response) { // var len = parseInt(response.headers['content-length'], 10); // var cur = 0; // var total = len / 1048576; //1048576 - bytes in 1 Megabyte // response.on('data', function(chunk) { // cur += chunk.length; // showProgress(file, cur, len, total); // }); response.on("end", function () { console.log("[" + url + "] Download complete => " + file); resolve(); }); response.pipe(localFile); localFile.on("finish", () => localFile.close()); localFile.on("error", (err) => { fs.unlink(file); // Delete the file async if there's an error reject(err); }); }); request.on("error", (err) => { reject(err); }); }); } async function downloadFiles() { for (let i = 0; i < downloads.length; i++) { let download = downloads[i]; let file = download.out; let url = download.url; if (download.dont_redownload) { if (fs.statSync(file).isFile()) { continue; } } await getRemoteFile(file, url); download.after(file); } } async function buildScript(scriptPath) { const srcDir = path.dirname(scriptPath); const scriptName = path.basename(scriptPath, ".user.js"); const metaPath = path.join(srcDir, scriptName + `.meta.js`); const scrPath = srcDir.replace(/^src(\\|\/)?/, ""); let outdDir = [outDir, scrPath]; const outPath = path.join(...outdDir, `${scriptName}.user.js`); const tempPath = path.join(...outdDir, `${scriptName}.user.temp.js`); const minifiedPath = path.join(...outdDir, `${scriptName}.min.user.js`); console.log({ srcDir, outdDir, scriptPath, scriptName, metaPath, outPath, tempPath, minifiedPath, }); if (!fs.existsSync(metaPath)) { console.error(`Meta file not found: ${metaPath}`); return; } console.log(`Compiling ${scriptPath}`); // Compile the script with Babel execSync(`npx babel ${scriptPath} -o ${tempPath}`); // Read and prepend the meta content let metaContentOriginal = fs .readFileSync(metaPath, "utf8") .replace(/{{UNIXDATE}}/gim, Date.now()); let scrPathd = scrPath.replace(/\\/g, "/"); let metaContent = metaContentOriginal.replace( /{{FILE_URL}}/gim, `https://git.bowu.dev/bowu/UserScriptsPublic/raw/branch/main/build/${scrPathd}/${scriptName}.user.js` ); let metaMiniContent = metaContentOriginal.replace( /{{FILE_URL}}/gim, `https://git.bowu.dev/bowu/UserScriptsPublic/raw/branch/main/build/${scrPathd}/${scriptName}.min.user.js` ); const scriptContent = fs.readFileSync(tempPath, "utf8"); fs.writeFileSync(outPath, `${metaContent}\n${scriptContent}`); console.log(`Built: ${outPath}`); // Minify the script while preserving comments in the meta file const Terser = require("terser"); const minifiedResult = await Terser.minify(scriptContent, { output: { comments: false, // Do not keep comments in the minified output }, }); if (minifiedResult.error) { console.error(`Error minifying script: ${minifiedResult.error}`); return; } // Prepend the meta content to the minified script fs.writeFileSync(minifiedPath, `${metaMiniContent}\n${minifiedResult.code}`); console.log(`Minified: ${minifiedPath}`); fs.unlinkSync(tempPath); } function buildAll() { const files = fs.readdirSync(srcDir, { recursive: true, }); files.forEach((file) => { console.log(file); if (file.endsWith(".user.js")) { buildScript(path.join(srcDir, file)); } }); } (async function () { await downloadFiles(); buildAll(); })();