|
| 1 | +#!/usr/bin/env node |
| 2 | + |
| 3 | +// Bumps @require'd JS in userscript |
| 4 | +// NOTE: Doesn't git commit to allow script editing from breaking changes |
| 5 | + |
| 6 | +(async () => { |
| 7 | + |
| 8 | + // Import LIBS |
| 9 | + const fs = require('fs'), // to read/write files |
| 10 | + ssri = require('ssri') // to generate SHA-256 hashes |
| 11 | + |
| 12 | + // Init REPO context |
| 13 | + const repoName = 'amazongpt', |
| 14 | + userJSfilePath = `./greasemonkey/${repoName}.user.js` |
| 15 | + |
| 16 | + // Init UI COLORS |
| 17 | + const nc = '\x1b[0m', // no color |
| 18 | + dg = '\x1b[38;5;243m', // dim gray |
| 19 | + bw = '\x1b[1;97m', // bright white |
| 20 | + by = '\x1b[1;33m', // bright yellow |
| 21 | + bg = '\x1b[1;92m', // bright green |
| 22 | + br = '\x1b[1;91m' // bright red |
| 23 | + |
| 24 | + // Init REGEX |
| 25 | + const rePatterns = { |
| 26 | + resName: /[^/]+\/(?:css|dist)?\/?[^/]+\.(?:css|js)(?=[?#]|$)/, |
| 27 | + jsURL: /^\/\/ @require\s+(https:\/\/cdn\.jsdelivr\.net\/gh\/.+)$/, |
| 28 | + commitHash: /(@|\?v=)([^/#]+)/, sriHash: /[^#]+$/ |
| 29 | + } |
| 30 | + |
| 31 | + // Define FUNCTIONS |
| 32 | + |
| 33 | + const log = {}; |
| 34 | + ['hash', 'info', 'working', 'success', 'error'].forEach(lvl => log[lvl] = function(msg) { |
| 35 | + const logColor = lvl == 'hash' ? dg : lvl == 'info' ? bw : lvl == 'working' ? by : lvl == 'success' ? bg : br, |
| 36 | + formattedMsg = logColor + ( log.endedWithLineBreak ? msg.trimStart() : msg ) + nc |
| 37 | + console.log(formattedMsg) ; log.endedWithLineBreak = msg.toString().endsWith('\n') |
| 38 | + }) |
| 39 | + |
| 40 | + function fetchData(url) { |
| 41 | + if (typeof fetch == 'undefined') // polyfill for Node.js < v21 |
| 42 | + return new Promise((resolve, reject) => { |
| 43 | + try { // to use http or https module |
| 44 | + const protocol = url.match(/^([^:]+):\/\//)[1] |
| 45 | + if (!/^https?$/.test(protocol)) reject(new Error('Invalid fetchData() URL.')) |
| 46 | + require(protocol).get(url, resp => { |
| 47 | + let rawData = '' |
| 48 | + resp.on('data', chunk => rawData += chunk) |
| 49 | + resp.on('end', () => resolve({ json: () => JSON.parse(rawData) })) |
| 50 | + }).on('error', err => reject(new Error(err.message))) |
| 51 | + } catch (err) { reject(new Error('Environment not supported.')) |
| 52 | + }}) |
| 53 | + else // use fetch() from Node.js v21+ |
| 54 | + return fetch(url) |
| 55 | + } |
| 56 | + |
| 57 | + async function isValidResource(resURL) { |
| 58 | + try { |
| 59 | + const resIsValid = !(await (await fetchData(resURL)).text()).startsWith('Package size exceeded') |
| 60 | + if (!resIsValid) log.error(`\nInvalid resource: ${resURL}\n`) |
| 61 | + return resIsValid |
| 62 | + } catch (err) { |
| 63 | + log.error(`\nCannot validate resource: ${resURL}\n`) |
| 64 | + return null |
| 65 | + } |
| 66 | + } |
| 67 | + |
| 68 | + async function getLatestCommitHash(repo, path) { |
| 69 | + const endpoint = `https://api.github.com/repos/${repo}/commits`, |
| 70 | + latestCommitHash = (await (await fetchData(`${endpoint}?path=${ path || '' }`)).json())[0]?.sha |
| 71 | + if (latestCommitHash) log.hash(`${latestCommitHash}\n`) |
| 72 | + return latestCommitHash |
| 73 | + } |
| 74 | + |
| 75 | + async function generateSRIhash(resURL, algorithm = 'sha256') { |
| 76 | + const sriHash = ssri.fromData( |
| 77 | + Buffer.from(await (await fetchData(resURL)).arrayBuffer()), { algorithms: [algorithm] }).toString() |
| 78 | + log.hash(`${sriHash}\n`) |
| 79 | + return sriHash |
| 80 | + } |
| 81 | + |
| 82 | + function bumpUserJSver(userJSfilePath) { |
| 83 | + const date = new Date(), |
| 84 | + today = `${date.getFullYear()}.${date.getMonth() +1}.${date.getDate()}`, // YYYY.M.D format |
| 85 | + reVersion = /(@version\s+)([\d.]+)/, |
| 86 | + userJScontent = fs.readFileSync(userJSfilePath, 'utf-8'), |
| 87 | + currentVer = userJScontent.match(reVersion)[2] |
| 88 | + let newVer |
| 89 | + if (currentVer.startsWith(today)) { // bump sub-ver |
| 90 | + const verParts = currentVer.split('.'), |
| 91 | + subVer = verParts.length > 3 ? parseInt(verParts[3], 10) +1 : 1 |
| 92 | + newVer = `${today}.${subVer}` |
| 93 | + } else // bump to today |
| 94 | + newVer = today |
| 95 | + fs.writeFileSync(userJSfilePath, userJScontent.replace(reVersion, `$1${newVer}`), 'utf-8') |
| 96 | + console.log(`Updated: ${bw}v${currentVer}${nc} → ${bg}v${newVer}${nc}`) |
| 97 | + } |
| 98 | + |
| 99 | + // Run MAIN routine |
| 100 | + |
| 101 | + // Collect resourcs |
| 102 | + log.working('\nCollecting resources...\n') |
| 103 | + const userJScontent = fs.readFileSync(userJSfilePath, 'utf-8'), |
| 104 | + reResURL = new RegExp(rePatterns.jsURL.source, 'gm'), |
| 105 | + resURLs = [...userJScontent.matchAll(reResURL)].map(match => match[1] || match[2]) |
| 106 | + log.success(`${resURLs.length} potentially bumpable resource(s) found.`) |
| 107 | + |
| 108 | + // Fetch latest commit hash for adamlui/ai-web-extensions |
| 109 | + log.working('\nFetching latest commit hash for adamlui/ai-web-extensions...\n') |
| 110 | + const latestCommitHashes = { aiweb: await getLatestCommitHash('adamlui/ai-web-extensions') } |
| 111 | + |
| 112 | + log.working('\nProcessing resource(s)...\n') |
| 113 | + let urlsUpdatedCnt = 0 |
| 114 | + |
| 115 | + // Fetch latest commit hash for repo/chrom<e|ium>/extension |
| 116 | + if (resURLs.some(url => url.includes(repoName))) { |
| 117 | + console.log('Fetching latest commit hash for Chromium extension...') |
| 118 | + latestCommitHashes.chromium = await getLatestCommitHash(`adamlui/${repoName}`, 'chromium/extension') |
| 119 | + } |
| 120 | + |
| 121 | + // Process each resource |
| 122 | + for (const resURL of resURLs) { |
| 123 | + if (!await isValidResource(resURL)) continue // to next resource |
| 124 | + const resName = rePatterns.resName.exec(resURL)?.[0] || 'resource' // dir/filename for logs |
| 125 | + |
| 126 | + // Compare/update commit hash |
| 127 | + let resLatestCommitHash = latestCommitHashes[resURL.includes(repoName) ? 'chromium' : 'aiweb'] |
| 128 | + if (resLatestCommitHash.startsWith( // compare hashes |
| 129 | + rePatterns.commitHash.exec(resURL)?.[2] || '')) { // commit hash didn't change... |
| 130 | + console.log(`${resName} already up-to-date!`) ; log.endedWithLineBreak = false |
| 131 | + continue // ...so skip resource |
| 132 | + } |
| 133 | + resLatestCommitHash = resLatestCommitHash.substring(0, 7) // abbr it |
| 134 | + let updatedURL = resURL.replace(rePatterns.commitHash, `$1${resLatestCommitHash}`) // update hash |
| 135 | + if (!await isValidResource(updatedURL)) continue // to next resource |
| 136 | + |
| 137 | + // Generate/compare/update SRI hash |
| 138 | + console.log(`${ !log.endedWithLineBreak ? '\n' : '' }Generating SRI (SHA-256) hash for ${resName}...`) |
| 139 | + const newSRIhash = await generateSRIhash(updatedURL) |
| 140 | + if (rePatterns.sriHash.exec(resURL)?.[0] == newSRIhash) { // SRI hash didn't change |
| 141 | + console.log(`${resName} already up-to-date!`) ; log.endedWithLineBreak = false |
| 142 | + continue // ...so skip resource |
| 143 | + } |
| 144 | + updatedURL = updatedURL.replace(rePatterns.sriHash, newSRIhash) // update hash |
| 145 | + if (!await isValidResource(updatedURL)) continue // to next resource |
| 146 | + |
| 147 | + // Write updated URL to userscript |
| 148 | + console.log(`Writing updated URL for ${resName}...`) |
| 149 | + const userJScontent = fs.readFileSync(userJSfilePath, 'utf-8') |
| 150 | + fs.writeFileSync(userJSfilePath, userJScontent.replace(resURL, updatedURL), 'utf-8') |
| 151 | + log.success(`${resName} bumped!\n`) ; urlsUpdatedCnt++ |
| 152 | + } |
| 153 | + if (urlsUpdatedCnt > 0) { |
| 154 | + console.log(`${ !log.endedWithLineBreak ? '\n' : '' }Bumping userscript version...`) |
| 155 | + bumpUserJSver(userJSfilePath) |
| 156 | + } |
| 157 | + |
| 158 | + // Log final summary |
| 159 | + log[urlsUpdatedCnt > 0 ? 'success' : 'info']( |
| 160 | + `\n${ urlsUpdatedCnt > 0 ? 'Success! ' : '' }${urlsUpdatedCnt} resource(s) bumped.`) |
| 161 | + |
| 162 | +})() |
0 commit comments