From f180e94998348c83316ff528ec92300a506457a6 Mon Sep 17 00:00:00 2001 From: Travis Shivers Date: Wed, 14 Oct 2020 16:55:13 -0500 Subject: [PATCH] ci(update): port update.sh to nodejs --- functions.sh | 126 --------------------------- genMatrix.js | 23 +---- update.js | 62 ++++++++++++++ update.sh | 235 --------------------------------------------------- updateLib.js | 177 ++++++++++++++++++++++++++++++++++++++ utils.js | 27 ++++++ 6 files changed, 268 insertions(+), 382 deletions(-) create mode 100755 update.js delete mode 100755 update.sh create mode 100644 updateLib.js create mode 100644 utils.js diff --git a/functions.sh b/functions.sh index be9c57539..e82b4f738 100755 --- a/functions.sh +++ b/functions.sh @@ -153,44 +153,6 @@ function get_versions() { fi } -function is_alpine() { - local variant - variant=${1} - shift - - if [ "${variant}" = "${variant#alpine}" ]; then - return 1 - fi -} - -function is_debian() { - local variant - variant=$1 - shift - - IFS=' ' read -ra debianVersions <<< "$(get_config "./" "debian_versions")" - for d in "${debianVersions[@]}"; do - if [ "${d}" = "${variant}" ]; then - return 0 - fi - done - return 1 -} - -function is_debian_slim() { - local variant - variant=$1 - shift - - IFS=' ' read -ra debianVersions <<< "$(get_config "./" "debian_versions")" - for d in "${debianVersions[@]}"; do - if [ "${d}-slim" = "${variant}" ]; then - return 0 - fi - done - return 1 -} - function get_fork_name() { local version version=$1 @@ -202,24 +164,6 @@ function get_fork_name() { fi } -function get_full_tag() { - local variant - local tag - local full_tag - variant="$1" - shift - tag="$1" - shift - if [ -z "${variant}" ]; then - full_tag="${tag}" - elif [ "${variant}" = "default" ]; then - full_tag="${tag}" - else - full_tag="${tag}-${variant}" - fi - echo "${full_tag}" -} - function get_full_version() { local version version=$1 @@ -246,25 +190,6 @@ function get_major_minor_version() { echo "$(echo "${fullversion}" | cut -d'.' -f1).$(echo "${fullversion}" | cut -d'.' -f2)" } -function get_path() { - local version - local variant - local path - version="$1" - shift - variant="$1" - shift - - if [ -z "${variant}" ]; then - path="${version}/${variant}" - elif [ "${variant}" = "default" ]; then - path="${version}" - else - path="${version}/${variant}" - fi - echo "${path}" -} - function get_tag() { local version version=$1 @@ -308,54 +233,3 @@ function sort_versions() { echo "${sorted[@]}" } - -function commit_range() { - local commit_id_end=${1} - shift - local commit_id_start=${1} - - if [ -z "${commit_id_start}" ]; then - if [ -z "${commit_id_end}" ]; then - echo "HEAD~1..HEAD" - elif [[ "${commit_id_end}" =~ .. 
]]; then - echo "${commit_id_end}" - else - echo "${commit_id_end}~1..${commit_id_end}" - fi - else - echo "${commit_id_end}..${commit_id_start}" - fi -} - -function images_updated() { - local commit_range - local versions - local images_changed - - commit_range="$(commit_range "$@")" - - IFS=' ' read -ra versions <<< "$( - IFS=',' - get_versions - )" - images_changed=$(git diff --name-only "${commit_range}" "${versions[@]}") - - if [ -z "${images_changed}" ]; then - return 1 - fi - return 0 -} - -function tests_updated() { - local commit_range - local test_changed - - commit_range="$(commit_range "$@")" - - test_changed=$(git diff --name-only "${commit_range}" test*) - - if [ -z "${test_changed}" ]; then - return 1 - fi - return 0 -} diff --git a/genMatrix.js b/genMatrix.js index 9f57ea509..0d47c3529 100644 --- a/genMatrix.js +++ b/genMatrix.js @@ -1,31 +1,15 @@ 'use strict'; const path = require('path'); -const fs = require('fs'); +const { getAllDockerfiles, getDockerfileNodeVersion } = require('./utils'); const testFiles = [ 'genMatrix.js', '.github/workflows/build-test.yml', ]; -const nodeDirRegex = /^\d+$/; - const areTestFilesChanged = (changedFiles) => changedFiles .some((file) => testFiles.includes(file)); -// Returns a list of the child directories in the given path -const getChildDirectories = (parent) => fs.readdirSync(parent, { withFileTypes: true }) - .filter((dirent) => dirent.isDirectory()) - .map(({ name }) => path.resolve(parent, name)); - -const getNodeVerionDirs = (base) => getChildDirectories(base) - .filter((childPath) => nodeDirRegex.test(path.basename(childPath))); - -// Returns the paths of Dockerfiles that are at: base/*/Dockerfile -const getDockerfilesInChildDirs = (base) => getChildDirectories(base) - .map((childDir) => path.resolve(childDir, 'Dockerfile')); - -const getAllDockerfiles = (base) => getNodeVerionDirs(base).flatMap(getDockerfilesInChildDirs); - const getAffectedDockerfiles = (filesAdded, filesModified, filesRenamed) => { const files = [ ...filesAdded, @@ -52,13 +36,10 @@ const getAffectedDockerfiles = (filesAdded, filesModified, filesRenamed) => { ]; }; -const getFullNodeVersionFromDockerfile = (file) => fs.readFileSync(file, 'utf8') - .match(/^ENV NODE_VERSION (\d*\.*\d*\.\d*)/m)[1]; - const getDockerfileMatrixEntry = (file) => { const [variant] = path.dirname(file).split(path.sep).slice(-1); - const version = getFullNodeVersionFromDockerfile(file); + const version = getDockerfileNodeVersion(file); return { version, diff --git a/update.js b/update.js new file mode 100755 index 000000000..086146fe8 --- /dev/null +++ b/update.js @@ -0,0 +1,62 @@ +#!/usr/bin/env node +'use strict'; +const update = require('./updateLib'); + +const usage = ` + Update the node docker images. 
+ + Usage: + ./update.js [ OPTIONS ] + + OPTIONS: + -h, --help\tthis message + -a, --all\tupdate all images even if no node version update`; + +const printUsage = () => { + console.log(usage); +}; + +const runUpdate = async (updateAll) => { + const updated = await update(updateAll); + + updated.forEach(({ file }) => { + console.log('Updated', file); + }); + + if (!updated.length) { + console.log('Nothing updated'); + } +}; + +const main = async () => { + if (process.argv.length > 3) { + printUsage(); + process.exit(1); + } + + if (process.argv.length === 2) { + await runUpdate(false); + return; + } + + switch (process.argv[2]) { + case '-a': + case '--all': + await runUpdate(true); + return; + + case '-h': + case '--help': + printUsage(); + return; + + default: + printUsage(); + process.exit(1); + } +}; + +main().catch((e) => { + console.error(e); + process.exit(1); +}); diff --git a/update.sh b/update.sh deleted file mode 100755 index 507af21a7..000000000 --- a/update.sh +++ /dev/null @@ -1,235 +0,0 @@ -#!/usr/bin/env bash - -set -ue - -function usage() { - cat << EOF - - Update the node docker images. - - Usage: - $0 [-s] [MAJOR_VERSION(S)] [VARIANT(S)] - - Examples: - - update.sh # Update all images - - update.sh -s # Update all images, skip updating Alpine and Yarn - - update.sh 8,10 # Update all variants of version 8 and 10 - - update.sh -s 8 # Update version 8 and variants, skip updating Alpine and Yarn - - update.sh 8 buster-slim,buster # Update only buster's slim and buster variants for version 8 - - update.sh -s 8 stretch # Update only stretch variant for version 8, skip updating Alpine and Yarn - - update.sh . alpine # Update the alpine variant for all versions - - OPTIONS: - -s Security update; skip updating the yarn and alpine versions. - -b CI config update only - -h Show this message - -EOF -} - -SKIP=false -while getopts "sh" opt; do - case "${opt}" in - s) - SKIP=true - shift - ;; - h) - usage - exit - ;; - \?) - usage - exit - ;; - esac -done - -. functions.sh - -cd "$(cd "${0%/*}" && pwd -P)" - -IFS=',' read -ra versions_arg <<< "${1:-}" -IFS=',' read -ra variant_arg <<< "${2:-}" - -IFS=' ' read -ra versions <<< "$(get_versions .)" -IFS=' ' read -ra update_versions <<< "$(get_versions . "${versions_arg[@]:-}")" -IFS=' ' read -ra update_variants <<< "$(get_variants . "${variant_arg[@]:-}")" -if [ ${#versions[@]} -eq 0 ]; then - fatal "No valid versions found!" 
-fi - -# Global variables -# Get architecure and use this as target architecture for docker image -# See details in function.sh -# TODO: Should be able to specify target architecture manually -arch=$(get_arch) - -if [ "${SKIP}" != true ]; then - alpine_version=$(get_config "./" "alpine_version") - yarnVersion="$(curl -sSL --compressed https://yarnpkg.com/latest-version)" -fi - -function in_versions_to_update() { - local version=$1 - - if [ "${#update_versions[@]}" -eq 0 ]; then - echo 0 - return - fi - - for version_to_update in "${update_versions[@]}"; do - if [ "${version_to_update}" = "${version}" ]; then - echo 0 - return - fi - done - - echo 1 -} - -function in_variants_to_update() { - local variant=$1 - - if [ "${#update_variants[@]}" -eq 0 ]; then - echo 0 - return - fi - - for variant_to_update in "${update_variants[@]}"; do - if [ "${variant_to_update}" = "${variant}" ]; then - echo 0 - return - fi - done - - echo 1 -} - -function update_node_version() { - - local baseuri=${1} - shift - local version=${1} - shift - local template=${1} - shift - local dockerfile=${1} - shift - local variant="" - if [ $# -eq 1 ]; then - variant=${1} - shift - fi - - fullVersion="$(curl -sSL --compressed "${baseuri}" | grep ' /dev/null; then - echo "${dockerfile} is already up to date!" - else - echo "${dockerfile} updated!" - fi - - mv -f "${dockerfile}-tmp" "${dockerfile}" - ) -} - -for version in "${versions[@]}"; do - parentpath=$(dirname "${version}") - versionnum=$(basename "${version}") - baseuri=$(get_config "${parentpath}" "baseuri") - update_version=$(in_versions_to_update "${version}") - - [ "${update_version}" -eq 0 ] && info "Updating version ${version}..." - - # Get supported variants according the target architecture - # See details in function.sh - IFS=' ' read -ra variants <<< "$(get_variants "${parentpath}")" - - if [ -f "${version}/Dockerfile" ]; then - if [ "${update_version}" -eq 0 ]; then - update_node_version "${baseuri}" "${versionnum}" "${parentpath}/Dockerfile.template" "${version}/Dockerfile" & - fi - fi - - for variant in "${variants[@]}"; do - # Skip non-docker directories - [ -f "${version}/${variant}/Dockerfile" ] || continue - - update_variant=$(in_variants_to_update "${variant}") - template_file="${parentpath}/Dockerfile-${variant}.template" - - if is_debian "${variant}"; then - template_file="${parentpath}/Dockerfile-debian.template" - elif is_debian_slim "${variant}"; then - template_file="${parentpath}/Dockerfile-slim.template" - elif is_alpine "${variant}"; then - template_file="${parentpath}/Dockerfile-alpine.template" - fi - - cp "${parentpath}/docker-entrypoint.sh" "${version}/${variant}/docker-entrypoint.sh" - if [ "${update_version}" -eq 0 ] && [ "${update_variant}" -eq 0 ]; then - update_node_version "${baseuri}" "${versionnum}" "${template_file}" "${version}/${variant}/Dockerfile" "${variant}" & - fi - done -done - -wait -info "Done!" 
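
Reviewer note: updateLib.js, added below, replaces the shell pipeline deleted above. A minimal sketch of driving the new module programmatically (the async wrapper is illustrative only; update, file, and latestVersion come from the module as added in this patch):

    'use strict';
    const update = require('./updateLib');

    (async () => {
      // false: rewrite only Dockerfiles whose ENV NODE_VERSION no longer matches
      // the latest release for that major line; true: rewrite every Dockerfile.
      const outdated = await update(false);
      outdated.forEach(({ file, latestVersion }) => {
        console.log(`${file} -> ${latestVersion}`);
      });
    })();
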
diff --git a/updateLib.js b/updateLib.js new file mode 100644 index 000000000..5bf011dd5 --- /dev/null +++ b/updateLib.js @@ -0,0 +1,177 @@ +'use strict'; +const https = require('https'); +const path = require('path'); +const { readFileSync, writeFileSync } = require('fs'); +const { getAllDockerfiles, getDockerfileNodeVersion } = require('./utils'); + +const releaseUrl = 'https://nodejs.org/dist/index.json'; +const yarnVersionUrl = 'https://classic.yarnpkg.com/latest-version'; + +const templates = Object.freeze({ + alpine: 1, + debian: 2, + debianSlim: 3, +}); + +const templateFileMap = Object.freeze({ + [templates.alpine]: 'Dockerfile-alpine.template', + [templates.debian]: 'Dockerfile-debian.template', + [templates.debianSlim]: 'Dockerfile-slim.template', +}); + +const templateRepoMap = Object.freeze({ + [templates.alpine]: 'alpine', + [templates.debian]: 'buildpack-deps', + [templates.debianSlim]: 'debian', +}); + +const fetchText = (url) => new Promise((resolve, reject) => { + https.get(url, (res) => { + const { statusCode } = res; + + if (statusCode < 200 || statusCode >= 300) { + // Consume response data to free up memory + res.resume(); + reject(new Error(`Request Failed.\nStatus Code: ${statusCode}`)); + return; + } + + res.setEncoding('utf8'); + let rawData = ''; + res.on('data', (chunk) => { + rawData += chunk; + }); + + res.on('end', () => { + resolve(rawData); + }); + }).on('error', (e) => { + reject(e); + }).end(); +}); + +const fetchJson = async (url) => { + const text = await fetchText(url); + return JSON.parse(text); +}; + +// nodeVersions is sorted +const getLatestNodeVersion = (nodeVersions, majorVersion) => nodeVersions + .find((version) => version.startsWith(`${majorVersion}.`)); + +const getTemplate = (variant) => { + if (variant.startsWith('alpine')) { + return templates.alpine; + } + + if (variant.endsWith('-slim')) { + return templates.debianSlim; + } + + return templates.debian; +}; + +const getDockerfileMetadata = (nodeVersions, file) => { + const [nodeMajorVersion, variant] = path.dirname(file).split(path.sep).slice(-2); + const fileNodeVersion = getDockerfileNodeVersion(file); + + return { + file, + variant, + fileNodeVersion, + nodeMajorVersion, + latestVersion: getLatestNodeVersion(nodeVersions, nodeMajorVersion), + template: getTemplate(variant), + }; +}; + +const isDockerfileOutdated = ({ fileNodeVersion, latestVersion }) => fileNodeVersion + !== latestVersion; + +const fetchLatestNodeVersions = async () => { + const nodeDist = await fetchJson(releaseUrl); + return nodeDist.map(({ version }) => version.substring(1)); +}; + +const findOutdated = async (updateAll) => { + const nodeVersions = await fetchLatestNodeVersions(); + + const dockerfileMetadatas = getAllDockerfiles(__dirname) + .map((file) => getDockerfileMetadata(nodeVersions, file)); + + return updateAll + ? dockerfileMetadatas + : dockerfileMetadatas.filter(isDockerfileOutdated); +}; + +const getKeys = (basename) => readFileSync(path.resolve(__dirname, 'keys', basename)) + .toString().trim().split('\n'); + +const readTemplate = (template) => readFileSync( + path.resolve(__dirname, templateFileMap[template]), +).toString(); + +const getBaseImage = ({ template, variant }) => { + const tag = template === templates.alpine + ? 
variant.replace(/alpine/, '') + : variant; + + return `${templateRepoMap[template]}:${tag}`; +}; + +const formatKeys = (keys) => keys.map((key) => `$1${key} \\`).join('\n'); + +const formatTemplate = (yarnVersion, nodeKeys, yarnKeys, muslChecksum, base, metadata) => { + const { latestVersion, template, nodeMajorVersion } = metadata; + const baseImage = getBaseImage(metadata); + const initialFormat = base.replace(/^FROM.+$/m, `FROM ${baseImage}`) + .replace(/^ENV NODE_VERSION .+$/m, `ENV NODE_VERSION ${latestVersion}`) + .replace(/^ENV YARN_VERSION .+$/m, `ENV YARN_VERSION ${yarnVersion}`) + .replace(/^(\s*)"\${NODE_KEYS\[@]}".*$/m, formatKeys(nodeKeys)) + .replace(/^(\s*)"\${YARN_KEYS\[@]}".*$/m, formatKeys(yarnKeys)); + + if (template !== templates.alpine) { + return initialFormat; + } + + const pythonVersion = parseInt(nodeMajorVersion, 10) < 14 + ? 'python2' + : 'python3'; + + return initialFormat.replace(/\${PYTHON_VERSION}/m, pythonVersion) + .replace(/CHECKSUM=CHECKSUM_x64/m, `CHECKSUM="${muslChecksum}"`); +}; + +const fetchMuslChecksum = async (nodeVersion) => { + const checksums = await fetchText( + `https://unofficial-builds.nodejs.org/download/release/v${nodeVersion}/SHASUMS256.txt`, + ); + return checksums.match(/(\S+)\s+\S+-linux-x64-musl.tar.xz/m)[1]; +}; + +const updateDockerfile = async (yarnVersion, nodeKeys, yarnKeys, metadata) => { + const { file, template, latestVersion } = metadata; + const base = readTemplate(template); + const muslChecksum = await fetchMuslChecksum(latestVersion); + + const formatted = formatTemplate(yarnVersion, nodeKeys, yarnKeys, muslChecksum, base, metadata); + writeFileSync(file, formatted); +}; + +const updateDockerfiles = async (outdated) => { + const yarnVersion = await fetchText(yarnVersionUrl); + const nodeKeys = getKeys('node.keys'); + const yarnKeys = getKeys('yarn.keys'); + + await Promise.all( + outdated.map((metadata) => updateDockerfile(yarnVersion, nodeKeys, yarnKeys, metadata)), + ); +}; + +const update = async (updateAll) => { + const outdated = await findOutdated(updateAll); + await updateDockerfiles(outdated); + return outdated; +}; + +module.exports = update; diff --git a/utils.js b/utils.js new file mode 100644 index 000000000..4664ce5a1 --- /dev/null +++ b/utils.js @@ -0,0 +1,27 @@ +'use strict'; +const path = require('path'); +const { readFileSync, readdirSync } = require('fs'); + +const nodeDirRegex = /^\d+$/; + +// Returns a list of the child directories in the given path +const getChildDirectories = (parent) => readdirSync(parent, { withFileTypes: true }) + .filter((dirent) => dirent.isDirectory()) + .map(({ name }) => path.resolve(parent, name)); + +const getNodeVersionDirs = (base) => getChildDirectories(base) + .filter((childPath) => nodeDirRegex.test(path.basename(childPath))); + +// Returns the paths of Dockerfiles that are at: base/*/Dockerfile +const getDockerfilesInChildDirs = (base) => getChildDirectories(base) + .map((childDir) => path.resolve(childDir, 'Dockerfile')); + +const getAllDockerfiles = (base) => getNodeVersionDirs(base).flatMap(getDockerfilesInChildDirs); + +const getDockerfileNodeVersion = (file) => readFileSync(file, 'utf8') + .match(/^ENV NODE_VERSION (\d*\.*\d*\.\d*)/m)[1]; + +module.exports = { + getAllDockerfiles, + getDockerfileNodeVersion, +};
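
Usage sketch for the shared helpers in utils.js (assumes it is run from the repository root and that every version/variant directory contains a Dockerfile):

    'use strict';
    const { getAllDockerfiles, getDockerfileNodeVersion } = require('./utils');

    // Walk <major>/<variant>/Dockerfile and print the Node.js version pinned
    // by each file's ENV NODE_VERSION line.
    getAllDockerfiles(__dirname).forEach((file) => {
      console.log(file, getDockerfileNodeVersion(file));
    });

This mirrors how genMatrix.js and updateLib.js now share one directory walk and version lookup instead of duplicating them.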