feat(crypto-node): bundle prebuilts with the package

alina 🌸 2024-04-13 06:59:12 +03:00
parent 908aa21f2d
commit de3d3d81ba
Signed by: teidesu
SSH key fingerprint: SHA256:uNeCpw6aTSU4aIObXLvHfLkDa82HWH9EiOj9AXOIRpI
10 changed files with 212 additions and 17 deletions


@@ -27,7 +27,7 @@ jobs:
run: python3 -m pip install --break-system-packages setuptools
- uses: ./.github/actions/init
- name: 'Build'
run: pnpx prebuildify --napi --strip
run: pnpx prebuildify@6.0.0 --napi --strip
working-directory: packages/crypto-node
- name: 'Upload'
uses: actions/upload-artifact@v4
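
Pinning prebuildify to 6.0.0 keeps the CI builds reproducible across runners. With --napi --strip it emits stripped N-API binaries into a prebuilds/ tree keyed by platform and arch, which is what the upload step publishes as an artifact. A rough sketch of the layout (exact folder names depend on each runner's platform and arch):

    packages/crypto-node/prebuilds/
        linux-x64/node.napi.node
        darwin-arm64/node.napi.node
        win32-x64/node.napi.node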


@@ -50,6 +50,7 @@ jobs:
id: build
env:
GH_RELEASE: 1
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: node scripts/publish.js ${{ steps.find.outputs.modified }}
- name: Commit version bumps
run: |


@@ -80,6 +80,9 @@ jobs:
e2e:
runs-on: ubuntu-latest
needs: [test-node, test-web, test-bun]
permissions:
contents: read
actions: write
steps:
- uses: actions/checkout@v4
- name: Run end-to-end tests
@@ -92,5 +95,6 @@
continue-on-error: true
env:
NPM_TOKEN: ${{ secrets.CANARY_NPM_TOKEN }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
REGISTRY: 'https://npm.tei.su'
run: cd e2e && ./cli.sh ci-publish
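
The permissions block and the GITHUB_TOKEN handed to the canary publish are there because building crypto-node now talks to the Actions API: it dispatches the prebuilt workflow and downloads its artifacts. A minimal sketch (not part of the repo) for checking that the job token can reach that API; listing runs needs at least actions: read, the dispatch itself needs actions: write:

    // sketch: verify the job's GITHUB_TOKEN can see the prebuilt workflow's runs
    fetch('https://api.github.com/repos/mtcute/mtcute/actions/workflows/node-prebuilt.yaml/runs?per_page=1', {
        headers: {
            Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
            'X-GitHub-Api-Version': '2022-11-28',
        },
    }).then((res) => console.log(res.status)) // 200 if the token has access, 403 otherwise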


@@ -1,8 +1,184 @@
module.exports = ({ fs, path, packageDir, outDir }) => ({
final() {
const crypto = require('crypto')
const path = require('path')
const fs = require('fs')
const cp = require('child_process')
const { Readable } = require('stream')
const git = require('../../scripts/git-utils')
const GITHUB_TOKEN = process.env.GITHUB_TOKEN
if (!GITHUB_TOKEN) {
throw new Error('GITHUB_TOKEN is required to publish crypto-node')
}
const GITHUB_HEADERS = {
Authorization: `Bearer ${GITHUB_TOKEN}`,
'Content-Type': 'application/json',
'X-GitHub-Api-Version': '2022-11-28',
}
const API_PREFIX = 'https://api.github.com/repos/mtcute/mtcute/actions/workflows/node-prebuilt.yaml'
const PLATFORMS = ['ubuntu', 'macos', 'windows']
async function findArtifactsByHash(hash) {
const runs = await fetch(`${API_PREFIX}/runs?per_page=100`, { headers: GITHUB_HEADERS }).then((r) => r.json())
for (const run of runs.workflow_runs) {
if (run.conclusion !== 'success' || run.status !== 'completed') continue
const artifacts = await fetch(`${run.url}/artifacts`, { headers: GITHUB_HEADERS })
.then((r) => r.json())
.then((r) => r.artifacts)
for (const it of artifacts) {
const parts = it.name.split('-')
if (parts[0] === 'prebuilt' &&
PLATFORMS.includes(parts[1]) &&
parts[2] === 'latest' &&
parts[3] === hash) {
return artifacts
}
}
}
return null
}
async function runWorkflow(commit, hash) {
const createRes = await fetch(`${API_PREFIX}/dispatches`, {
method: 'POST',
headers: GITHUB_HEADERS,
body: JSON.stringify({
ref: git.getCurrentBranch(),
inputs: { commit, hash },
}),
})
if (createRes.status !== 204) {
const text = await createRes.text()
throw new Error(`Failed to run workflow: ${createRes.status} ${text}`)
}
// wait for the workflow to finish
// github api is awesome and doesn't return the run id, so let's just assume it's the last one
const runsRes = await fetch(`${API_PREFIX}/runs`, {
headers: GITHUB_HEADERS,
}).then((r) => r.json())
let run = runsRes.workflow_runs[0]
while (run.status === 'queued' || run.status === 'in_progress') {
await new Promise((resolve) => setTimeout(resolve, 5000))
run = await fetch(run.url, { headers: GITHUB_HEADERS }).then((r) => r.json())
}
if (run.status !== 'completed') {
throw new Error(`Workflow ${run.id} failed: ${run.status}`)
}
if (run.conclusion !== 'success') {
throw new Error(`Workflow ${run.id} failed: ${run.conclusion}`)
}
// fetch artifacts
const artifacts = await fetch(`${run.url}/artifacts`, { headers: GITHUB_HEADERS })
.then((r) => r.json())
.then((r) => r.artifacts)
// validate their names
for (const it of artifacts) {
const parts = it.name.split('-')
if (parts[0] !== 'prebuilt' ||
!PLATFORMS.includes(parts[1]) ||
parts[2] !== 'latest' ||
parts[3] !== hash) {
throw new Error(`Invalid artifact name: ${it.name}`)
}
}
return artifacts
}
async function extractArtifacts(artifacts) {
fs.mkdirSync(path.join(__dirname, 'dist/prebuilds'), { recursive: true })
await Promise.all(
artifacts.map(async (it) => {
const platform = it.name.split('-')[1]
const res = await fetch(it.archive_download_url, {
headers: GITHUB_HEADERS,
redirect: 'manual',
})
if (res.status !== 302) {
const text = await res.text()
throw new Error(`Failed to download artifact ${it.name}: ${res.status} ${text}`)
}
const zip = await fetch(res.headers.get('location'))
const outFile = path.join(__dirname, 'dist/prebuilds', `${platform}.zip`)
const stream = fs.createWriteStream(outFile)
await new Promise((resolve, reject) => {
stream.on('finish', resolve)
stream.on('error', reject)
Readable.fromWeb(zip.body).pipe(stream)
})
// extract the zip
await new Promise((resolve, reject) => {
const child = cp.spawn('unzip', [outFile, '-d', path.join(__dirname, 'dist/prebuilds')], {
stdio: 'inherit',
})
child.on('exit', (code) => {
if (code !== 0) {
reject(new Error(`Failed to extract ${outFile}: ${code}`))
} else {
resolve()
}
})
})
fs.unlinkSync(outFile)
}),
)
}
module.exports = ({ fs, glob, path, packageDir, outDir }) => ({
async final() {
const libDir = path.join(packageDir, 'lib')
// generate sources hash
const hashes = []
for (const file of glob.sync(path.join(libDir, '**/*'))) {
const hash = crypto.createHash('sha256')
hash.update(fs.readFileSync(file))
hashes.push(hash.digest('hex'))
}
const hash = crypto.createHash('sha256')
.update(hashes.join('\n'))
.digest('hex')
console.log(hash)
console.log('[i] Checking for prebuilt artifacts for %s', hash)
let artifacts = await findArtifactsByHash(hash)
if (!artifacts) {
console.log('[i] No artifacts found, running workflow')
artifacts = await runWorkflow(git.getCurrentCommit(), hash)
}
console.log('[i] Extracting artifacts')
await extractArtifacts(artifacts)
// copy native sources and binding.gyp file
fs.cpSync(path.join(packageDir, 'lib'), path.join(outDir, 'lib'), { recursive: true })
fs.cpSync(libDir, path.join(outDir, 'lib'), { recursive: true })
const bindingGyp = fs.readFileSync(path.join(packageDir, 'binding.gyp'), 'utf8')
fs.writeFileSync(
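
One design note on runWorkflow above: the dispatch endpoint returns no run id, so the code assumes the newest run in the list is the one it just started. If that ever proves flaky, the same runs endpoint accepts filters, so the lookup inside runWorkflow could be narrowed instead of taking workflow_runs[0] blindly. A sketch of that alternative (not what this commit does; dispatchedAt is a hypothetical timestamp captured just before the dispatch call, and API_PREFIX, GITHUB_HEADERS and git are the same values as above):

    // sketch: only consider dispatch-triggered runs on the current branch
    const branch = git.getCurrentBranch()
    const runsRes = await fetch(
        `${API_PREFIX}/runs?event=workflow_dispatch&branch=${encodeURIComponent(branch)}&per_page=10`,
        { headers: GITHUB_HEADERS },
    ).then((r) => r.json())
    const run = runsRes.workflow_runs.find((it) => new Date(it.created_at) >= dispatchedAt)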


@@ -10,7 +10,7 @@
"sideEffects": false,
"scripts": {
"build": "pnpm run -w build-package crypto-node",
"install": "node-gyp configure && node-gyp -j 16 build",
"install": "node-gyp-build",
"rebuild": "node-gyp configure && node-gyp -j 16 rebuild",
"clean": "node-gyp clean"
},
@@ -34,7 +34,8 @@
}
},
"dependencies": {
"@mtcute/node": "workspace:^"
"@mtcute/node": "workspace:^",
"node-gyp-build": "4.8.0"
},
"devDependencies": {
"@mtcute/test": "workspace:^"


@@ -1,10 +1,3 @@
/* eslint-disable no-restricted-globals */
let native
try {
native = require('../build/Release/crypto')
} catch (e) {
native = require('../build/Debug/crypto')
}
const native = require('node-gyp-build')(`${__dirname}/..`)
module.exports = { native }
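
At require time, node-gyp-build probes a fixed set of locations relative to the directory it is given, so both locally-compiled and prebuilt installs keep working. Roughly (simplified; as far as I know a local node-gyp build still takes precedence over the bundled prebuilds):

    // lookup order probed for this package, as a sketch:
    //   packages/crypto-node/build/Release/*.node          - local node-gyp build
    //   packages/crypto-node/build/Debug/*.node            - local debug build
    //   packages/crypto-node/prebuilds/<platform>-<arch>/  - bundled prebuilt binary
    const native = require('node-gyp-build')(`${__dirname}/..`)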


@@ -217,6 +217,9 @@ importers:
'@mtcute/node':
specifier: workspace:^
version: link:../node
node-gyp-build:
specifier: 4.8.0
version: 4.8.0
devDependencies:
'@mtcute/test':
specifier: workspace:^
@@ -4730,6 +4733,11 @@ packages:
semver: 7.5.1
dev: false
/node-gyp-build@4.8.0:
resolution: {integrity: sha512-u6fs2AEUljNho3EYTJNBfImO5QTo/J/1Etd+NVdCj7qWKUSN/bSLkZwhDv7I+w/MSC6qJ4cknepkAYykDdK8og==}
hasBin: true
dev: false
/node-gyp@9.3.1:
resolution: {integrity: sha512-4Q16ZCqq3g8awk6UplT7AuxQ35XN4R/yf/+wSAwcBUAjg7l58RTactWaP8fIDTi0FzI7YcVLujwExakZlfWkXg==}
engines: {node: ^12.13 || ^14.13 || >=16}


@@ -270,6 +270,6 @@ fs.cpSync(path.join(__dirname, '../LICENSE'), path.join(outDir, 'LICENSE'))
fs.writeFileSync(path.join(outDir, '.npmignore'), '*.tsbuildinfo\n')
buildConfig.final()
console.log('[v] Done!')
Promise.resolve(buildConfig.final()).then(() => {
console.log('[v] Done!')
})
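
Wrapping the hook in Promise.resolve lets a package's final() be either synchronous (every existing package) or async (the new crypto-node hook) without the build script caring which one it got, since Promise.resolve passes promises through and wraps plain values:

    // sketch: Promise.resolve handles both hook styles the same way
    Promise.resolve(42).then(console.log)                   // plain value from a sync final()
    Promise.resolve(Promise.resolve(42)).then(console.log)  // promise from an async final()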


@@ -49,6 +49,14 @@ function getCommitsSince(tag, until = 'HEAD') {
return items.reverse()
}
function getCurrentCommit() {
return cp.execSync('git rev-parse HEAD', { encoding: 'utf8', stdio: 'pipe' }).trim()
}
function getCurrentBranch() {
return cp.execSync('git rev-parse --abbrev-ref HEAD', { encoding: 'utf8', stdio: 'pipe' }).trim()
}
function parseConventionalCommit(msg) {
const match = msg.match(/^(\w+)(?:\(([^)]+)\))?(!?): (.+)$/)
@@ -64,4 +72,6 @@ module.exports = {
findChangedFilesSince,
getCommitsSince,
parseConventionalCommit,
getCurrentCommit,
getCurrentBranch,
}
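
These two helpers feed the workflow dispatch in the crypto-node build config: the current branch becomes the ref the workflow runs on, and the current commit becomes its commit input, so the prebuilt binaries are built from exactly the sources being published. A usage sketch (sourcesHash stands in for the hash the build config computes):

    // sketch: how the new helpers end up in the dispatch payload
    const git = require('../../scripts/git-utils')
    const body = {
        ref: git.getCurrentBranch(),
        inputs: { commit: git.getCurrentCommit(), hash: sourcesHash },
    }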


@@ -147,6 +147,8 @@ async function main(arg = process.argv[2]) {
fs.writeFileSync(process.env.GITHUB_OUTPUT, `tarballs=${tarballs.join(',')}\n`, { flag: 'a' })
}
process.exit(0) // idk why but it sometimes hangs indefinitely
}
exports.main = main
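
The forced process.exit(0) papers over the publish script occasionally not exiting on its own, most likely a stray open handle (network sockets or child processes from the new prebuilt download step) keeping the event loop alive. If the root cause ever needs hunting, a gentler sketch (Node 17.3+, getActiveResourcesInfo is still experimental):

    // sketch: log what is keeping the process alive instead of force-exiting
    console.log(process.getActiveResourcesInfo())
    process.exitCode = 0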