diff --git a/scripts/_download.js b/scripts/_download.js index ebd2a2d6..eb8f9e84 100644 --- a/scripts/_download.js +++ b/scripts/_download.js @@ -7,6 +7,9 @@ import unzipper from "unzipper"; import * as tar from "tar"; import { bold, green, yellow } from "colorette"; import { chmodOwnerPlusX } from "./_util.js"; +import { fetchFirstSuccessfulRun, fetchArtifactsForRun } from "./_github.js"; + +import { globby } from "globby"; const GITHUB_API = "https://api.github.com"; @@ -48,6 +51,10 @@ export async function getGitHubReleaseTagSha(org, repo, releaseTag) { }, }); + if (!response.ok) { + throw new Error(`Failed to fetch release tag sha: ${response.statusText}`); + } + return await response.text(); } @@ -68,12 +75,8 @@ async function streamResponseToFile(targetFile, fetchResponse) { const pipeToHash = pipeline(reader, hash); // run them both, reject if either rejects, resolve with the hash - try { - await Promise.all([pipeToFile, pipeToHash]); - return Promise.resolve(hash.digest("hex")); - } catch (e) { - return Promise.reject(e); - } + await Promise.all([pipeToFile, pipeToHash]); + return hash.digest("hex"); } export async function downloadAndExtractGitHubAsset( @@ -101,11 +104,11 @@ export async function downloadAndExtractGitHubAsset( await zipFile.extract({ path: assetDir }); const extractedFiles = await fs.readdir(assetDir); - extractedFiles.forEach(async (file) => { + for (const file of extractedFiles) { if (chmod && extname(file) !== ".zip") { chmodOwnerPlusX(join(assetDir, file)); } - }); + } console.log(`Extracted: ${gitHubReleaseAsset.name}`); } @@ -118,47 +121,43 @@ export async function downloadGitHubRelease({ releaseTag, assets, }) { - try { - const releaseData = await getGitHubReleaseMetadata(org, repo, releaseTag); - const commitId = await getGitHubReleaseTagSha(org, repo, releaseTag); - const releaseAssets = releaseData.assets; - - const metadata = { - org, - repo, - releaseTag, - commitId, - collectedAt: new Date().toISOString(), - assets: [], - }; 
- - for (const { name, platform, arch, chmod } of assets) { - const releaseAsset = releaseAssets.find((a) => a.name.toLowerCase() === name.toLowerCase()); - if (releaseAsset) { - try { - console.group(yellow(releaseAsset.name)); - await downloadAndExtractGitHubAsset(targetDirectory, releaseAsset, platform, arch, chmod); - } finally { - console.groupEnd(); - } - metadata.assets.push({ - name: releaseAsset.name, - platform, - arch, - chmod: !!chmod, - updatedAt: releaseAsset.updated_at, - }); - } else { - console.warn(`Asset [${name}] was not found in GitHub release ${releaseData.html_url}`); + const releaseData = await getGitHubReleaseMetadata(org, repo, releaseTag); + const commitId = await getGitHubReleaseTagSha(org, repo, releaseTag); + const releaseAssets = releaseData.assets; + + const metadata = { + org, + repo, + releaseTag, + commitId, + collectedAt: new Date().toISOString(), + assets: [], + }; + + for (const { name, platform, arch, chmod } of assets) { + const releaseAsset = releaseAssets.find((a) => a.name.toLowerCase() === name.toLowerCase()); + if (releaseAsset) { + try { + console.group(yellow(releaseAsset.name)); + await downloadAndExtractGitHubAsset(targetDirectory, releaseAsset, platform, arch, chmod); + } finally { + console.groupEnd(); } + metadata.assets.push({ + name: releaseAsset.name, + platform, + arch, + chmod: !!chmod, + updatedAt: releaseAsset.updated_at, + }); + } else { + console.warn(`Asset [${name}] was not found in GitHub release ${releaseData.html_url}`); } - - await fs.writeJson(metaFile, metadata, { spaces: 2 }); - console.log(`Metadata written to ${metaFile}`); - console.log(`All assets downloaded to: ${targetDirectory}`); - } catch (error) { - console.error("Error downloading the release:", error.message); } + + await fs.writeJson(metaFile, metadata, { spaces: 2 }); + console.log(`Metadata written to ${metaFile}`); + console.log(`All assets downloaded to: ${targetDirectory}`); } export async function downloadAndExtractTarGz({ 
targetDirectory, url, sha256 }) @@ -203,3 +202,169 @@ export async function downloadAndExtractTarGz({ targetDirectory, url, sha256 }) console.groupEnd(); } } + +/** + * Download a workflow artifact from GitHub + * + * @param {string} url - Download URL for the artifact + * @param {string} name - Name of the artifact file + * @param {string} outputDir - Directory to save the downloaded artifact + * @param {string} token - GitHub token sent as the Bearer authorization header + * @returns {Promise<string>} - Path to the downloaded artifact file + */ +export async function downloadArtifact(url, name, outputDir, token) { + const assetDir = join(outputDir); + const assetFileName = join(assetDir, `${name}`); + await fs.ensureDir(assetDir); + + console.log(`Downloading asset: ${name}`); + const response = await fetch(url, { + headers: { + Accept: "application/vnd.github.v3+json", + Authorization: `Bearer ${token}`, + }, + }); + + if (!response.ok) { + throw new Error(`Failed to download ${name}: ${response.statusText}`); + } + + const sha256 = await streamResponseToFile(assetFileName, response); + console.log(`Verifying download of: ${name}`); + console.log(`Asset sha256: ${green(sha256)}`); + + console.log(`Downloaded ${name} to ${assetFileName}`); + return assetFileName; +} + +/** + * Extract an artifact ZIP file.
+ * + * @param {string} filePath - Path to the ZIP file + * @param {string} tempDir - Temporary directory for extraction + * @param {string} finalDir - Final directory for extracted files + */ +export async function extractArtifact(filePath, tempDir, finalDir) { + console.log(`Checking if ${filePath} is a valid ZIP archive.`); + + // First extraction to temporary directory + console.log(`Extracting ${filePath} to temporary directory: ${tempDir}`); + const unzipArtifact = await unzipper.Open.file(filePath); + await unzipArtifact.extract({ path: tempDir }); + + // Find and extract any nested zip files in the temp directory + const zipFiles = await globby("*.zip", { cwd: tempDir }); + for (const file of zipFiles) { + const nestedZipPath = join(tempDir, file); + console.log(`Extracting nested zip: ${nestedZipPath} to ${finalDir}`); + + const unzipNestedArtifact = await unzipper.Open.file(nestedZipPath); + await unzipNestedArtifact.extract({ path: finalDir }); + + console.log(`Deleting nested zip: ${nestedZipPath}`); + await fs.unlink(nestedZipPath); + } + + // Cleanup temporary directory + console.log(`Cleaning up temporary directory: ${tempDir}`); + await fs.rm(tempDir, { recursive: true, force: true }); + + // Cleanup the original zip file + console.log(`Deleting original zip file: ${filePath}`); + await fs.unlink(filePath); + + console.log(`Extraction complete: ${filePath} -> ${finalDir}`); +} + +/** + * Download and process workflow artifacts. 
+ * + * @param {string} targetDirectory - Directory to store downloaded artifacts + * @param {string} metaFile - Path to the metadata file + * @param {string} org - GH org + * @param {string} repo - GH repo + * @param {string} workflow - Name of the workflow file + * @param {Array} assets - List of assets to download + */ +export async function downloadWorkflowArtifacts({ + targetDirectory, + metaFile, + org, + repo, + workflow, + assets, +}) { + const GITHUB_TOKEN = process.env.GITHUB_TOKEN; + + if (!GITHUB_TOKEN) { + const errorMessage = "GITHUB_TOKEN environment variable is not set."; + console.error(errorMessage); + throw new Error(errorMessage); + } + + console.log("Fetching first successful workflow run..."); + const { workflowRunId, headSha } = await fetchFirstSuccessfulRun(org, repo, workflow); + + if (!workflowRunId) { + throw new Error("No successful workflow runs found."); + } + + console.log(`Workflow Run ID: ${workflowRunId}`); + const artifactUrls = await fetchArtifactsForRun(org, repo, workflowRunId); + + if (!artifactUrls || artifactUrls.length === 0) { + throw new Error("No artifacts found for the workflow run."); + } + + const metadata = { + org, + repo, + workflow, + workflowRunId, + commitId: headSha, + collectedAt: new Date().toISOString(), + assets: [], + }; + + for (const asset of assets) { + const { name } = asset; + const artifact = artifactUrls.find((a) => a.name === name); + + if (!artifact) { + console.warn(`Asset [${name}] was not found in the workflow artifacts.`); + continue; + } + + try { + console.log(`Processing artifact: ${name}`); + + const downloadedFilePath = await downloadArtifact( + artifact.url, + name, + targetDirectory, + GITHUB_TOKEN, + ); + + // Extract the artifact + const tempDir = join(targetDirectory, "temp"); + const extractionDir = join(targetDirectory, name.replace(/\.zip$/, "")); + await extractArtifact(downloadedFilePath, tempDir, extractionDir); + + for (const file of await globby(["*", "!*.zip"], { cwd: 
extractionDir, absolute: true })) { + chmodOwnerPlusX(file); + } + + metadata.assets.push({ + name, + extractionDir, + downloadedAt: new Date().toISOString(), + }); + } catch (error) { + console.error(`Error processing artifact [${name}]:`, error.message); + throw new Error(`Failed to process artifact ${name}: ${error.message}`); + } + } + + await fs.writeJson(metaFile, metadata, { spaces: 2 }); + console.log(`Metadata written to ${metaFile}`); +} diff --git a/scripts/_github.js b/scripts/_github.js new file mode 100644 index 00000000..fb82cfa1 --- /dev/null +++ b/scripts/_github.js @@ -0,0 +1,99 @@ +/** + * Fetch the most recent successful workflow run for the branch head. + * + * @param {string} org - GitHub organization or user. + * @param {string} repo - GitHub repository name. + * @param {string} workflowFile - Name of the workflow file. + * @returns {Promise<{workflowRunId: string, headSha: string} | null>} - Object containing the workflow run ID and head SHA, or null if not found. + */ +export async function fetchFirstSuccessfulRun(org, repo, workflowFile) { + try { + console.log(`Fetching head commit SHA for branch: main`); + const res = await fetch(`https://api.github.com/repos/${org}/${repo}/branches/main`, { + headers: { + Accept: "application/vnd.github.v3+json", + }, + }); + + if (!res.ok) { + throw new Error(`Failed to fetch branch head SHA: ${res.statusText}`); + } + + const data = await res.json(); + const headSha = data.commit.sha; + console.log(`Head commit SHA: ${headSha}`); + + console.log(`Fetching workflow runs for head SHA: ${headSha}`); + const workflowResponse = await fetch( + `https://api.github.com/repos/${org}/${repo}/actions/workflows/${workflowFile}/runs?head_sha=${headSha}&per_page=1`, + { + headers: { + Accept: "application/vnd.github.v3+json", + }, + }, + ); + + if (!workflowResponse.ok) { + throw new Error(`Failed to fetch workflow runs: ${workflowResponse.statusText}`); + } + + const workflowData = await workflowResponse.json(); + if
(workflowData.workflow_runs.length === 0) { + console.log("No successful workflow runs found for the head commit."); + return null; + } + + const workflowRun = workflowData.workflow_runs[0]; + if (workflowRun.status !== "completed" || workflowRun.conclusion !== "success") { + console.log( + `Workflow run for head commit is not successful: Status = ${workflowRun.status}, Conclusion = ${workflowRun.conclusion}`, + ); + return null; + } + + console.log(`First Successful Workflow Run ID for head commit: ${workflowRun.id}`); + return { + workflowRunId: workflowRun.id, + headSha: headSha, + }; + } catch (error) { + console.error("Error fetching workflow runs:", error.message); + return null; + } +} + +/** + * Fetch artifacts for a specific workflow run. + * + * @param {string} org - GitHub organization or user. + * @param {string} repo - GitHub repository name. + * @param {string} runId - ID of the workflow run. + * @returns {Promise} - List of artifacts with download URLs. + */ +export async function fetchArtifactsForRun(org, repo, runId) { + try { + const response = await fetch( + `https://api.github.com/repos/${org}/${repo}/actions/runs/${runId}/artifacts`, + { + headers: { + Accept: "application/vnd.github.v3+json", + }, + }, + ); + + if (!response.ok) { + throw new Error(`Failed to fetch artifacts: ${response.statusText}`); + } + + const data = await response.json(); + const downloadUrls = data.artifacts.map((artifact) => ({ + name: artifact.name, + url: artifact.archive_download_url, + })); + console.log("Artifact Download URLs:", downloadUrls); + return downloadUrls; + } catch (error) { + console.error("Error fetching artifacts:", error.message); + return []; + } +} diff --git a/scripts/collect-assets.js b/scripts/collect-assets.js index 7ef7ccad..4a0a6545 100755 --- a/scripts/collect-assets.js +++ b/scripts/collect-assets.js @@ -2,7 +2,11 @@ import { ensureDir } from "fs-extra/esm"; import { resolve, join } from "path"; import { cwdToProjectRoot } from 
"./_util.js"; -import { downloadAndExtractTarGz, downloadGitHubRelease } from "./_download.js"; +import { + downloadAndExtractTarGz, + downloadGitHubRelease, + downloadWorkflowArtifacts, +} from "./_download.js"; cwdToProjectRoot(); @@ -10,30 +14,51 @@ cwdToProjectRoot(); const DOWNLOAD_DIR = resolve("downloaded_assets"); await ensureDir(DOWNLOAD_DIR); -// Download Kai assets -await downloadGitHubRelease({ - targetDirectory: join(DOWNLOAD_DIR, "kai/"), - metaFile: join(DOWNLOAD_DIR, "kai", "collect.json"), +if (process.env.WORKFLOW && process.env.WORKFLOW !== "False") { + console.log("WORKFLOW environment variable is set. Downloading workflow artifacts..."); - org: "konveyor", - repo: "kai", - releaseTag: "v0.0.6", + // Download Kai assets via workflow artifacts + await downloadWorkflowArtifacts({ + targetDirectory: join(DOWNLOAD_DIR, "kai/"), + metaFile: join(DOWNLOAD_DIR, "kai", "collect.json"), + org: "konveyor", + repo: "kai", + workflow: "build-and-push-binaries.yml", + assets: [ + { name: "java-deps.zip", chmod: false }, + { name: "kai-rpc-server.linux-aarch64.zip", platform: "linux", arch: "arm64", chmod: true }, + { name: "kai-rpc-server.linux-x86_64.zip", platform: "linux", arch: "x64", chmod: true }, + { name: "kai-rpc-server.macos-arm64.zip", platform: "darwin", arch: "arm64", chmod: true }, + { name: "kai-rpc-server.macos-x86_64.zip", platform: "darwin", arch: "x64", chmod: true }, + { name: "kai-rpc-server.windows-X64.zip", platform: "win32", arch: "x64", chmod: false }, + ], + }); +} else { + console.log("WORKFLOW environment variable is not set. 
Downloading GitHub release assets..."); - /* - Release asset filenames and nodejs equivalent platform/arch + // Download Kai assets from GitHub release + await downloadGitHubRelease({ + targetDirectory: join(DOWNLOAD_DIR, "kai/"), + metaFile: join(DOWNLOAD_DIR, "kai", "collect.json"), + org: "konveyor", + repo: "kai", + releaseTag: "v0.0.6", + /* + Release asset filenames and nodejs equivalent platform/arch platform: https://nodejs.org/docs/latest-v22.x/api/process.html#processplatform arch: https://nodejs.org/docs/latest-v22.x/api/process.html#processarch - */ - assets: [ - { name: "kai-rpc-server.linux-x86_64.zip", platform: "linux", arch: "x64", chmod: true }, - { name: "kai-rpc-server.linux-aarch64.zip", platform: "linux", arch: "arm64", chmod: true }, - { name: "kai-rpc-server.macos-x86_64.zip", platform: "darwin", arch: "x64", chmod: true }, - { name: "kai-rpc-server.macos-arm64.zip", platform: "darwin", arch: "arm64", chmod: true }, - { name: "kai-rpc-server.windows-x64.zip", platform: "win32", arch: "x64" }, - // { name: "kai-rpc-server.windows-arm64.zip", platform: "win32", arch: "arm64" }, - ], -}); - + */ + assets: [ + { name: "java-deps.zip" }, + { name: "kai-rpc-server.linux-x86_64.zip", platform: "linux", arch: "x64", chmod: true }, + { name: "kai-rpc-server.linux-aarch64.zip", platform: "linux", arch: "arm64", chmod: true }, + { name: "kai-rpc-server.macos-x86_64.zip", platform: "darwin", arch: "x64", chmod: true }, + { name: "kai-rpc-server.macos-arm64.zip", platform: "darwin", arch: "arm64", chmod: true }, + { name: "kai-rpc-server.windows-x64.zip", platform: "win32", arch: "x64" }, + // { name: "kai-rpc-server.windows-arm64.zip", platform: "win32", arch: "arm64" }, + ], + }); +} // Download jdt.ls // Base release url: https://download.eclipse.org/jdtls/milestones/1.38.0/ await downloadAndExtractTarGz({