Add tests for the GitHub service, and move all GitHub deployments logic into github.ts

Maximo Guk 2024-11-19 12:08:36 -06:00
parent 1e92eee7d2
commit cada7a6312
GPG key ID: 6ACC2847315F8810
12 changed files with 278 additions and 92 deletions


@@ -0,0 +1,5 @@
---
"wrangler-action": minor
---
Add GitHub deployments and job summaries for parity with pages-action


@@ -30,7 +30,7 @@ jobs:
      - name: Only build app
        uses: ./
        with:
-         workingDirectory: "./test/only-build"
+         workingDirectory: "./src/test/fixtures/only-build"
          apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
          accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
          command: deploy --dry-run
@@ -39,7 +39,7 @@ jobs:
        uses: ./
        with:
          quiet: true
-         workingDirectory: "./test/build-quiet"
+         workingDirectory: "./src/test/fixtures/build-quiet"
          apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
          accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
          command: deploy --dry-run
@@ -47,7 +47,7 @@ jobs:
      - name: Environment support
        uses: ./
        with:
-         workingDirectory: "./test/environment"
+         workingDirectory: "./src/test/fixtures/environment"
          apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
          accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
          environment: dev
@@ -65,7 +65,7 @@ jobs:
        uses: ./
        with:
          wranglerVersion: "2.20.0"
-         workingDirectory: "./test/secrets-v2"
+         workingDirectory: "./src/test/fixtures/secrets-v2"
          apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
          accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
          secrets: |
@@ -82,7 +82,7 @@ jobs:
      - name: Deploy app secrets w/ default version
        uses: ./
        with:
-         workingDirectory: "./test/secrets-default"
+         workingDirectory: "./src/test/fixtures/secrets-default"
          apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
          accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
          secrets: |
@@ -99,7 +99,7 @@ jobs:
      - name: Clean Up Deployed Workers
        uses: ./
        with:
-         workingDirectory: "./test/secrets-default"
+         workingDirectory: "./src/test/fixtures/secrets-default"
          apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
          accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
          command: delete --name wrangler-action-test-secrets-v2 --force
@@ -109,7 +109,7 @@ jobs:
      - name: Support packageManager variable
        uses: ./
        with:
-         workingDirectory: "./test/specify-package-manager"
+         workingDirectory: "./src/test/fixtures/specify-package-manager"
          packageManager: "npm"
          apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
          accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
@@ -118,7 +118,7 @@ jobs:
      - name: Support unspecified packageManager with no lockfile
        uses: ./
        with:
-         workingDirectory: "./test/unspecified-package-manager"
+         workingDirectory: "./src/test/fixtures/unspecified-package-manager"
          apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
          accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
          command: deploy --dry-run
@@ -126,7 +126,7 @@ jobs:
      - name: Support npm package manager
        uses: ./
        with:
-         workingDirectory: "./test/npm"
+         workingDirectory: "./src/test/fixtures/npm"
          apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
          accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
          command: deploy --dry-run
@@ -137,7 +137,7 @@ jobs:
      - name: Support yarn package manager
        uses: ./
        with:
-         workingDirectory: "./test/yarn"
+         workingDirectory: "./src/test/fixtures/yarn"
          apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
          accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
          command: deploy --dry-run
@@ -148,18 +148,18 @@ jobs:
      - name: Support pnpm package manager
        uses: ./
        with:
-         workingDirectory: "./test/pnpm"
+         workingDirectory: "./src/test/fixtures/pnpm"
          apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
          accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
          command: deploy --dry-run
      - name: Change directory to pre-installed-wrangler and install dependencies
        run: |
-         cd ./test/pre-installed-wrangler
+         cd ./src/test/fixtures/pre-installed-wrangler
          npm install
      - name: Support pre-installed wrangler
        uses: ./
        with:
-         workingDirectory: "./test/pre-installed-wrangler"
+         workingDirectory: "./src/test/fixtures/pre-installed-wrangler"
          command: action-test

package-lock.json (generated)

@@ -18,6 +18,7 @@
    "@changesets/changelog-github": "^0.5.0",
    "@changesets/cli": "^2.27.9",
    "@cloudflare/workers-types": "^4.20241022.0",
+   "@types/mock-fs": "^4.13.4",
    "@types/node": "^22.9.0",
    "@types/semver": "^7.5.8",
    "@vercel/ncc": "^0.38.2",
@@ -1375,6 +1376,15 @@
      "integrity": "sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==",
      "dev": true
    },
+   "node_modules/@types/mock-fs": {
+     "version": "4.13.4",
+     "resolved": "https://registry.npmjs.org/@types/mock-fs/-/mock-fs-4.13.4.tgz",
+     "integrity": "sha512-mXmM0o6lULPI8z3XNnQCpL0BGxPwx1Ul1wXYEPBGl4efShyxW2Rln0JOPEWGyZaYZMM6OVXM/15zUuFMY52ljg==",
+     "dev": true,
+     "dependencies": {
+       "@types/node": "*"
+     }
+   },
    "node_modules/@types/node": {
      "version": "22.9.0",
      "resolved": "https://registry.npmjs.org/@types/node/-/node-22.9.0.tgz",


@@ -38,6 +38,7 @@
    "@changesets/changelog-github": "^0.5.0",
    "@changesets/cli": "^2.27.9",
    "@cloudflare/workers-types": "^4.20241022.0",
+   "@types/mock-fs": "^4.13.4",
    "@types/node": "^22.9.0",
    "@types/semver": "^7.5.8",
    "@vercel/ncc": "^0.38.2",


@@ -3,8 +3,9 @@ import { getPackageManager } from "./packageManagers";
describe("getPackageManager", () => {
	test("should use provided value instead of inferring from lockfile", () => {
-		expect(getPackageManager("npm", { workingDirectory: "src/test/fixtures/npm" }))
-			.toMatchInlineSnapshot(`
+		expect(
+			getPackageManager("npm", { workingDirectory: "src/test/fixtures/npm" }),
+		).toMatchInlineSnapshot(`
			{
			  "exec": "npx",
			  "execNoInstall": "npx --no-install",
@@ -12,8 +13,9 @@ describe("getPackageManager", () => {
			}
		`);
-		expect(getPackageManager("yarn", { workingDirectory: "src/test/fixtures/npm" }))
-			.toMatchInlineSnapshot(`
+		expect(
+			getPackageManager("yarn", { workingDirectory: "src/test/fixtures/npm" }),
+		).toMatchInlineSnapshot(`
			{
			  "exec": "yarn",
			  "execNoInstall": "yarn",
@@ -21,8 +23,9 @@ describe("getPackageManager", () => {
			}
		`);
-		expect(getPackageManager("pnpm", { workingDirectory: "src/test/fixtures/npm" }))
-			.toMatchInlineSnapshot(`
+		expect(
+			getPackageManager("pnpm", { workingDirectory: "src/test/fixtures/npm" }),
+		).toMatchInlineSnapshot(`
			{
			  "exec": "pnpm exec",
			  "execNoInstall": "pnpm exec",
@@ -30,8 +33,9 @@ describe("getPackageManager", () => {
			}
		`);
-		expect(getPackageManager("bun", { workingDirectory: "src/test/fixtures/bun" }))
-			.toMatchInlineSnapshot(`
+		expect(
+			getPackageManager("bun", { workingDirectory: "src/test/fixtures/bun" }),
+		).toMatchInlineSnapshot(`
			{
			  "exec": "bunx",
			  "execNoInstall": "bun run",
@@ -52,8 +56,9 @@ describe("getPackageManager", () => {
	});
	test("should use yarn if no value provided and yarn.lock exists", () => {
-		expect(getPackageManager("", { workingDirectory: "src/test/fixtures/yarn" }))
-			.toMatchInlineSnapshot(`
+		expect(
+			getPackageManager("", { workingDirectory: "src/test/fixtures/yarn" }),
+		).toMatchInlineSnapshot(`
			{
			  "exec": "yarn",
			  "execNoInstall": "yarn",
@@ -63,8 +68,9 @@ describe("getPackageManager", () => {
	});
	test("should use pnpm if no value provided and pnpm-lock.yaml exists", () => {
-		expect(getPackageManager("", { workingDirectory: "src/test/fixtures/pnpm" }))
-			.toMatchInlineSnapshot(`
+		expect(
+			getPackageManager("", { workingDirectory: "src/test/fixtures/pnpm" }),
+		).toMatchInlineSnapshot(`
			{
			  "exec": "pnpm exec",
			  "execNoInstall": "pnpm exec",
@@ -85,8 +91,9 @@ describe("getPackageManager", () => {
	});
	test("should use npm if no value provided and no lockfile is present", () => {
-		expect(getPackageManager("", { workingDirectory: "src/test/fixtures/empty" }))
-			.toMatchInlineSnapshot(`
+		expect(
+			getPackageManager("", { workingDirectory: "src/test/fixtures/empty" }),
+		).toMatchInlineSnapshot(`
			{
			  "exec": "npx",
			  "execNoInstall": "npx --no-install",


@@ -0,0 +1,59 @@
import { afterEach, describe, expect, it, vi } from "vitest";
import { setupServer } from "msw/node";
import { createGitHubDeployment, createJobSummary } from "./github";
import { getOctokit } from "@actions/github";
import { mockGithubDeployments } from "../test/mocks";
import { getTestConfig } from "../test/test-utils";
import mockfs from "mock-fs";
import { readFile } from "fs/promises";
afterEach(() => {
mockfs.restore();
});
describe("github", () => {
it("Calls createGitHubDeployment successfully", async () => {
const githubUser = "mock-user";
const githubRepoName = "wrangler-action";
const server = setupServer(
...mockGithubDeployments({ githubUser, githubRepoName }).handlers,
);
server.listen({ onUnhandledRequest: "error" });
vi.stubEnv("GITHUB_REPOSITORY", `${githubUser}/${githubRepoName}`);
const testConfig = getTestConfig();
const octokit = getOctokit(testConfig.GITHUB_TOKEN, { request: fetch });
await createGitHubDeployment({
config: testConfig,
octokit,
productionBranch: "production-branch",
deploymentId: "fake-deployment-id",
projectName: "fake-project-name",
deploymentUrl: "https://fake-deployment-url.com",
environment: "production",
});
server.close();
});
it("Calls createJobSummary successfully", async () => {
vi.stubEnv("GITHUB_STEP_SUMMARY", "summary");
mockfs({
summary: mockfs.file(),
});
await createJobSummary({
commitHash: "fake-commit-hash",
deploymentUrl: "https://fake-deployment-url.com",
aliasUrl: "https://fake-alias-url.com",
});
expect((await readFile("summary")).toString()).toMatchInlineSnapshot(`
"
# Deploying with Cloudflare Pages
| Name | Result |
| ----------------------- | - |
| **Last commit:** | fake-commit-hash |
| **Preview URL**: | https://fake-deployment-url.com |
| **Branch Preview URL**: | https://fake-alias-url.com |
"
`);
});
});

src/service/github.ts (new file)

@@ -0,0 +1,120 @@
import { summary } from "@actions/core";
import { context, getOctokit } from "@actions/github";
import { env } from "process";
import { info } from "../utils";
import { OutputEntryPagesDeployment } from "../wranglerArtifactManager";
import { WranglerActionConfig } from "../wranglerAction";
type Octokit = ReturnType<typeof getOctokit>;
export async function createGitHubDeployment({
config,
octokit,
productionBranch,
environment,
deploymentId,
projectName,
deploymentUrl,
}: {
config: WranglerActionConfig;
octokit: Octokit;
productionBranch: string;
environment: string;
deploymentId: string | null;
projectName: string;
deploymentUrl?: string;
}) {
const githubBranch = env.GITHUB_HEAD_REF || env.GITHUB_REF_NAME;
const productionEnvironment = githubBranch === productionBranch;
const deployment = await octokit.rest.repos.createDeployment({
owner: context.repo.owner,
repo: context.repo.repo,
ref: githubBranch || context.ref,
auto_merge: false,
description: "Cloudflare Pages",
required_contexts: [],
environment,
production_environment: productionEnvironment,
});
if (deployment.status !== 201) {
info(config, "Error creating GitHub deployment");
return;
}
await octokit.rest.repos.createDeploymentStatus({
owner: context.repo.owner,
repo: context.repo.repo,
deployment_id: deployment.data.id,
environment,
environment_url: deploymentUrl,
production_environment: productionEnvironment,
// don't have project_name or deployment_id I think
log_url: `https://dash.cloudflare.com/${config.CLOUDFLARE_ACCOUNT_ID}/pages/view/${projectName}/${deploymentId}`,
description: "Cloudflare Pages",
state: "success",
auto_inactive: false,
});
}
export async function createJobSummary({
commitHash,
deploymentUrl,
aliasUrl,
}: {
commitHash: string;
deploymentUrl?: string;
aliasUrl?: string;
}) {
await summary
.addRaw(
`
# Deploying with Cloudflare Pages
| Name | Result |
| ----------------------- | - |
| **Last commit:** | ${commitHash} |
| **Preview URL**: | ${deploymentUrl} |
| **Branch Preview URL**: | ${aliasUrl} |
`,
)
.write();
}
/**
* Create github deployment, if GITHUB_TOKEN is present in config
*/
export async function createGitHubDeploymentAndJobSummary(
config: WranglerActionConfig,
pagesArtifactFields: OutputEntryPagesDeployment,
) {
if (
config.GITHUB_TOKEN &&
pagesArtifactFields.production_branch &&
pagesArtifactFields.pages_project &&
pagesArtifactFields.deployment_trigger &&
pagesArtifactFields.stages
) {
const octokit = getOctokit(config.GITHUB_TOKEN);
await Promise.all([
createGitHubDeployment({
config,
octokit,
deploymentUrl: pagesArtifactFields.url,
productionBranch: pagesArtifactFields.production_branch,
environment: pagesArtifactFields.environment,
deploymentId: pagesArtifactFields.deployment_id,
projectName: pagesArtifactFields.pages_project,
}),
createJobSummary({
commitHash:
pagesArtifactFields.deployment_trigger.metadata.commit_hash.substring(
0,
8,
),
deploymentUrl: pagesArtifactFields.url,
aliasUrl: pagesArtifactFields.alias,
}),
]);
}
}
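As a usage sketch (not part of the diff): createGitHubDeploymentAndJobSummary only acts when GITHUB_TOKEN is set and the parsed artifact carries production_branch, pages_project, deployment_trigger, and stages; otherwise it returns without doing anything. The literal values below are placeholders, and the cast glosses over artifact fields not shown here.

```ts
import { createGitHubDeploymentAndJobSummary } from "./service/github";
import type { OutputEntryPagesDeployment } from "./wranglerArtifactManager";
import type { WranglerActionConfig } from "./wranglerAction";

async function example(config: WranglerActionConfig) {
	// Placeholder artifact data; real values come from wrangler's output files.
	const pagesArtifactFields = {
		url: "https://fake-deployment-url.com",
		alias: "https://fake-alias-url.com",
		environment: "preview",
		production_branch: "main",
		pages_project: "fake-project-name",
		deployment_id: "fake-deployment-id",
		deployment_trigger: { metadata: { commit_hash: "abcdef1234567890" } },
		stages: [],
	} as unknown as OutputEntryPagesDeployment;

	// No-op unless config.GITHUB_TOKEN and the optional fields above are present.
	await createGitHubDeploymentAndJobSummary(config, pagesArtifactFields);
}
```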


@@ -1,4 +1,5 @@
import { http, HttpResponse } from "msw";
+import { z } from "zod";

export function mockGithubDeployments({
	githubUser,
@@ -15,6 +16,15 @@ export function mockGithubDeployments({
			if (request.headers.get("Authorization") === null) {
				return HttpResponse.text("error: no auth token", { status: 400 });
			}
+			const GithubDeploymentsRequest = z.object({
+				auto_merge: z.literal(false),
+				description: z.literal("Cloudflare Pages"),
+				required_contexts: z.array(z.string()).length(0),
+				environment: z.literal("production"),
+				production_environment: z.literal(false),
+			});
+			// validate request body
+			GithubDeploymentsRequest.parse(await request.json());
			return HttpResponse.json(null);
		},


@@ -1,6 +1,8 @@
import { existsSync } from "node:fs";
import * as path from "node:path";
import semverGt from "semver/functions/gt";
+import { info as originalInfo, error as originalError } from "@actions/core";
+import { WranglerActionConfig } from "./wranglerAction";

/**
 * A helper function to compare two semver versions. If the second arg is greater than the first arg, it returns true.
@@ -19,3 +21,23 @@ export function checkWorkingDirectory(workingDirectory = ".") {
		throw new Error(`Directory ${workingDirectory} does not exist.`);
	}
}
+
+export function info(
+	config: WranglerActionConfig,
+	message: string,
+	bypass?: boolean,
+): void {
+	if (!config.QUIET_MODE || bypass) {
+		originalInfo(message);
+	}
+}
+
+export function error(
+	config: WranglerActionConfig,
+	message: string,
+	bypass?: boolean,
+): void {
+	if (!config.QUIET_MODE || bypass) {
+		originalError(message);
+	}
+}
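For context (not part of the diff): the moved info and error helpers log via @actions/core but respect the action's quiet mode unless the bypass flag is set. A minimal sketch, assuming a hypothetical partial config object for illustration:

```ts
import { error, info } from "./utils";
import type { WranglerActionConfig } from "./wranglerAction";

// Hypothetical partial config; the real object is parsed from the action's inputs.
const config = { QUIET_MODE: true } as WranglerActionConfig;

info(config, "Installing Wrangler..."); // suppressed while quiet mode is on
info(config, "Deployment URL: https://example.com", true); // bypass flag: always printed
error(config, "Deployment failed", true); // always reported, even in quiet mode
```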


@@ -2,8 +2,6 @@ import {
	debug,
	getMultilineInput,
	endGroup as originalEndGroup,
-	error as originalError,
-	info as originalInfo,
	startGroup as originalStartGroup,
	setFailed,
	setOutput,
@@ -13,10 +11,9 @@ import semverEq from "semver/functions/eq";
import { z } from "zod";
import { exec, execShell } from "./exec";
import { PackageManager } from "./packageManagers";
-import { semverCompare } from "./utils";
+import { error, info, semverCompare } from "./utils";
import { getDetailedPagesDeployOutput } from "./wranglerArtifactManager";
-import { createGitHubDeployment, createJobSummary } from "./service/github";
-import { getOctokit } from "@actions/github";
+import { createGitHubDeploymentAndJobSummary } from "./service/github";

export type WranglerActionConfig = z.infer<typeof wranglerActionConfig>;
export const wranglerActionConfig = z.object({
@@ -35,26 +32,6 @@ export const wranglerActionConfig = z.object({
	GITHUB_TOKEN: z.string(),
});

-function info(
-	config: WranglerActionConfig,
-	message: string,
-	bypass?: boolean,
-): void {
-	if (!config.QUIET_MODE || bypass) {
-		originalInfo(message);
-	}
-}
-
-function error(
-	config: WranglerActionConfig,
-	message: string,
-	bypass?: boolean,
-): void {
-	if (!config.QUIET_MODE || bypass) {
-		originalError(message);
-	}
-}
-
function startGroup(config: WranglerActionConfig, name: string): void {
	if (!config.QUIET_MODE) {
		originalStartGroup(name);
@@ -423,36 +400,11 @@ async function wranglerCommands(
			setOutput("pages-deployment-alias-url", pagesArtifactFields.alias);
			setOutput("pages-deployment-id", pagesArtifactFields.deployment_id);
			setOutput("pages-environment", pagesArtifactFields.environment);
-			// create github deployment, if GITHUB_TOKEN is provided
-			if (
-				config.GITHUB_TOKEN &&
-				pagesArtifactFields.production_branch &&
-				pagesArtifactFields.project_name &&
-				pagesArtifactFields.deployment_trigger &&
-				pagesArtifactFields.stages
-			) {
-				const octokit = getOctokit(config.GITHUB_TOKEN);
-				await Promise.all([
-					createGitHubDeployment({
-						config,
-						octokit,
-						deploymentUrl: pagesArtifactFields.url,
-						productionBranch: pagesArtifactFields.production_branch,
-						environment: pagesArtifactFields.environment,
-						deploymentId: pagesArtifactFields.deployment_id,
-						projectName: pagesArtifactFields.project_name,
-					}),
-					createJobSummary({
-						commitHash:
-							pagesArtifactFields.deployment_trigger.metadata.commit_hash.substring(
-								0,
-								8,
-							),
-						deploymentUrl: pagesArtifactFields.url,
-						aliasUrl: pagesArtifactFields.alias,
-					}),
-				]);
-			}
+			// Create github deployment, if GITHUB_TOKEN is present in config
+			await createGitHubDeploymentAndJobSummary(
+				config,
+				pagesArtifactFields,
+			);
		} else {
			info(
				config,

@@ -1,17 +1,17 @@
-import mock from "mock-fs";
+import mockfs from "mock-fs";
import { afterEach, describe, expect, it } from "vitest";
import {
	getDetailedPagesDeployOutput,
	getWranglerArtifacts,
} from "./wranglerArtifactManager";

-afterEach(async () => {
-	mock.restore();
+afterEach(() => {
+	mockfs.restore();
});

describe("wranglerArtifactsManager", () => {
	describe("getWranglerArtifacts()", async () => {
		it("Returns only wrangler output files from a given directory", async () => {
-			mock({
+			mockfs({
				testOutputDir: {
					"wrangler-output-2024-10-17_18-48-40_463-2e6e83.json": `
					{"version": 1, "type":"wrangler-session", "wrangler_version":"3.81.0", "command_line_args":["what's up"], "log_file_path": "/here"}
@@ -27,7 +27,7 @@ describe("wranglerArtifactsManager", () => {
			]);
		});
		it("Returns an empty list when the output directory doesn't exist", async () => {
-			mock({
+			mockfs({
				notTheDirWeWant: {},
			});
@@ -38,7 +38,7 @@ describe("wranglerArtifactsManager", () => {

	describe("getDetailedPagesDeployOutput()", async () => {
		it("Returns only detailed pages deploy output from wrangler artifacts", async () => {
-			mock({
+			mockfs({
				testOutputDir: {
					"wrangler-output-2024-10-17_18-48-40_463-2e6e83.json": `
					{"version": 1, "type":"wrangler-session", "wrangler_version":"3.81.0", "command_line_args":["what's up"], "log_file_path": "/here"}
@@ -60,7 +60,7 @@ describe("wranglerArtifactsManager", () => {
			});
		}),
		it("Skips artifact entries that are not parseable", async () => {
-			mock({
+			mockfs({
				testOutputDir: {
					"wrangler-output-2024-10-17_18-48-40_463-2e6e83.json": `
					this line is invalid json.


@@ -15,8 +15,6 @@ const OutputEntryPagesDeployment = OutputEntryBase.merge(
		alias: z.string().optional(),
		environment: z.enum(["production", "preview"]),
		// optional, added in wrangler@TBD
-		project_name: z.string().optional(),
-		// optional, added in wrangler@TBD
		production_branch: z.string().optional(),
		// optional, added in wrangler@TBD
		stages: z
@@ -54,7 +52,9 @@ const OutputEntryPagesDeployment = OutputEntryBase.merge(
		}),
);

-type OutputEntryPagesDeployment = z.infer<typeof OutputEntryPagesDeployment>;
+export type OutputEntryPagesDeployment = z.infer<
+	typeof OutputEntryPagesDeployment
+>;

/**
 * Parses file names in a directory to find wrangler artifact files