Add tests for the GitHub service and move all GitHub deployments logic into github.ts

Maximo Guk 2024-11-19 12:08:36 -06:00
parent 1e92eee7d2
commit cada7a6312
12 changed files with 278 additions and 92 deletions


@ -0,0 +1,5 @@
---
"wrangler-action": minor
---
Add GitHub deployments and job summaries for parity with pages-action


@ -30,7 +30,7 @@ jobs:
- name: Only build app
uses: ./
with:
workingDirectory: "./test/only-build"
workingDirectory: "./src/test/fixtures/only-build"
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
command: deploy --dry-run
@ -39,7 +39,7 @@ jobs:
uses: ./
with:
quiet: true
workingDirectory: "./test/build-quiet"
workingDirectory: "./src/test/fixtures/build-quiet"
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
command: deploy --dry-run
@ -47,7 +47,7 @@ jobs:
- name: Environment support
uses: ./
with:
workingDirectory: "./test/environment"
workingDirectory: "./src/test/fixtures/environment"
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
environment: dev
@ -65,7 +65,7 @@ jobs:
uses: ./
with:
wranglerVersion: "2.20.0"
workingDirectory: "./test/secrets-v2"
workingDirectory: "./src/test/fixtures/secrets-v2"
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
secrets: |
@ -82,7 +82,7 @@ jobs:
- name: Deploy app secrets w/ default version
uses: ./
with:
workingDirectory: "./test/secrets-default"
workingDirectory: "./src/test/fixtures/secrets-default"
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
secrets: |
@ -99,7 +99,7 @@ jobs:
- name: Clean Up Deployed Workers
uses: ./
with:
workingDirectory: "./test/secrets-default"
workingDirectory: "./src/test/fixtures/secrets-default"
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
command: delete --name wrangler-action-test-secrets-v2 --force
@ -109,7 +109,7 @@ jobs:
- name: Support packageManager variable
uses: ./
with:
workingDirectory: "./test/specify-package-manager"
workingDirectory: "./src/test/fixtures/specify-package-manager"
packageManager: "npm"
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
@ -118,7 +118,7 @@ jobs:
- name: Support unspecified packageManager with no lockfile
uses: ./
with:
workingDirectory: "./test/unspecified-package-manager"
workingDirectory: "./src/test/fixtures/unspecified-package-manager"
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
command: deploy --dry-run
@ -126,7 +126,7 @@ jobs:
- name: Support npm package manager
uses: ./
with:
workingDirectory: "./test/npm"
workingDirectory: "./src/test/fixtures/npm"
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
command: deploy --dry-run
@ -137,7 +137,7 @@ jobs:
- name: Support yarn package manager
uses: ./
with:
workingDirectory: "./test/yarn"
workingDirectory: "./src/test/fixtures/yarn"
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
command: deploy --dry-run
@ -148,18 +148,18 @@ jobs:
- name: Support pnpm package manager
uses: ./
with:
workingDirectory: "./test/pnpm"
workingDirectory: "./src/test/fixtures/pnpm"
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
command: deploy --dry-run
- name: Change directory to pre-installed-wrangler and install dependencies
run: |
cd ./test/pre-installed-wrangler
cd ./src/test/fixtures/pre-installed-wrangler
npm install
- name: Support pre-installed wrangler
uses: ./
with:
workingDirectory: "./test/pre-installed-wrangler"
workingDirectory: "./src/test/fixtures/pre-installed-wrangler"
command: action-test

package-lock.json (generated)

@ -18,6 +18,7 @@
"@changesets/changelog-github": "^0.5.0",
"@changesets/cli": "^2.27.9",
"@cloudflare/workers-types": "^4.20241022.0",
"@types/mock-fs": "^4.13.4",
"@types/node": "^22.9.0",
"@types/semver": "^7.5.8",
"@vercel/ncc": "^0.38.2",
@ -1375,6 +1376,15 @@
"integrity": "sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==",
"dev": true
},
"node_modules/@types/mock-fs": {
"version": "4.13.4",
"resolved": "https://registry.npmjs.org/@types/mock-fs/-/mock-fs-4.13.4.tgz",
"integrity": "sha512-mXmM0o6lULPI8z3XNnQCpL0BGxPwx1Ul1wXYEPBGl4efShyxW2Rln0JOPEWGyZaYZMM6OVXM/15zUuFMY52ljg==",
"dev": true,
"dependencies": {
"@types/node": "*"
}
},
"node_modules/@types/node": {
"version": "22.9.0",
"resolved": "https://registry.npmjs.org/@types/node/-/node-22.9.0.tgz",


@ -38,6 +38,7 @@
"@changesets/changelog-github": "^0.5.0",
"@changesets/cli": "^2.27.9",
"@cloudflare/workers-types": "^4.20241022.0",
"@types/mock-fs": "^4.13.4",
"@types/node": "^22.9.0",
"@types/semver": "^7.5.8",
"@vercel/ncc": "^0.38.2",


@ -3,8 +3,9 @@ import { getPackageManager } from "./packageManagers";
describe("getPackageManager", () => {
test("should use provided value instead of inferring from lockfile", () => {
expect(getPackageManager("npm", { workingDirectory: "src/test/fixtures/npm" }))
.toMatchInlineSnapshot(`
expect(
getPackageManager("npm", { workingDirectory: "src/test/fixtures/npm" }),
).toMatchInlineSnapshot(`
{
"exec": "npx",
"execNoInstall": "npx --no-install",
@ -12,8 +13,9 @@ describe("getPackageManager", () => {
}
`);
expect(getPackageManager("yarn", { workingDirectory: "src/test/fixtures/npm" }))
.toMatchInlineSnapshot(`
expect(
getPackageManager("yarn", { workingDirectory: "src/test/fixtures/npm" }),
).toMatchInlineSnapshot(`
{
"exec": "yarn",
"execNoInstall": "yarn",
@ -21,8 +23,9 @@ describe("getPackageManager", () => {
}
`);
expect(getPackageManager("pnpm", { workingDirectory: "src/test/fixtures/npm" }))
.toMatchInlineSnapshot(`
expect(
getPackageManager("pnpm", { workingDirectory: "src/test/fixtures/npm" }),
).toMatchInlineSnapshot(`
{
"exec": "pnpm exec",
"execNoInstall": "pnpm exec",
@ -30,8 +33,9 @@ describe("getPackageManager", () => {
}
`);
expect(getPackageManager("bun", { workingDirectory: "src/test/fixtures/bun" }))
.toMatchInlineSnapshot(`
expect(
getPackageManager("bun", { workingDirectory: "src/test/fixtures/bun" }),
).toMatchInlineSnapshot(`
{
"exec": "bunx",
"execNoInstall": "bun run",
@ -52,8 +56,9 @@ describe("getPackageManager", () => {
});
test("should use yarn if no value provided and yarn.lock exists", () => {
expect(getPackageManager("", { workingDirectory: "src/test/fixtures/yarn" }))
.toMatchInlineSnapshot(`
expect(
getPackageManager("", { workingDirectory: "src/test/fixtures/yarn" }),
).toMatchInlineSnapshot(`
{
"exec": "yarn",
"execNoInstall": "yarn",
@ -63,8 +68,9 @@ describe("getPackageManager", () => {
});
test("should use pnpm if no value provided and pnpm-lock.yaml exists", () => {
expect(getPackageManager("", { workingDirectory: "src/test/fixtures/pnpm" }))
.toMatchInlineSnapshot(`
expect(
getPackageManager("", { workingDirectory: "src/test/fixtures/pnpm" }),
).toMatchInlineSnapshot(`
{
"exec": "pnpm exec",
"execNoInstall": "pnpm exec",
@ -85,8 +91,9 @@ describe("getPackageManager", () => {
});
test("should use npm if no value provided and no lockfile is present", () => {
expect(getPackageManager("", { workingDirectory: "src/test/fixtures/empty" }))
.toMatchInlineSnapshot(`
expect(
getPackageManager("", { workingDirectory: "src/test/fixtures/empty" }),
).toMatchInlineSnapshot(`
{
"exec": "npx",
"execNoInstall": "npx --no-install",


@ -0,0 +1,59 @@
import { afterEach, describe, expect, it, vi } from "vitest";
import { setupServer } from "msw/node";
import { createGitHubDeployment, createJobSummary } from "./github";
import { getOctokit } from "@actions/github";
import { mockGithubDeployments } from "../test/mocks";
import { getTestConfig } from "../test/test-utils";
import mockfs from "mock-fs";
import { readFile } from "fs/promises";
afterEach(() => {
mockfs.restore();
});
describe("github", () => {
it("Calls createGitHubDeployment successfully", async () => {
const githubUser = "mock-user";
const githubRepoName = "wrangler-action";
const server = setupServer(
...mockGithubDeployments({ githubUser, githubRepoName }).handlers,
);
server.listen({ onUnhandledRequest: "error" });
vi.stubEnv("GITHUB_REPOSITORY", `${githubUser}/${githubRepoName}`);
const testConfig = getTestConfig();
const octokit = getOctokit(testConfig.GITHUB_TOKEN, { request: fetch });
await createGitHubDeployment({
config: testConfig,
octokit,
productionBranch: "production-branch",
deploymentId: "fake-deployment-id",
projectName: "fake-project-name",
deploymentUrl: "https://fake-deployment-url.com",
environment: "production",
});
server.close();
});
it("Calls createJobSummary successfully", async () => {
vi.stubEnv("GITHUB_STEP_SUMMARY", "summary");
mockfs({
summary: mockfs.file(),
});
await createJobSummary({
commitHash: "fake-commit-hash",
deploymentUrl: "https://fake-deployment-url.com",
aliasUrl: "https://fake-alias-url.com",
});
expect((await readFile("summary")).toString()).toMatchInlineSnapshot(`
"
# Deploying with Cloudflare Pages
| Name | Result |
| ----------------------- | - |
| **Last commit:** | fake-commit-hash |
| **Preview URL**: | https://fake-deployment-url.com |
| **Branch Preview URL**: | https://fake-alias-url.com |
"
`);
});
});
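
For context on the createJobSummary test above (not part of the diff): @actions/core's summary helper buffers markdown and flushes it to the file named by the GITHUB_STEP_SUMMARY environment variable when write() is called, which is why the test stubs that variable and backs it with an in-memory file via mock-fs. A minimal standalone sketch of the same mechanism, assuming only documented @actions/core and mock-fs behavior:

import { summary } from "@actions/core";
import { readFile } from "fs/promises";
import mockfs from "mock-fs";

async function demoJobSummary() {
  process.env.GITHUB_STEP_SUMMARY = "summary"; // @actions/core writes the summary buffer to this path
  mockfs({ summary: mockfs.file() }); // back that path with an empty in-memory file
  await summary.addRaw("# Hello from a job summary").write();
  const written = (await readFile("summary")).toString();
  mockfs.restore();
  return written; // "# Hello from a job summary"
}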

src/service/github.ts (new file)

@ -0,0 +1,120 @@
import { summary } from "@actions/core";
import { context, getOctokit } from "@actions/github";
import { env } from "process";
import { info } from "../utils";
import { OutputEntryPagesDeployment } from "../wranglerArtifactManager";
import { WranglerActionConfig } from "../wranglerAction";
type Octokit = ReturnType<typeof getOctokit>;
export async function createGitHubDeployment({
config,
octokit,
productionBranch,
environment,
deploymentId,
projectName,
deploymentUrl,
}: {
config: WranglerActionConfig;
octokit: Octokit;
productionBranch: string;
environment: string;
deploymentId: string | null;
projectName: string;
deploymentUrl?: string;
}) {
const githubBranch = env.GITHUB_HEAD_REF || env.GITHUB_REF_NAME;
const productionEnvironment = githubBranch === productionBranch;
const deployment = await octokit.rest.repos.createDeployment({
owner: context.repo.owner,
repo: context.repo.repo,
ref: githubBranch || context.ref,
auto_merge: false,
description: "Cloudflare Pages",
required_contexts: [],
environment,
production_environment: productionEnvironment,
});
if (deployment.status !== 201) {
info(config, "Error creating GitHub deployment");
return;
}
await octokit.rest.repos.createDeploymentStatus({
owner: context.repo.owner,
repo: context.repo.repo,
deployment_id: deployment.data.id,
environment,
environment_url: deploymentUrl,
production_environment: productionEnvironment,
// don't have project_name or deployment_id I think
log_url: `https://dash.cloudflare.com/${config.CLOUDFLARE_ACCOUNT_ID}/pages/view/${projectName}/${deploymentId}`,
description: "Cloudflare Pages",
state: "success",
auto_inactive: false,
});
}
export async function createJobSummary({
commitHash,
deploymentUrl,
aliasUrl,
}: {
commitHash: string;
deploymentUrl?: string;
aliasUrl?: string;
}) {
await summary
.addRaw(
`
# Deploying with Cloudflare Pages
| Name | Result |
| ----------------------- | - |
| **Last commit:** | ${commitHash} |
| **Preview URL**: | ${deploymentUrl} |
| **Branch Preview URL**: | ${aliasUrl} |
`,
)
.write();
}
/**
* Create github deployment, if GITHUB_TOKEN is present in config
*/
export async function createGitHubDeploymentAndJobSummary(
config: WranglerActionConfig,
pagesArtifactFields: OutputEntryPagesDeployment,
) {
if (
config.GITHUB_TOKEN &&
pagesArtifactFields.production_branch &&
pagesArtifactFields.pages_project &&
pagesArtifactFields.deployment_trigger &&
pagesArtifactFields.stages
) {
const octokit = getOctokit(config.GITHUB_TOKEN);
await Promise.all([
createGitHubDeployment({
config,
octokit,
deploymentUrl: pagesArtifactFields.url,
productionBranch: pagesArtifactFields.production_branch,
environment: pagesArtifactFields.environment,
deploymentId: pagesArtifactFields.deployment_id,
projectName: pagesArtifactFields.pages_project,
}),
createJobSummary({
commitHash:
pagesArtifactFields.deployment_trigger.metadata.commit_hash.substring(
0,
8,
),
deploymentUrl: pagesArtifactFields.url,
aliasUrl: pagesArtifactFields.alias,
}),
]);
}
}
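
A caller-side sketch of createGitHubDeploymentAndJobSummary (not part of the diff), with a hand-built artifact object limited to the fields the guards and call sites above actually read. The real object comes from getDetailedPagesDeployOutput and its full shape is defined in wranglerArtifactManager.ts, so the values and the cast here are illustrative assumptions; the snippet also assumes it lives next to wranglerAction.ts in src/:

import { createGitHubDeploymentAndJobSummary } from "./service/github";
import type { OutputEntryPagesDeployment } from "./wranglerArtifactManager";
import type { WranglerActionConfig } from "./wranglerAction";

async function example(config: WranglerActionConfig) {
  // Illustrative values only; real artifacts are parsed from wrangler's output files.
  const pagesArtifactFields = {
    url: "https://abc123.example-project.pages.dev",
    alias: "https://feature-branch.example-project.pages.dev",
    environment: "preview",
    deployment_id: "abc123",
    pages_project: "example-project",
    production_branch: "main",
    deployment_trigger: { metadata: { commit_hash: "0123456789abcdef" } },
    stages: [],
  } as unknown as OutputEntryPagesDeployment;

  // No-op unless config.GITHUB_TOKEN is set and the guarded fields are present.
  await createGitHubDeploymentAndJobSummary(config, pagesArtifactFields);
}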


@ -1,4 +1,5 @@
import { http, HttpResponse } from "msw";
import { z } from "zod";
export function mockGithubDeployments({
githubUser,
@ -15,6 +16,15 @@ export function mockGithubDeployments({
if (request.headers.get("Authorization") === null) {
return HttpResponse.text("error: no auth token", { status: 400 });
}
const GithubDeploymentsRequest = z.object({
auto_merge: z.literal(false),
description: z.literal("Cloudflare Pages"),
required_contexts: z.array(z.string()).length(0),
environment: z.literal("production"),
production_environment: z.literal(false),
});
// validate request body
GithubDeploymentsRequest.parse(await request.json());
return HttpResponse.json(null);
},
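
The handler above validates the deployment request body with zod: parse throws a ZodError when an expected field is missing or has an unexpected value (extra fields still pass, since the object schema is not strict), and that exception surfaces as a failing test. A small standalone illustration of the same behavior, not part of the diff:

import { z } from "zod";

const GithubDeploymentsRequest = z.object({
  auto_merge: z.literal(false),
  description: z.literal("Cloudflare Pages"),
  required_contexts: z.array(z.string()).length(0),
  environment: z.literal("production"),
  production_environment: z.literal(false),
});

// Accepted: matches what createGitHubDeployment sends for a non-production branch.
GithubDeploymentsRequest.parse({
  auto_merge: false,
  description: "Cloudflare Pages",
  required_contexts: [],
  environment: "production",
  production_environment: false,
});

// Would throw a ZodError (and fail the test): the description differs from the expected literal.
// GithubDeploymentsRequest.parse({ auto_merge: false, description: "oops", required_contexts: [], environment: "production", production_environment: false });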


@ -1,6 +1,8 @@
import { existsSync } from "node:fs";
import * as path from "node:path";
import semverGt from "semver/functions/gt";
import { info as originalInfo, error as originalError } from "@actions/core";
import { WranglerActionConfig } from "./wranglerAction";
/**
* A helper function to compare two semver versions. If the second arg is greater than the first arg, it returns true.
@ -19,3 +21,23 @@ export function checkWorkingDirectory(workingDirectory = ".") {
throw new Error(`Directory ${workingDirectory} does not exist.`);
}
}
export function info(
config: WranglerActionConfig,
message: string,
bypass?: boolean,
): void {
if (!config.QUIET_MODE || bypass) {
originalInfo(message);
}
}
export function error(
config: WranglerActionConfig,
message: string,
bypass?: boolean,
): void {
if (!config.QUIET_MODE || bypass) {
originalError(message);
}
}
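
The info and error wrappers relocated here from wranglerAction.ts keep the QUIET_MODE gating in one place so both wranglerAction.ts and service/github.ts can share them. A brief usage sketch, not part of the diff, with illustrative message strings:

import { error, info } from "./utils";
import type { WranglerActionConfig } from "./wranglerAction";

function report(config: WranglerActionConfig) {
  info(config, "Installing wrangler..."); // suppressed when QUIET_MODE is true
  info(config, "Deployment URL: https://example.pages.dev", true); // bypass: always printed
  error(config, "Failed to read wrangler output"); // suppressed when QUIET_MODE is true
}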


@ -2,8 +2,6 @@ import {
debug,
getMultilineInput,
endGroup as originalEndGroup,
error as originalError,
info as originalInfo,
startGroup as originalStartGroup,
setFailed,
setOutput,
@ -13,10 +11,9 @@ import semverEq from "semver/functions/eq";
import { z } from "zod";
import { exec, execShell } from "./exec";
import { PackageManager } from "./packageManagers";
import { semverCompare } from "./utils";
import { error, info, semverCompare } from "./utils";
import { getDetailedPagesDeployOutput } from "./wranglerArtifactManager";
import { createGitHubDeployment, createJobSummary } from "./service/github";
import { getOctokit } from "@actions/github";
import { createGitHubDeploymentAndJobSummary } from "./service/github";
export type WranglerActionConfig = z.infer<typeof wranglerActionConfig>;
export const wranglerActionConfig = z.object({
@ -35,26 +32,6 @@ export const wranglerActionConfig = z.object({
GITHUB_TOKEN: z.string(),
});
function info(
config: WranglerActionConfig,
message: string,
bypass?: boolean,
): void {
if (!config.QUIET_MODE || bypass) {
originalInfo(message);
}
}
function error(
config: WranglerActionConfig,
message: string,
bypass?: boolean,
): void {
if (!config.QUIET_MODE || bypass) {
originalError(message);
}
}
function startGroup(config: WranglerActionConfig, name: string): void {
if (!config.QUIET_MODE) {
originalStartGroup(name);
@ -423,36 +400,11 @@ async function wranglerCommands(
setOutput("pages-deployment-alias-url", pagesArtifactFields.alias);
setOutput("pages-deployment-id", pagesArtifactFields.deployment_id);
setOutput("pages-environment", pagesArtifactFields.environment);
// create github deployment, if GITHUB_TOKEN is provided
if (
config.GITHUB_TOKEN &&
pagesArtifactFields.production_branch &&
pagesArtifactFields.project_name &&
pagesArtifactFields.deployment_trigger &&
pagesArtifactFields.stages
) {
const octokit = getOctokit(config.GITHUB_TOKEN);
await Promise.all([
createGitHubDeployment({
config,
octokit,
deploymentUrl: pagesArtifactFields.url,
productionBranch: pagesArtifactFields.production_branch,
environment: pagesArtifactFields.environment,
deploymentId: pagesArtifactFields.deployment_id,
projectName: pagesArtifactFields.project_name,
}),
createJobSummary({
commitHash:
pagesArtifactFields.deployment_trigger.metadata.commit_hash.substring(
0,
8,
),
deploymentUrl: pagesArtifactFields.url,
aliasUrl: pagesArtifactFields.alias,
}),
]);
}
// Create github deployment, if GITHUB_TOKEN is present in config
await createGitHubDeploymentAndJobSummary(
config,
pagesArtifactFields,
);
} else {
info(
config,


@ -1,17 +1,17 @@
import mock from "mock-fs";
import mockfs from "mock-fs";
import { afterEach, describe, expect, it } from "vitest";
import {
getDetailedPagesDeployOutput,
getWranglerArtifacts,
} from "./wranglerArtifactManager";
afterEach(async () => {
mock.restore();
afterEach(() => {
mockfs.restore();
});
describe("wranglerArtifactsManager", () => {
describe("getWranglerArtifacts()", async () => {
it("Returns only wrangler output files from a given directory", async () => {
mock({
mockfs({
testOutputDir: {
"wrangler-output-2024-10-17_18-48-40_463-2e6e83.json": `
{"version": 1, "type":"wrangler-session", "wrangler_version":"3.81.0", "command_line_args":["what's up"], "log_file_path": "/here"}
@ -27,7 +27,7 @@ describe("wranglerArtifactsManager", () => {
]);
});
it("Returns an empty list when the output directory doesn't exist", async () => {
mock({
mockfs({
notTheDirWeWant: {},
});
@ -38,7 +38,7 @@ describe("wranglerArtifactsManager", () => {
describe("getDetailedPagesDeployOutput()", async () => {
it("Returns only detailed pages deploy output from wrangler artifacts", async () => {
mock({
mockfs({
testOutputDir: {
"wrangler-output-2024-10-17_18-48-40_463-2e6e83.json": `
{"version": 1, "type":"wrangler-session", "wrangler_version":"3.81.0", "command_line_args":["what's up"], "log_file_path": "/here"}
@ -60,7 +60,7 @@ describe("wranglerArtifactsManager", () => {
});
}),
it("Skips artifact entries that are not parseable", async () => {
mock({
mockfs({
testOutputDir: {
"wrangler-output-2024-10-17_18-48-40_463-2e6e83.json": `
this line is invalid json.


@ -15,8 +15,6 @@ const OutputEntryPagesDeployment = OutputEntryBase.merge(
alias: z.string().optional(),
environment: z.enum(["production", "preview"]),
// optional, added in wrangler@TBD
project_name: z.string().optional(),
// optional, added in wrangler@TBD
production_branch: z.string().optional(),
// optional, added in wrangler@TBD
stages: z
@ -54,7 +52,9 @@ const OutputEntryPagesDeployment = OutputEntryBase.merge(
}),
);
type OutputEntryPagesDeployment = z.infer<typeof OutputEntryPagesDeployment>;
export type OutputEntryPagesDeployment = z.infer<
typeof OutputEntryPagesDeployment
>;
/**
* Parses file names in a directory to find wrangler artifact files