Sync script to Git repo
One script reply has been approved by the moderators Verified

This script will pull the script from the current workspace into a temporary folder, then commit it to the remote Git repository and push it. Only the script-related file will be pushed, nothing else. It takes as input the git_repository resource containing the repository URL, the script path, and the commit message to use for the commit. All params are mandatory.

Created by hugo697 882 days ago Picked 136 times
Submitted by pyranota275 Bun
Verified 7 days ago
1
import * as wmillclient from "windmill-client";
2
import wmill from "[email protected]";
3
import { basename } from "node:path";
4
const util = require("util");
5
const exec = util.promisify(require("child_process").exec);
6
import process from "process";
7

8
// GPG signing key settings stored on the git_repository resource.
type GpgKey = {
  email: string;
  // Armored PGP private key block.
  private_key: string;
  passphrase: string;
};

// Workspace ids of forked workspaces carry this prefix.
const FORKED_WORKSPACE_PREFIX = "wm-fork-";
// Branches backing forked workspaces are named `wm-fork/<branch>/<fork id>`.
const FORKED_BRANCH_PREFIX = "wm-fork";

// Kind of workspace object being deployed. "settings" and "key" are part of
// the union so the `--include-settings` / `--include-key` checks in
// wmill_sync_pull are valid comparisons under strict TypeScript (TS2367).
type PathType =
  | "script"
  | "flow"
  | "app"
  | "raw_app"
  | "folder"
  | "resource"
  | "variable"
  | "resourcetype"
  | "schedule"
  | "user"
  | "group"
  | "httptrigger"
  | "websockettrigger"
  | "kafkatrigger"
  | "natstrigger"
  | "postgrestrigger"
  | "mqtttrigger"
  | "sqstrigger"
  | "gcptrigger"
  | "azuretrigger"
  | "emailtrigger"
  | "settings"
  | "key";

// One workspace object to sync into the repository.
type SyncObject = {
  path_type: PathType;
  path: string | undefined;
  parent_path: string | undefined;
  commit_msg: string;
};

// Fingerprint of the imported GPG key; set by set_gpg_signing_secret and
// consumed by delete_pgp_keys during cleanup.
let gpgFingerprint: string | undefined = undefined;
48

49
export async function main(
50
  items: SyncObject[],
51
  // Compat, do not use in code, rely on `items` instead
52
  path_type: PathType | undefined,
53
  path: string | undefined,
54
  parent_path: string | undefined,
55
  commit_msg: string | undefined,
56
  //
57
  workspace_id: string,
58
  repo_url_resource_path: string,
59
  skip_secret: boolean = true,
60
  use_individual_branch: boolean = false,
61
  group_by_folder: boolean = false,
62
  only_create_branch: boolean = false,
63
  parent_workspace_id?: string,
64
) {
65

66
  if (path_type !== undefined && commit_msg !== undefined) {
67
    items = [{
68
      path_type,
69
      path,
70
      parent_path,
71
      commit_msg,
72
    }];
73
  }
74
  await inner(items, workspace_id, repo_url_resource_path, skip_secret, use_individual_branch, group_by_folder, only_create_branch, parent_workspace_id);
75
}
76

77
// Shared implementation behind `main`: clones the repository, checks out (or
// creates) the branch matching this workspace / deployment mode, optionally
// pulls the workspace content into the clone, then pushes and cleans up.
async function inner(
  items: SyncObject[],
  workspace_id: string,
  repo_url_resource_path: string,
  skip_secret: boolean = true,
  use_individual_branch: boolean = false,
  group_by_folder: boolean = false,
  only_create_branch: boolean = false,
  parent_workspace_id?: string,
) {

  let safeDirectoryPath: string | undefined;
  const repo_resource = await wmillclient.getResource(repo_url_resource_path);
  const cwd = process.cwd();
  // NOTE(review): presumably redirects git/gpg config lookups to the job's
  // working directory instead of the worker's real $HOME — confirm.
  process.env["HOME"] = ".";
  if (!only_create_branch) {
    for (const item of items) {
      console.log(
        `Syncing ${item.path_type} ${item.path ?? ""} with parent ${item.parent_path ?? ""}`
      );
    }
  }

  // GitHub App installations authenticate via a short-lived installation
  // token embedded into the clone URL.
  if (repo_resource.is_github_app) {
    const token = await get_gh_app_token();
    const authRepoUrl = prependTokenToGitHubUrl(repo_resource.url, token);
    repo_resource.url = authRepoUrl;
  }

  // Individual-branch mode and forked workspaces may need to check out a
  // branch other than the cloned default, so fetch all branches then.
  const { repo_name, safeDirectoryPath: cloneSafeDirectoryPath, clonedBranchName } = await git_clone(cwd, repo_resource, use_individual_branch || workspace_id.startsWith(FORKED_WORKSPACE_PREFIX));
  safeDirectoryPath = cloneSafeDirectoryPath;


  // Since we don't modify the resource on the forked workspaces, we have to consider the case of
  // a fork of a fork workspace. In that case, the original branch is not stored in the resource
  // settings, but we need to infer it from the workspace id

  if (workspace_id.startsWith(FORKED_WORKSPACE_PREFIX)) {
    if (use_individual_branch) {
      console.log("Cannot have `use_individual_branch` in a forked workspace, disabling option`");
      use_individual_branch = false;
    }
    if (group_by_folder) {
      console.log("Cannot have `group_by_folder` in a forked workspace, disabling option`");
      group_by_folder = false;
    }
  }

  // Fork-of-a-fork: first move onto the parent fork's branch so a newly
  // created branch forks off the right root.
  if (parent_workspace_id && parent_workspace_id.startsWith(FORKED_WORKSPACE_PREFIX)) {
    const parentBranch = get_fork_branch_name(parent_workspace_id, clonedBranchName);
    console.log(`This workspace's parent is also a fork, moving to branch ${parentBranch} in case a new branch needs to be created with the appropriate root`);
    await git_checkout_branch(
      items,
      parent_workspace_id,
      use_individual_branch,
      group_by_folder,
      clonedBranchName
    );
  }

  await git_checkout_branch(
    items,
    workspace_id,
    use_individual_branch,
    group_by_folder,
    clonedBranchName
  );


  const subfolder = repo_resource.folder ?? "";
  const branch_or_default = repo_resource.branch ?? "<DEFAULT>";
  console.log(
    `Pushing to repository ${repo_name} in subfolder ${subfolder} on branch ${branch_or_default}`
  );

  // If we want to just create the branch, we can skip pulling the changes.
  if (!only_create_branch) {
    await wmill_sync_pull(
      items,
      workspace_id,
      skip_secret,
      repo_url_resource_path,
      use_individual_branch,
      repo_resource.branch
    );
  }
  try {
    await git_push(items, repo_resource, only_create_branch);
  } catch (e) {
    throw e;
  } finally {
    // Always remove imported GPG keys and the safe.directory entry, even if
    // the push failed.
    await delete_pgp_keys();
    // Cleanup: remove safe.directory config
    if (safeDirectoryPath) {
      try {
        await sh_run(undefined, "git", "config", "--global", "--unset", "safe.directory", safeDirectoryPath);
      } catch (e) {
        console.log(`Warning: Could not unset safe.directory config: ${e}`);
      }
    }
  }
  console.log("Finished syncing");
  process.chdir(`${cwd}`);
}
181

182

183
/**
 * Maps a forked workspace id onto its backing branch name:
 * `wm-fork-<rest>` becomes `wm-fork/<originalBranch>/<rest>`.
 * Non-fork workspace ids are returned unchanged.
 */
function get_fork_branch_name(w_id: string, originalBranch: string): string {
  if (!w_id.startsWith(FORKED_WORKSPACE_PREFIX)) {
    return w_id;
  }
  const forkSuffix = w_id.slice(FORKED_WORKSPACE_PREFIX.length);
  return `${FORKED_BRANCH_PREFIX}/${originalBranch}/${forkSuffix}`;
}
189

190
// Clones the repository into `<cwd>/<repo_name>` (shallow, optionally sparse
// and pinned to the resource's branch), registers it as a git safe.directory,
// and chdirs into it (and into the sparse subfolder when set).
// Returns the repo name, the absolute path registered as safe.directory, and
// the name of the branch that was actually checked out.
async function git_clone(
  cwd: string,
  repo_resource: any,
  no_single_branch: boolean,
): Promise<{ repo_name: string; safeDirectoryPath: string; clonedBranchName: string }> {
  // TODO: handle private SSH keys as well
  let repo_url = repo_resource.url;
  const subfolder = repo_resource.folder ?? "";
  const branch = repo_resource.branch ?? "";
  const repo_name = basename(repo_url, ".git");
  // URLs of the form AZURE_DEVOPS_TOKEN(<resource path>) are resolved into a
  // service-principal access token via the Azure OAuth endpoint.
  const azureMatch = repo_url.match(/AZURE_DEVOPS_TOKEN\((?<url>.+)\)/);
  if (azureMatch) {
    console.log(
      "Requires Azure DevOps service account access token, requesting..."
    );
    const azureResource = await wmillclient.getResource(azureMatch.groups.url);
    const response = await fetch(
      `https://login.microsoftonline.com/${azureResource.azureTenantId}/oauth2/token`,
      {
        method: "POST",
        body: new URLSearchParams({
          client_id: azureResource.azureClientId,
          client_secret: azureResource.azureClientSecret,
          grant_type: "client_credentials",
          // Fixed resource id of Azure DevOps in AAD.
          resource: "499b84ac-1321-427f-aa17-267ca6975798/.default",
        }),
      }
    );
    const { access_token } = await response.json();
    repo_url = repo_url.replace(azureMatch[0], access_token);
  }
  const args = ["clone", "--quiet", "--depth", "1"];
  if (no_single_branch) {
    args.push("--no-single-branch"); // needed in case the asset branch already exists in the repo
  }
  if (subfolder !== "") {
    // Sparse checkout: only materialize the configured subfolder.
    args.push("--sparse");
  }
  if (branch !== "") {
    args.push("--branch");
    args.push(branch);
  }
  args.push(repo_url);
  args.push(repo_name);
  // Secret position -1: masks the repo URL (which may embed a token) in logs.
  await sh_run(-1, "git", ...args);
  try {
    process.chdir(`${cwd}/${repo_name}`);
    const safeDirectoryPath = process.cwd();
    // Add safe.directory to handle dubious ownership in cloned repo
    try {
      await sh_run(undefined, "git", "config", "--global", "--add", "safe.directory", process.cwd());
    } catch (e) {
      console.log(`Warning: Could not add safe.directory config: ${e}`);
    }

    if (subfolder !== "") {
      await sh_run(undefined, "git", "sparse-checkout", "add", subfolder);
      try {
        process.chdir(`${cwd}/${repo_name}/${subfolder}`);
      } catch (err) {
        console.log(
          `Error changing directory to '${cwd}/${repo_name}/${subfolder}'. Error was:\n${err}`
        );
        throw err;
      }
    }
    // Record which branch the clone landed on (the default branch when none
    // was pinned above).
    const clonedBranchName = (await sh_run(undefined, "git", "rev-parse", "--abbrev-ref", "HEAD")).trim();
    return { repo_name, safeDirectoryPath, clonedBranchName };

  } catch (err) {
    console.log(
      `Error changing directory to '${cwd}/${repo_name}'. Error was:\n${err}`
    );
    throw err;
  }
}
266
async function git_checkout_branch(
267
  items: SyncObject[],
268
  workspace_id: string,
269
  use_individual_branch: boolean,
270
  group_by_folder: boolean,
271
  originalBranchName: string
272
) {
273
  let branchName;
274
  if (workspace_id.startsWith(FORKED_WORKSPACE_PREFIX)) {
275
    branchName = get_fork_branch_name(workspace_id, originalBranchName);
276
  } else {
277

278
    if (!use_individual_branch
279
      // If individual branch is true, we can assume items is of length 1, as debouncing is disabled for jobs with this flag
280
      || items[0].path_type === "user" || items[0].path_type === "group") {
281
      return;
282
    }
283

284
    // as mentioned above, it is safe to assume that items.len is 1
285
    const [path, parent_path] = [items[0].path, items[0].parent_path];
286
    branchName = group_by_folder
287
      ? `wm_deploy/${workspace_id}/${(path ?? parent_path)
288
        ?.split("/")
289
        .slice(0, 2)
290
        .join("__")}`
291
      : `wm_deploy/${workspace_id}/${items[0].path_type}/${(
292
        path ?? parent_path
293
      )?.replaceAll("/", "__")}`;
294
  }
295

296
  try {
297
    await sh_run(undefined, "git", "checkout", branchName);
298
  } catch (err) {
299
    console.log(
300
      `Error checking out branch ${branchName}. It is possible it doesn't exist yet, tentatively creating it... Error was:\n${err}`
301
    );
302
    try {
303
      await sh_run(undefined, "git", "checkout", "-b", branchName);
304
      await sh_run(
305
        undefined,
306
        "git",
307
        "config",
308
        "--add",
309
        "--bool",
310
        "push.autoSetupRemote",
311
        "true"
312
      );
313
    } catch (err) {
314
      console.log(
315
        `Error checking out branch '${branchName}'. Error was:\n${err}`
316
      );
317
      throw err;
318
    }
319
  }
320
  console.log(`Successfully switched to branch ${branchName}`);
321
}
322

323
/**
 * Builds a one-line commit header summarizing the deployed items, e.g.
 * "[WM]: Deployed 2 scripts, 1 flow". The three most frequent kinds are
 * spelled out; anything beyond that is lumped into "N other object(s)".
 */
function composeCommitHeader(items: SyncObject[]): string {
  // Tally how many items of each kind were deployed.
  const counts = new Map<PathType, number>();
  items.forEach(({ path_type }) => {
    counts.set(path_type, (counts.get(path_type) ?? 0) + 1);
  });

  // Most frequent kinds first (stable sort keeps insertion order on ties).
  const ranked = [...counts.entries()].sort(([, a], [, b]) => b - a);

  const spelled: string[] = [];
  let lumped = 0;
  ranked.forEach(([kind, count], rank) => {
    if (rank >= 3) {
      lumped += count;
      return;
    }
    // Pluralize the kind when it occurs more than once.
    const noun = count > 1 ? `${kind}s` : kind;
    // Prefix "and" on the final entry only when exactly three kinds exist.
    const entry =
      rank === 2 && ranked.length === 3 ? `and ${count} ${noun}` : `${count} ${noun}`;
    spelled.push(entry);
  });

  let header = `[WM]: Deployed ${spelled.join(", ")}`;
  if (lumped > 0) {
    header += ` and ${lumped} other object${lumped > 1 ? "s" : ""}`;
  }

  return header;
}
359

360
// Configures commit identity (and GPG signing when the resource carries a
// key), stages the files belonging to each item, commits with a composed
// message, and pushes (rebasing once on rejection). When nothing is staged,
// logs and returns without committing.
async function git_push(
  items: SyncObject[],
  repo_resource: any,
  only_create_branch: boolean,
) {
  let user_email = process.env["WM_EMAIL"] ?? "";
  let user_name = process.env["WM_USERNAME"] ?? "";

  if (repo_resource.gpg_key) {
    await set_gpg_signing_secret(repo_resource.gpg_key);
    // Configure git with GPG key email for signing
    await sh_run(
      undefined,
      "git",
      "config",
      "user.email",
      repo_resource.gpg_key.email
    );
    await sh_run(undefined, "git", "config", "user.name", user_name);
  } else {
    await sh_run(undefined, "git", "config", "user.email", user_email);
    await sh_run(undefined, "git", "config", "user.name", user_name);
  }
  // Push the freshly created branch. NOTE(review): there is deliberately no
  // early return here — execution falls through to the staging logic below,
  // which ends up in the "no changes" path when nothing was pulled. Confirm
  // this fall-through is intended.
  if (only_create_branch) {
    await sh_run(undefined, "git", "push", "--porcelain");
  }

  let commit_description: string[] = [];
  for (const { path, parent_path, commit_msg } of items) {
    // Stage the lockfile plus everything under the item's path prefix.
    if (path !== undefined && path !== null && path !== "") {
      try {
        await sh_run(undefined, "git", "add", "wmill-lock.yaml", `'${path}**'`);
      } catch (e) {
        // Best-effort: a rename/delete may leave nothing matching the glob.
        console.log(`Unable to stage files matching ${path}**, ${e}`);
      }
    }
    if (parent_path !== undefined && parent_path !== null && parent_path !== "") {
      try {
        await sh_run(
          undefined,
          "git",
          "add",
          "wmill-lock.yaml",
          `'${parent_path}**'`
        );
      } catch (e) {
        console.log(`Unable to stage files matching ${parent_path}, ${e}`);
      }
    }

    commit_description.push(commit_msg);
  }

  try {
    await sh_run(undefined, "git", "diff", "--cached", "--quiet");
  } catch {
    // git diff returns exit-code = 1 when there's at least one staged changes
    const commitArgs = ["git", "commit"];

    // Always use --author to set consistent authorship
    commitArgs.push("--author", `"${user_name} <${user_email}>"`);

    // Single item: use its message as the header. Multiple items: synthesize
    // a summary header and join the individual messages as the body.
    const [header, description] = (commit_description.length == 1)
      ? [commit_description[0], ""]
      : [composeCommitHeader(items), commit_description.join("\n")];

    commitArgs.push(
      "-m",
      `"${header == undefined || header == "" ? "no commit msg" : header}"`,
      "-m",
      `"${description}"`
    );

    await sh_run(undefined, ...commitArgs);
    try {
      await sh_run(undefined, "git", "push", "--porcelain");
    } catch (e) {
      // Remote moved ahead since the clone: rebase once, then retry the push.
      console.log(`Could not push, trying to rebase first: ${e}`);
      await sh_run(undefined, "git", "pull", "--rebase");
      await sh_run(undefined, "git", "push", "--porcelain");
    }
    return;
  }

  console.log("No changes detected, nothing to commit. Returning...");
}
446
async function sh_run(
447
  secret_position: number | undefined,
448
  cmd: string,
449
  ...args: string[]
450
) {
451
  const nargs = secret_position != undefined ? args.slice() : args;
452
  if (secret_position && secret_position < 0) {
453
    secret_position = nargs.length - 1 + secret_position;
454
  }
455
  let secret: string | undefined = undefined;
456
  if (secret_position != undefined) {
457
    nargs[secret_position] = "***";
458
    secret = args[secret_position];
459
  }
460

461
  console.log(`Running '${cmd} ${nargs.join(" ")} ...'`);
462
  const command = exec(`${cmd} ${args.join(" ")}`);
463
  // new Deno.Command(cmd, {
464
  //   args: args,
465
  // });
466
  try {
467
    const { stdout, stderr } = await command;
468
    if (stdout.length > 0) {
469
      console.log(stdout);
470
    }
471
    if (stderr.length > 0) {
472
      console.log(stderr);
473
    }
474
    console.log("Command successfully executed");
475
    return stdout;
476
  } catch (error) {
477
    let errorString = error.toString();
478
    if (secret) {
479
      errorString = errorString.replace(secret, "***");
480
    }
481
    const err = `SH command '${cmd} ${nargs.join(
482
      " "
483
    )}' returned with error ${errorString}`;
484
    throw Error(err);
485
  }
486
}
487

488
/**
 * Returns the glob pattern(s) matching the on-disk files that `wmill sync`
 * produces for an object of the given kind at the given path. Kinds without
 * a dedicated layout (e.g. scripts) fall back to `<path>.*`.
 */
function regexFromPath(path_type: PathType, path: string) {
  switch (path_type) {
    // Flows and apps are directories; both legacy (`__`) and current (`.`)
    // directory suffixes are matched.
    case "flow":
      return `${path}.flow/*,${path}__flow/*`;
    case "app":
      return `${path}.app/*,${path}__app/*`;
    case "raw_app":
      return `${path}.raw_app/**,${path}__raw_app/**`;
    case "folder":
      return `${path}/folder.meta.*`;
    case "resourcetype":
      return `${path}.resource-type.*`;
    case "resource":
      return `${path}.resource.*`;
    case "variable":
      return `${path}.variable.*`;
    case "schedule":
      return `${path}.schedule.*`;
    case "user":
      return `${path}.user.*`;
    case "group":
      return `${path}.group.*`;
    case "httptrigger":
      return `${path}.http_trigger.*`;
    case "websockettrigger":
      return `${path}.websocket_trigger.*`;
    case "kafkatrigger":
      return `${path}.kafka_trigger.*`;
    case "natstrigger":
      return `${path}.nats_trigger.*`;
    case "postgrestrigger":
      return `${path}.postgres_trigger.*`;
    case "mqtttrigger":
      return `${path}.mqtt_trigger.*`;
    case "sqstrigger":
      return `${path}.sqs_trigger.*`;
    case "gcptrigger":
      return `${path}.gcp_trigger.*`;
    case "azuretrigger":
      return `${path}.azure_trigger.*`;
    case "emailtrigger":
      return `${path}.email_trigger.*`;
    default:
      return `${path}.*`;
  }
}
533

534
async function wmill_sync_pull(
535
  items: SyncObject[],
536
  workspace_id: string,
537
  skip_secret: boolean,
538
  repo_url_resource_path: string,
539
  use_individual_branch: boolean,
540
  original_branch?: string,
541
) {
542
  const includes = [];
543
  for (const item of items) {
544
    const { path_type, path, parent_path } = item;
545
    if (path !== undefined && path !== null && path !== "") {
546
      includes.push(regexFromPath(path_type, path));
547
    }
548
    if (parent_path !== undefined && parent_path !== null && parent_path !== "") {
549
      includes.push(regexFromPath(path_type, parent_path));
550
    }
551
  }
552

553
  console.log("Pulling workspace into git repo");
554

555
  const args = [
556
    "sync",
557
    "pull",
558
    "--token",
559
    process.env["WM_TOKEN"] ?? "",
560
    "--workspace",
561
    workspace_id,
562
    "--base-url",
563
    process.env["BASE_URL"] + "/",
564
    "--repository",
565
    repo_url_resource_path,
566
    "--yes",
567
    skip_secret ? "--skip-secrets" : "",
568
  ];
569

570
  if (items.some(item => item.path_type === "schedule") && !use_individual_branch) {
571
    args.push("--include-schedules");
572
  }
573

574
  if (items.some(item => item.path_type === "group") && !use_individual_branch) {
575
    args.push("--include-groups");
576
  }
577

578
  if (items.some(item => item.path_type === "user") && !use_individual_branch) {
579
    args.push("--include-users");
580
  }
581

582
  if (items.some(item => item.path_type.includes("trigger")) && !use_individual_branch) {
583
    args.push("--include-triggers");
584
  }
585
  // Only include settings when specifically deploying settings
586
  if (items.some(item => item.path_type === "settings") && !use_individual_branch) {
587
    args.push("--include-settings");
588
  }
589

590
  // Only include key when specifically deploying keys
591
  if (items.some(item => item.path_type === "key") && !use_individual_branch) {
592
    args.push("--include-key");
593
  }
594

595
  args.push("--extra-includes", includes.join(","));
596

597
  // If using individual branches, apply promotion settings from original branch
598
  if (use_individual_branch && original_branch) {
599
    console.log(`Individual branch deployment detected - using promotion settings from '${original_branch}'`);
600
    args.push("--promotion", original_branch);
601
  }
602

603
  await wmill_run(3, ...args);
604
}
605

606
/**
 * Invokes the wmill CLI with the given argv, dropping empty-string
 * placeholders (e.g. the optional --skip-secrets slot) and masking the
 * argument at `secret_position` in the log line.
 */
async function wmill_run(secret_position: number, ...cmd: string[]) {
  const filtered = cmd.filter((arg) => arg !== "");
  const loggable = [...filtered];
  loggable[secret_position] = "***";
  console.log(`Running 'wmill ${loggable.join(" ")} ...'`);
  await wmill.parse(filtered);
  console.log("Command successfully executed");
}
614

615
// Function to set up GPG signing: imports the resource's private key into a
// throwaway GNUPGHOME, unlocks it with the passphrase when given, and points
// git at it (user.signingkey + commit.gpgsign). On any failure the key is
// deleted again; the error is logged but NOT rethrown, so a failure here
// leads to unsigned commits rather than aborting the sync.
async function set_gpg_signing_secret(gpg_key: GpgKey) {
  try {
    console.log("Setting GPG private key for git commits");

    // Resource editors often paste the armored key on one line; rebuild the
    // line breaks the armor format requires.
    const formattedGpgContent = gpg_key.private_key.replace(
      /(-----BEGIN PGP PRIVATE KEY BLOCK-----)([\s\S]*?)(-----END PGP PRIVATE KEY BLOCK-----)/,
      (_: string, header: string, body: string, footer: string) =>
        header +
        "\n" +
        "\n" +
        body.replace(/ ([^\s])/g, "\n$1").trim() +
        "\n" +
        footer
    );

    // Isolated keyring so the job never touches the worker's real keyring.
    const gpg_path = `/tmp/gpg`;
    await sh_run(undefined, "mkdir", "-p", gpg_path);
    await sh_run(undefined, "chmod", "700", gpg_path);
    process.env.GNUPGHOME = gpg_path;
    // process.env.GIT_TRACE = 1;

    try {
      // Secret position 1: masks the heredoc containing the key in logs.
      await sh_run(
        1,
        "bash",
        "-c",
        `cat <<EOF | gpg --batch --import \n${formattedGpgContent}\nEOF`
      );
    } catch (e) {
      // Original error would contain sensitive data
      throw new Error("Failed to import GPG key!");
    }

    const listKeysOutput = await sh_run(
      undefined,
      "gpg",
      "--list-secret-keys",
      "--with-colons",
      "--keyid-format=long"
    );

    // Machine-readable output: `sec` line carries the key id, the following
    // `fpr` line the 40-hex-char fingerprint.
    const keyInfoMatch = listKeysOutput.match(
      /sec:[^:]*:[^:]*:[^:]*:([A-F0-9]+):.*\nfpr:::::::::([A-F0-9]{40}):/
    );

    if (!keyInfoMatch) {
      throw new Error("Failed to extract GPG Key ID and Fingerprint");
    }

    const keyId = keyInfoMatch[1];
    gpgFingerprint = keyInfoMatch[2];

    if (gpg_key.passphrase) {
      // This is a dummy command to unlock the key
      // with passphrase to load it into agent
      // NOTE(review): the passphrase is interpolated into a single-quoted
      // shell string — a passphrase containing a single quote breaks the
      // command (and could alter it); consider passing it via stdin/fd.
      await sh_run(
        1,
        "bash",
        "-c",
        `echo "dummy" | gpg --batch --pinentry-mode loopback --passphrase '${gpg_key.passphrase}' --status-fd=2 -bsau ${keyId}`
      );
    }

    // Configure Git to use the extracted key
    await sh_run(undefined, "git", "config", "user.signingkey", keyId);
    await sh_run(undefined, "git", "config", "commit.gpgsign", "true");
    console.log(`GPG signing configured with key ID: ${keyId} `);
  } catch (e) {
    console.error(`Failure while setting GPG key: ${e} `);
    await delete_pgp_keys();
  }
}
688

689
async function delete_pgp_keys() {
690
  console.log("deleting gpg keys");
691
  if (gpgFingerprint) {
692
    await sh_run(
693
      undefined,
694
      "gpg",
695
      "--batch",
696
      "--yes",
697
      "--pinentry-mode",
698
      "loopback",
699
      "--delete-secret-key",
700
      gpgFingerprint
701
    );
702
    await sh_run(
703
      undefined,
704
      "gpg",
705
      "--batch",
706
      "--yes",
707
      "--delete-key",
708
      "--pinentry-mode",
709
      "loopback",
710
      gpgFingerprint
711
    );
712
  }
713
}
714

715
async function get_gh_app_token() {
716
  const workspace = process.env["WM_WORKSPACE"];
717
  const jobToken = process.env["WM_TOKEN"];
718

719
  const baseUrl =
720
    process.env["BASE_INTERNAL_URL"] ??
721
    process.env["BASE_URL"] ??
722
    "http://localhost:8000";
723

724
  const url = `${baseUrl}/api/w/${workspace}/github_app/token`;
725

726
  const response = await fetch(url, {
727
    method: "POST",
728
    headers: {
729
      "Content-Type": "application/json",
730
      Authorization: `Bearer ${jobToken}`,
731
    },
732
    body: JSON.stringify({
733
      job_token: jobToken,
734
    }),
735
  });
736

737
  if (!response.ok) {
738
    const errorBody = await response.text().catch(() => "");
739
    throw new Error(`GitHub App token error (${response.status}): ${errorBody || response.statusText}`);
740
  }
741

742
  const data = await response.json();
743

744
  return data.token;
745
}
746

747
/**
 * Embeds a GitHub App installation token into an HTTPS repository URL as
 * credentials: `https://x-access-token:<token>@<host><path>`.
 *
 * @param gitHubUrl the repository HTTPS URL.
 * @param installationToken the GitHub App installation token.
 * @returns the URL with the token embedded.
 * @throws Error when either argument is empty, or (TypeError) when
 *         `gitHubUrl` is not a parseable URL.
 */
function prependTokenToGitHubUrl(gitHubUrl: string, installationToken: string) {
  if (!gitHubUrl || !installationToken) {
    throw new Error("Both GitHub URL and Installation Token are required.");
  }

  const url = new URL(gitHubUrl);
  // Use `host` (hostname plus optional port) rather than `hostname`, so
  // GitHub Enterprise instances served on a non-default port keep working.
  return `https://x-access-token:${installationToken}@${url.host}${url.pathname}`;
}
755

Other submissions