Sync script to Git repo

This script will pull the script from the current workspace into a temporary folder, then commit it to the remote Git repository and push it. Only the script-related files will be pushed, nothing else. It takes as input the git_repository resource containing the repository URL, the script path, and the commit message to use for the commit. All params are mandatory.
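For reference, a minimal sketch of what the inputs might look like. The repository URL and script path below are hypothetical, and the exact git_repository resource schema is the one defined in your Windmill workspace; only the fields this script reads are shown.

// Illustrative example values only (not part of the script)
const example_repo_resource = {
  url: "https://github.com/acme/windmill-sync.git", // hypothetical repository
  branch: "main",        // optional: branch to clone and push to
  folder: "",            // optional: subfolder used for sparse checkout
  is_github_app: false,  // when true, a GitHub App token is fetched and injected into the URL
  // gpg_key: { email, private_key, passphrase },  // optional: enables signed commits
};

const example_items = [
  {
    path_type: "script",
    path: "f/examples/hello_world", // hypothetical script path
    parent_path: undefined,
    commit_msg: "Update hello_world",
  },
];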

Created by hugo697 785 days ago Picked 60 times
Submitted by pyranota275 (Bun)
Verified 3 days ago
import * as wmillclient from "windmill-client";
// NOTE: the CLI package specifier is assumed to be "windmill-cli" here; pin it to the version used by your instance if needed.
import wmill from "windmill-cli";
import { basename } from "node:path";
const util = require("util");
const exec = util.promisify(require("child_process").exec);
import process from "process";

type GpgKey = {
  email: string;
  private_key: string;
  passphrase: string;
};

const FORKED_WORKSPACE_PREFIX = "wm-fork-";
const FORKED_BRANCH_PREFIX = "wm-fork";

type PathType =
  | "script"
  | "flow"
  | "app"
  | "raw_app"
  | "folder"
  | "resource"
  | "variable"
  | "resourcetype"
  | "schedule"
  | "user"
  | "group"
  | "httptrigger"
  | "websockettrigger"
  | "kafkatrigger"
  | "natstrigger"
  | "postgrestrigger"
  | "mqtttrigger"
  | "sqstrigger"
  | "gcptrigger"
  | "emailtrigger"
  // "settings" and "key" are compared against in wmill_sync_pull below,
  // so they are included here to keep the union consistent with that code.
  | "settings"
  | "key";

type SyncObject = {
  path_type: PathType;
  path: string | undefined;
  parent_path: string | undefined;
  commit_msg: string;
};

let gpgFingerprint: string | undefined = undefined;

export async function main(
  items: SyncObject[],
  // Compat, do not use in code, rely on `items` instead
  path_type: PathType | undefined,
  path: string | undefined,
  parent_path: string | undefined,
  commit_msg: string | undefined,
  //
  workspace_id: string,
  repo_url_resource_path: string,
  skip_secret: boolean = true,
  use_individual_branch: boolean = false,
  group_by_folder: boolean = false,
  only_create_branch: boolean = false,
  parent_workspace_id?: string,
) {
  if (path_type !== undefined && commit_msg !== undefined) {
    items = [{
      path_type,
      path,
      parent_path,
      commit_msg,
    }];
  }
  await inner(items, workspace_id, repo_url_resource_path, skip_secret, use_individual_branch, group_by_folder, only_create_branch, parent_workspace_id);
}

async function inner(
  items: SyncObject[],
  workspace_id: string,
  repo_url_resource_path: string,
  skip_secret: boolean = true,
  use_individual_branch: boolean = false,
  group_by_folder: boolean = false,
  only_create_branch: boolean = false,
  parent_workspace_id?: string,
) {
  let safeDirectoryPath: string | undefined;
  const repo_resource = await wmillclient.getResource(repo_url_resource_path);
  const cwd = process.cwd();
  process.env["HOME"] = ".";
  if (!only_create_branch) {
    for (const item of items) {
      console.log(
        `Syncing ${item.path_type} ${item.path ?? ""} with parent ${item.parent_path ?? ""}`
      );
    }
  }

  if (repo_resource.is_github_app) {
    const token = await get_gh_app_token();
    const authRepoUrl = prependTokenToGitHubUrl(repo_resource.url, token);
    repo_resource.url = authRepoUrl;
  }

  const { repo_name, safeDirectoryPath: cloneSafeDirectoryPath, clonedBranchName } = await git_clone(cwd, repo_resource, use_individual_branch || workspace_id.startsWith(FORKED_WORKSPACE_PREFIX));
  safeDirectoryPath = cloneSafeDirectoryPath;

  // Since we don't modify the resource on the forked workspaces, we have to consider the case of
  // a fork of a fork workspace. In that case, the original branch is not stored in the resource
  // settings, but we need to infer it from the workspace id

  if (workspace_id.startsWith(FORKED_WORKSPACE_PREFIX)) {
    if (use_individual_branch) {
      console.log("Cannot have `use_individual_branch` in a forked workspace, disabling option");
      use_individual_branch = false;
    }
    if (group_by_folder) {
      console.log("Cannot have `group_by_folder` in a forked workspace, disabling option");
      group_by_folder = false;
    }
  }

  if (parent_workspace_id && parent_workspace_id.startsWith(FORKED_WORKSPACE_PREFIX)) {
    const parentBranch = get_fork_branch_name(parent_workspace_id, clonedBranchName);
    console.log(`This workspace's parent is also a fork, moving to branch ${parentBranch} in case a new branch needs to be created with the appropriate root`);
    await git_checkout_branch(
      items,
      parent_workspace_id,
      use_individual_branch,
      group_by_folder,
      clonedBranchName
    );
  }

  await git_checkout_branch(
    items,
    workspace_id,
    use_individual_branch,
    group_by_folder,
    clonedBranchName
  );

  const subfolder = repo_resource.folder ?? "";
  const branch_or_default = repo_resource.branch ?? "<DEFAULT>";
  console.log(
    `Pushing to repository ${repo_name} in subfolder ${subfolder} on branch ${branch_or_default}`
  );

  // If we want to just create the branch, we can skip pulling the changes.
  if (!only_create_branch) {
    await wmill_sync_pull(
      items,
      workspace_id,
      skip_secret,
      repo_url_resource_path,
      use_individual_branch,
      repo_resource.branch
    );
  }
  try {
    await git_push(items, repo_resource, only_create_branch);
  } finally {
    await delete_pgp_keys();
    // Cleanup: remove safe.directory config
    if (safeDirectoryPath) {
      try {
        await sh_run(undefined, "git", "config", "--global", "--unset", "safe.directory", safeDirectoryPath);
      } catch (e) {
        console.log(`Warning: Could not unset safe.directory config: ${e}`);
      }
    }
  }
  console.log("Finished syncing");
  process.chdir(`${cwd}`);
}

function get_fork_branch_name(w_id: string, originalBranch: string): string {
  if (w_id.startsWith(FORKED_WORKSPACE_PREFIX)) {
    return w_id.replace(FORKED_WORKSPACE_PREFIX, `${FORKED_BRANCH_PREFIX}/${originalBranch}/`);
  }
  return w_id;
}

async function git_clone(
  cwd: string,
  repo_resource: any,
  no_single_branch: boolean,
): Promise<{ repo_name: string; safeDirectoryPath: string; clonedBranchName: string }> {
  // TODO: handle private SSH keys as well
  let repo_url = repo_resource.url;
  const subfolder = repo_resource.folder ?? "";
  const branch = repo_resource.branch ?? "";
  const repo_name = basename(repo_url, ".git");
  const azureMatch = repo_url.match(/AZURE_DEVOPS_TOKEN\((?<url>.+)\)/);
  if (azureMatch) {
    console.log(
      "Requires Azure DevOps service account access token, requesting..."
    );
    const azureResource = await wmillclient.getResource(azureMatch.groups.url);
    const response = await fetch(
      `https://login.microsoftonline.com/${azureResource.azureTenantId}/oauth2/token`,
      {
        method: "POST",
        body: new URLSearchParams({
          client_id: azureResource.azureClientId,
          client_secret: azureResource.azureClientSecret,
          grant_type: "client_credentials",
          resource: "499b84ac-1321-427f-aa17-267ca6975798/.default",
        }),
      }
    );
    const { access_token } = await response.json();
    repo_url = repo_url.replace(azureMatch[0], access_token);
  }
  const args = ["clone", "--quiet", "--depth", "1"];
  if (no_single_branch) {
    args.push("--no-single-branch"); // needed in case the asset branch already exists in the repo
  }
  if (subfolder !== "") {
    args.push("--sparse");
  }
  if (branch !== "") {
    args.push("--branch");
    args.push(branch);
  }
  args.push(repo_url);
  args.push(repo_name);
  await sh_run(-1, "git", ...args);
  try {
    process.chdir(`${cwd}/${repo_name}`);
    const safeDirectoryPath = process.cwd();
    // Add safe.directory to handle dubious ownership in cloned repo
    try {
      await sh_run(undefined, "git", "config", "--global", "--add", "safe.directory", process.cwd());
    } catch (e) {
      console.log(`Warning: Could not add safe.directory config: ${e}`);
    }

    if (subfolder !== "") {
      await sh_run(undefined, "git", "sparse-checkout", "add", subfolder);
      try {
        process.chdir(`${cwd}/${repo_name}/${subfolder}`);
      } catch (err) {
        console.log(
          `Error changing directory to '${cwd}/${repo_name}/${subfolder}'. Error was:\n${err}`
        );
        throw err;
      }
    }
    const clonedBranchName = (await sh_run(undefined, "git", "rev-parse", "--abbrev-ref", "HEAD")).trim();
    return { repo_name, safeDirectoryPath, clonedBranchName };
  } catch (err) {
    console.log(
      `Error changing directory to '${cwd}/${repo_name}'. Error was:\n${err}`
    );
    throw err;
  }
}
async function git_checkout_branch(
  items: SyncObject[],
  workspace_id: string,
  use_individual_branch: boolean,
  group_by_folder: boolean,
  originalBranchName: string
) {
  let branchName;
  if (workspace_id.startsWith(FORKED_WORKSPACE_PREFIX)) {
    branchName = get_fork_branch_name(workspace_id, originalBranchName);
  } else {
    if (!use_individual_branch
      // If individual branch is true, we can assume items is of length 1, as debouncing is disabled for jobs with this flag
      || items[0].path_type === "user" || items[0].path_type === "group") {
      return;
    }

    // as mentioned above, it is safe to assume that items.len is 1
    const [path, parent_path] = [items[0].path, items[0].parent_path];
    branchName = group_by_folder
      ? `wm_deploy/${workspace_id}/${(path ?? parent_path)
        ?.split("/")
        .slice(0, 2)
        .join("__")}`
      : `wm_deploy/${workspace_id}/${items[0].path_type}/${(
        path ?? parent_path
      )?.replaceAll("/", "__")}`;
  }

  try {
    await sh_run(undefined, "git", "checkout", branchName);
  } catch (err) {
    console.log(
      `Error checking out branch ${branchName}. It is possible it doesn't exist yet, tentatively creating it... Error was:\n${err}`
    );
    try {
      await sh_run(undefined, "git", "checkout", "-b", branchName);
      await sh_run(
        undefined,
        "git",
        "config",
        "--add",
        "--bool",
        "push.autoSetupRemote",
        "true"
      );
    } catch (err) {
      console.log(
        `Error checking out branch '${branchName}'. Error was:\n${err}`
      );
      throw err;
    }
  }
  console.log(`Successfully switched to branch ${branchName}`);
}

function composeCommitHeader(items: SyncObject[]): string {
  // Count occurrences of each path_type
  const typeCounts = new Map<PathType, number>();
  for (const item of items) {
    typeCounts.set(item.path_type, (typeCounts.get(item.path_type) ?? 0) + 1);
  }

  // Sort by count descending; the top 3 types are named explicitly below
  const sortedTypes = Array.from(typeCounts.entries()).sort((a, b) => b[1] - a[1]);

  const parts: string[] = [];
  let othersCount = 0;

  for (let i = 0; i < sortedTypes.length; i++) {
    const [pathType, count] = sortedTypes[i];
    if (i < 3) {
      // Pluralize the path type if count > 1
      const label = count > 1 ? `${pathType}s` : pathType;

      if (i == 2 && sortedTypes.length == 3) {
        parts.push(`and ${count} ${label}`);
      } else {
        parts.push(`${count} ${label}`);
      }
    } else {
      othersCount += count;
    }
  }

  let header = `[WM]: Deployed ${parts.join(", ")}`;
  if (othersCount > 0) {
    header += ` and ${othersCount} other object${othersCount > 1 ? "s" : ""}`;
  }

  return header;
}

async function git_push(
  items: SyncObject[],
  repo_resource: any,
  only_create_branch: boolean,
) {
  let user_email = process.env["WM_EMAIL"] ?? "";
  let user_name = process.env["WM_USERNAME"] ?? "";

  if (repo_resource.gpg_key) {
    await set_gpg_signing_secret(repo_resource.gpg_key);
    // Configure git with GPG key email for signing
    await sh_run(
      undefined,
      "git",
      "config",
      "user.email",
      repo_resource.gpg_key.email
    );
    await sh_run(undefined, "git", "config", "user.name", user_name);
  } else {
    await sh_run(undefined, "git", "config", "user.email", user_email);
    await sh_run(undefined, "git", "config", "user.name", user_name);
  }
  if (only_create_branch) {
    await sh_run(undefined, "git", "push", "--porcelain");
  }

  let commit_description: string[] = [];
  for (const { path, parent_path, commit_msg } of items) {
    if (path !== undefined && path !== null && path !== "") {
      try {
        await sh_run(undefined, "git", "add", "wmill-lock.yaml", `${path}**`);
      } catch (e) {
        console.log(`Unable to stage files matching ${path}**, ${e}`);
      }
    }
    if (parent_path !== undefined && parent_path !== null && parent_path !== "") {
      try {
        await sh_run(
          undefined,
          "git",
          "add",
          "wmill-lock.yaml",
          `${parent_path}**`
        );
      } catch (e) {
        console.log(`Unable to stage files matching ${parent_path}**, ${e}`);
      }
    }

    commit_description.push(commit_msg);
  }

  try {
    await sh_run(undefined, "git", "diff", "--cached", "--quiet");
  } catch {
    // git diff returns exit code 1 when there is at least one staged change
    const commitArgs = ["git", "commit"];

    // Always use --author to set consistent authorship
    commitArgs.push("--author", `"${user_name} <${user_email}>"`);

    const [header, description] = (commit_description.length == 1)
      ? [commit_description[0], ""]
      : [composeCommitHeader(items), commit_description.join("\n")];

    commitArgs.push(
      "-m",
      `"${header == undefined || header == "" ? "no commit msg" : header}"`,
      "-m",
      `"${description}"`
    );

    await sh_run(undefined, ...commitArgs);
    try {
      await sh_run(undefined, "git", "push", "--porcelain");
    } catch (e) {
      console.log(`Could not push, trying to rebase first: ${e}`);
      await sh_run(undefined, "git", "pull", "--rebase");
      await sh_run(undefined, "git", "push", "--porcelain");
    }
    return;
  }

  console.log("No changes detected, nothing to commit. Returning...");
}
// Runs a shell command; if secret_position is given, that argument is masked in logs and error messages.
async function sh_run(
  secret_position: number | undefined,
  cmd: string,
  ...args: string[]
) {
  const nargs = secret_position != undefined ? args.slice() : args;
  if (secret_position && secret_position < 0) {
    secret_position = nargs.length - 1 + secret_position;
  }
  let secret: string | undefined = undefined;
  if (secret_position != undefined) {
    nargs[secret_position] = "***";
    secret = args[secret_position];
  }

  console.log(`Running '${cmd} ${nargs.join(" ")} ...'`);
  const command = exec(`${cmd} ${args.join(" ")}`);
  // new Deno.Command(cmd, {
  //   args: args,
  // });
  try {
    const { stdout, stderr } = await command;
    if (stdout.length > 0) {
      console.log(stdout);
    }
    if (stderr.length > 0) {
      console.log(stderr);
    }
    console.log("Command successfully executed");
    return stdout;
  } catch (error) {
    let errorString = error.toString();
    if (secret) {
      errorString = errorString.replace(secret, "***");
    }
    const err = `SH command '${cmd} ${nargs.join(
      " "
    )}' returned with error ${errorString}`;
    throw Error(err);
  }
}

function regexFromPath(path_type: PathType, path: string) {
  if (path_type == "flow") {
    return `${path}.flow/*,${path}__flow/*`;
  } else if (path_type == "app") {
    return `${path}.app/*,${path}__app/*`;
  } else if (path_type == "raw_app") {
    return `${path}.raw_app/**,${path}__raw_app/**`;
  } else if (path_type == "folder") {
    return `${path}/folder.meta.*`;
  } else if (path_type == "resourcetype") {
    return `${path}.resource-type.*`;
  } else if (path_type == "resource") {
    return `${path}.resource.*`;
  } else if (path_type == "variable") {
    return `${path}.variable.*`;
  } else if (path_type == "schedule") {
    return `${path}.schedule.*`;
  } else if (path_type == "user") {
    return `${path}.user.*`;
  } else if (path_type == "group") {
    return `${path}.group.*`;
  } else if (path_type == "httptrigger") {
    return `${path}.http_trigger.*`;
  } else if (path_type == "websockettrigger") {
    return `${path}.websocket_trigger.*`;
  } else if (path_type == "kafkatrigger") {
    return `${path}.kafka_trigger.*`;
  } else if (path_type == "natstrigger") {
    return `${path}.nats_trigger.*`;
  } else if (path_type == "postgrestrigger") {
    return `${path}.postgres_trigger.*`;
  } else if (path_type == "mqtttrigger") {
    return `${path}.mqtt_trigger.*`;
  } else if (path_type == "sqstrigger") {
    return `${path}.sqs_trigger.*`;
  } else if (path_type == "gcptrigger") {
    return `${path}.gcp_trigger.*`;
  } else if (path_type == "emailtrigger") {
    return `${path}.email_trigger.*`;
  } else {
    return `${path}.*`;
  }
}

async function wmill_sync_pull(
  items: SyncObject[],
  workspace_id: string,
  skip_secret: boolean,
  repo_url_resource_path: string,
  use_individual_branch: boolean,
  original_branch?: string
) {
  const includes = [];
  for (const item of items) {
    const { path_type, path, parent_path } = item;
    if (path !== undefined && path !== null && path !== "") {
      includes.push(regexFromPath(path_type, path));
    }
    if (parent_path !== undefined && parent_path !== null && parent_path !== "") {
      includes.push(regexFromPath(path_type, parent_path));
    }
  }

  console.log("Pulling workspace into git repo");

  const args = [
    "sync",
    "pull",
    "--token",
    process.env["WM_TOKEN"] ?? "",
    "--workspace",
    workspace_id,
    "--base-url",
    process.env["BASE_URL"] + "/",
    "--repository",
    repo_url_resource_path,
    "--yes",
    skip_secret ? "--skip-secrets" : "",
  ];

  if (items.some(item => item.path_type === "schedule") && !use_individual_branch) {
    args.push("--include-schedules");
  }

  if (items.some(item => item.path_type === "group") && !use_individual_branch) {
    args.push("--include-groups");
  }

  if (items.some(item => item.path_type === "user") && !use_individual_branch) {
    args.push("--include-users");
  }

  if (items.some(item => item.path_type.includes("trigger")) && !use_individual_branch) {
    args.push("--include-triggers");
  }
  // Only include settings when specifically deploying settings
  if (items.some(item => item.path_type === "settings") && !use_individual_branch) {
    args.push("--include-settings");
  }

  // Only include key when specifically deploying keys
  if (items.some(item => item.path_type === "key") && !use_individual_branch) {
    args.push("--include-key");
  }

  args.push("--extra-includes", includes.join(","));

  // If using individual branches, apply promotion settings from original branch
  if (use_individual_branch && original_branch) {
    console.log(`Individual branch deployment detected - using promotion settings from '${original_branch}'`);
    args.push("--promotion", original_branch);
  }

  await wmill_run(3, ...args);
}

async function wmill_run(secret_position: number, ...cmd: string[]) {
  cmd = cmd.filter((elt) => elt !== "");
  const cmd2 = cmd.slice();
  cmd2[secret_position] = "***";
  console.log(`Running 'wmill ${cmd2.join(" ")} ...'`);
  await wmill.parse(cmd);
  console.log("Command successfully executed");
}

// Function to set up GPG signing
async function set_gpg_signing_secret(gpg_key: GpgKey) {
  try {
    console.log("Setting GPG private key for git commits");

    const formattedGpgContent = gpg_key.private_key.replace(
      /(-----BEGIN PGP PRIVATE KEY BLOCK-----)([\s\S]*?)(-----END PGP PRIVATE KEY BLOCK-----)/,
      (_: string, header: string, body: string, footer: string) =>
        header +
        "\n" +
        "\n" +
        body.replace(/ ([^\s])/g, "\n$1").trim() +
        "\n" +
        footer
    );

    const gpg_path = `/tmp/gpg`;
    await sh_run(undefined, "mkdir", "-p", gpg_path);
    await sh_run(undefined, "chmod", "700", gpg_path);
    process.env.GNUPGHOME = gpg_path;
    // process.env.GIT_TRACE = 1;

    try {
      await sh_run(
        1,
        "bash",
        "-c",
        `cat <<EOF | gpg --batch --import \n${formattedGpgContent}\nEOF`
      );
    } catch (e) {
      // Original error would contain sensitive data
      throw new Error("Failed to import GPG key!");
    }

    const listKeysOutput = await sh_run(
      undefined,
      "gpg",
      "--list-secret-keys",
      "--with-colons",
      "--keyid-format=long"
    );

    const keyInfoMatch = listKeysOutput.match(
      /sec:[^:]*:[^:]*:[^:]*:([A-F0-9]+):.*\nfpr:::::::::([A-F0-9]{40}):/
    );

    if (!keyInfoMatch) {
      throw new Error("Failed to extract GPG Key ID and Fingerprint");
    }

    const keyId = keyInfoMatch[1];
    gpgFingerprint = keyInfoMatch[2];

    if (gpg_key.passphrase) {
      // This is a dummy command to unlock the key
      // with passphrase to load it into agent
      await sh_run(
        1,
        "bash",
        "-c",
        `echo "dummy" | gpg --batch --pinentry-mode loopback --passphrase '${gpg_key.passphrase}' --status-fd=2 -bsau ${keyId}`
      );
    }

    // Configure Git to use the extracted key
    await sh_run(undefined, "git", "config", "user.signingkey", keyId);
    await sh_run(undefined, "git", "config", "commit.gpgsign", "true");
    console.log(`GPG signing configured with key ID: ${keyId} `);
  } catch (e) {
    console.error(`Failure while setting GPG key: ${e} `);
    await delete_pgp_keys();
  }
}

async function delete_pgp_keys() {
  console.log("deleting gpg keys");
  if (gpgFingerprint) {
    await sh_run(
      undefined,
      "gpg",
      "--batch",
      "--yes",
      "--pinentry-mode",
      "loopback",
      "--delete-secret-key",
      gpgFingerprint
    );
    await sh_run(
      undefined,
      "gpg",
      "--batch",
      "--yes",
      "--delete-key",
      "--pinentry-mode",
      "loopback",
      gpgFingerprint
    );
  }
}

async function get_gh_app_token() {
  const workspace = process.env["WM_WORKSPACE"];
  const jobToken = process.env["WM_TOKEN"];

  const baseUrl =
    process.env["BASE_INTERNAL_URL"] ??
    process.env["BASE_URL"] ??
    "http://localhost:8000";

  const url = `${baseUrl}/api/w/${workspace}/github_app/token`;

  const response = await fetch(url, {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${jobToken}`,
    },
    body: JSON.stringify({
      job_token: jobToken,
    }),
  });

  if (!response.ok) {
    throw new Error(`Error: ${response.statusText}`);
  }

  const data = await response.json();

  return data.token;
}

function prependTokenToGitHubUrl(gitHubUrl: string, installationToken: string) {
  if (!gitHubUrl || !installationToken) {
    throw new Error("Both GitHub URL and Installation Token are required.");
  }

  try {
    const url = new URL(gitHubUrl);

    // GitHub repository URL should be in the format: https://github.com/owner/repo.git
    if (url.hostname !== "github.com") {
      throw new Error(
        "Invalid GitHub URL. Must be in the format 'https://github.com/owner/repo.git'."
      );
    }

    // Convert URL to include the installation token
    return `https://x-access-token:${installationToken}@github.com${url.pathname}`;
  } catch (e) {
    const error = e as Error;
    throw new Error(`Invalid URL: ${error.message}`);
  }
}