1
Import workspace from tarball
One script reply has been approved by the moderators Verified

Takes a tarball as input and imports the scripts, resources, and resource types contained in a workspace export.

Used as the webhook target for the windmill-gh-action-deploy GitHub Action.

Created by admin 547 days ago Viewed 4509 times
0
Submitted by admin Python3
Verified 547 days ago
1
import tarfile
2
import io
3
import wmill
4
import json
5

6
from windmill_api.api.script import get_script_by_path, create_script
7
from windmill_api.models.create_script_json_body import CreateScriptJsonBody
8
from windmill_api.models.create_script_json_body_language import (
9
    CreateScriptJsonBodyLanguage,
10
)
11

12

13
from windmill_api.models.create_script_json_body_schema import (
14
    CreateScriptJsonBodySchema,
15
)
16
from windmill_api.models.update_resource_json_body import UpdateResourceJsonBody
17
from windmill_api.models.create_resource_json_body import CreateResourceJsonBody
18

19
from windmill_api.models.update_resource_type_json_body import (
20
    UpdateResourceTypeJsonBody,
21
)
22

23
from windmill_api.models.create_resource_type_json_body import (
24
    CreateResourceTypeJsonBody,
25
)
26
from windmill_api.api.resource import (
27
    create_resource,
28
    create_resource_type,
29
    update_resource,
30
    update_resource_type,
31
    get_resource,
32
    get_resource_type,
33
)
34

35
client = wmill.create_client()
36
workspace = wmill.get_workspace()
37

38

39
SCRIPTS_PREFIX = "scripts/"
40
RESOURCES_PREFIX = "resources/"
41
RESOURCE_TYPES_PREFIX = "resource_types/"
42

43

44
def main(tarball: bytes, dry_run: bool = False):
45
    print("Tarball of size: {} bytes".format(len(tarball)))
46
    io_bytes = io.BytesIO(tarball)
47
    tar = tarfile.open(fileobj=io_bytes, mode="r")
48
    names = tar.getnames()
49

50
    for m in tar.getmembers():
51
        if m.name.startswith(SCRIPTS_PREFIX) and (
52
            m.name.endswith(".py") or m.name.endswith(".ts") or m.name.endswith(".go")
53
        ):
54
            path = m.name[len(SCRIPTS_PREFIX) : -len(".py")]
55
            print("Processing script {}, {}".format(path, m.name))
56
            get_script_response = get_script_by_path.sync_detailed(
57
                workspace=workspace, path=path, client=client
58
            )
59

60

61
            json_tar_path = "{}.json".format(m.name[: -len(".py")])
62
            has_json = json_tar_path in names
63

64
            parent_hash = None
65
            summary = None
66
            description = None
67
            is_template = None
68
            schema = None
69

70
            content = tar.extractfile(m).read().decode("utf-8")
71

72
            if get_script_response.status_code == 200:
73
                old_script = json.loads(get_script_response.content)
74

75
                if old_script["content"] == content:
76
                    if not has_json:
77
                        print(
78
                            "same content and no metadata for this script in tarball, no need to update"
79
                        )
80
                        continue
81

82
                    json_content = tar.extractfile(tar.getmember(json_tar_path)).read()
83
                    metadata = json.loads(json_content)
84

85
                    if (
86
                        old_script.get("summary") == metadata.get("summary")
87
                        and old_script.get("description") == metadata.get("description")
88
                        and old_script.get("is_template") == metadata.get("is_template")
89
                        and old_script.get("schema") == metadata.get("schema")
90
                        and old_script.get("lock") == metadata.get("lock")
91
                    ):
92
                        print("same content and no metadata, no need to update")
93
                        continue
94

95
                parent_hash = old_script["hash"]
96
                summary = old_script.get("summary")
97
                description = old_script.get("description")
98
                is_template = old_script.get("is_template")
99
                schema = old_script.get("schema")
100
                lock = None
101

102
            if has_json:
103
                json_content = tar.extractfile(tar.getmember(json_tar_path)).read()
104
                metadata = json.loads(json_content)
105

106
                summary = metadata.get("summary")
107
                description = metadata.get("description")
108
                is_template = metadata.get("is_template")
109
                schema = metadata.get("schema")
110
                lock = metadata.get("lock")
111
                if lock == []:
112
                    lock = None
113

114
            if schema:
115
                schema = CreateScriptJsonBodySchema.from_dict(schema)
116
            else:
117
                schema = CreateScriptJsonBodySchema.from_dict(dict())
118

119
            print("Uploading new version of script at path: {}".format(path))
120

121
            if dry_run:
122
                print("Skipped because dry-run")
123
            else:
124
                if m.name.endswith(".ts"):
125
                    language = CreateScriptJsonBodyLanguage.DENO
126
                elif m.name.endswith(".py"):
127
                    language = CreateScriptJsonBodyLanguage.PYTHON3
128
                elif m.name.endswith(".go"):
129
                    language = CreateScriptJsonBodyLanguage.GO
130

131
                r = create_script.sync_detailed(
132
                    workspace=workspace,
133
                    client=client,
134
                    json_body=CreateScriptJsonBody(
135
                        language=language,
136
                        content=content,
137
                        path=path,
138
                        parent_hash=parent_hash,
139
                        summary=summary,
140
                        description=description,
141
                        is_template=is_template,
142
                        schema=schema,
143
                        lock=lock,
144
                    ),
145
                )
146
                print(r.content)
147
        if m.name.startswith(RESOURCES_PREFIX) and m.name.endswith(".json"):
148
            path = m.name[len(RESOURCES_PREFIX) : -len(".json")]
149
            print("Processing resource {}, {}".format(path, m.name))
150
            get_resource_response = get_resource.sync_detailed(
151
                workspace=workspace, path=path, client=client
152
            )
153
            content = tar.extractfile(m).read().decode("utf-8")
154
            resource = json.loads(content)
155
            res_error = False
156
            if not res_error and get_resource_response.status_code == 200:
157
                old_resource = json.loads(get_resource_response.content)
158
                if resource["value"] != old_resource["value"]:
159
                    print("Updating existing resource")
160
                    r = update_resource.sync_detailed(
161
                        workspace=workspace,
162
                        path=path,
163
                        json_body=UpdateResourceJsonBody.from_dict(resource),
164
                        client=client,
165
                    )
166
                    print(r)
167
                else:
168
                    print("Skipping updating identical resource")
169
            else:
170
                print("Creating new resource")
171
                r = create_resource.sync_detailed(
172
                    workspace=workspace,
173
                    json_body=CreateResourceJsonBody.from_dict(resource),
174
                    client=client,
175
                )
176
                print(r)
177

178
        if m.name.startswith(RESOURCE_TYPES_PREFIX) and m.name.endswith(".json"):
179
            path = m.name[len(RESOURCE_TYPES_PREFIX) : -len(".json")]
180
            print("Processing resource type {}, {}".format(path, m.name))
181
            get_resource_response = get_resource_type.sync_detailed(
182
                workspace=workspace, path=path, client=client
183
            )
184
            content = tar.extractfile(m).read().decode("utf-8")
185
            resource = json.loads(content)
186
            print(resource)
187
            res_error = False
188

189
            if not res_error and get_resource_response.status_code == 200:
190
                old_resource_type = json.loads(get_resource_response.content)
191
                if resource["schema"] != old_resource_type["schema"]:
192
                    print("Updating existing resource type")
193
                    r = update_resource_type.sync_detailed(
194
                        workspace=workspace,
195
                        path=path,
196
                        json_body=UpdateResourceTypeJsonBody.from_dict(resource),
197
                        client=client,
198
                    )
199
                    print(r)
200
                else:
201
                    print("Skipping updating identical resource type")
202

203
            else:
204
                print("Creating new resource type")
205
                r = create_resource_type.sync_detailed(
206
                    workspace=workspace,
207
                    json_body=CreateResourceTypeJsonBody.from_dict(resource),
208
                    client=client,
209
                )
210
                print(r)
211