"""Classes and utility functions to generate a remotely hosted cache of all addon catalog entries.
Intended to be run by a server-side systemd timer to generate a file that is then loaded by the
Addon Manager in each FreeCAD installation."""
-import enum
-import xml.etree.ElementTree
-from dataclasses import dataclass, asdict
-from typing import List, Optional
+from dataclasses import is_dataclass, fields
+from typing import Any, List, Optional

import base64
+import enum
+import hashlib
import io
import json
import os
import requests
import shutil
import subprocess
+import xml.etree.ElementTree
import zipfile

import AddonCatalog
@@ -52,15 +53,23 @@
EXCLUDED_REPOS = ["parts_library"]


-@dataclass
-class CacheEntry:
-    """All contents of a CacheEntry are the text contents of the file listed. The icon data is
-    base64-encoded (although it was probably an SVG, other formats are supported)."""
-
-    package_xml: str = ""
-    requirements_txt: str = ""
-    metadata_txt: str = ""
-    icon_data: str = ""
+def recursive_serialize(obj: Any):
+    """Recursively serialize an object, supporting non-dataclasses that themselves contain
+    dataclasses (in this case, AddonCatalog, which contains AddonCatalogEntry)."""
+    if is_dataclass(obj):
+        result = {}
+        for f in fields(obj):
+            value = getattr(obj, f.name)
+            result[f.name] = recursive_serialize(value)
+        return result
+    elif isinstance(obj, list):
+        return [recursive_serialize(i) for i in obj]
+    elif isinstance(obj, dict):
+        return {k: recursive_serialize(v) for k, v in obj.items()}
+    elif hasattr(obj, "__dict__"):
+        return {k: recursive_serialize(v) for k, v in vars(obj).items() if not k.startswith("_")}
+    else:
+        return obj


class GitRefType(enum.IntEnum):
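For illustration, a minimal sketch of what `recursive_serialize` produces for a plain object containing dataclasses; `Catalog` and `Entry` here are hypothetical stand-ins, not the real AddonCatalog classes:

```python
from dataclasses import dataclass
from typing import Dict, List

@dataclass
class Entry:  # hypothetical stand-in for AddonCatalog.AddonCatalogEntry
    repository: str = ""
    branch_display_name: str = ""

class Catalog:  # hypothetical stand-in: a non-dataclass holding dataclasses
    def __init__(self):
        self.entries: Dict[str, List[Entry]] = {
            "Example": [Entry("https://example.com/repo.git", "main")]
        }
        self._loaded = True  # underscore-prefixed, so the serializer skips it

# recursive_serialize(Catalog()) would yield:
# {"entries": {"Example": [{"repository": "https://example.com/repo.git",
#                           "branch_display_name": "main"}]}}
```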
@@ -113,7 +122,14 @@ def write(self):
        with zipfile.ZipFile(
            os.path.join(self.cwd, "addon_catalog_cache.zip"), "w", zipfile.ZIP_DEFLATED
        ) as zipf:
-            zipf.writestr("cache.json", json.dumps(self._cache, indent="  "))
+            zipf.writestr("cache.json", json.dumps(recursive_serialize(self.catalog), indent="  "))
+
+        # Also generate the sha256 hash of the zip file and store it
+        with open(os.path.join(self.cwd, "addon_catalog_cache.zip"), "rb") as cache_file:
+            cache_file_content = cache_file.read()
+        sha256 = hashlib.sha256(cache_file_content).hexdigest()
+        with open(os.path.join(self.cwd, "addon_catalog_cache.zip.sha256"), "w", encoding="utf-8") as hash_file:
+            hash_file.write(sha256)

        with open(os.path.join(self.cwd, "icon_errors.json"), "w") as f:
            json.dump(self.icon_errors, f, indent="  ")
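With the sidecar hash in place, a downstream consumer can check the archive before trusting it. A minimal sketch (the `verify_cache_zip` helper is hypothetical, not part of this PR):

```python
import hashlib

def verify_cache_zip(zip_path: str, hash_path: str) -> bool:
    """Return True if the zip's sha256 matches the published sidecar hash."""
    with open(zip_path, "rb") as f:
        actual = hashlib.sha256(f.read()).hexdigest()
    with open(hash_path, "r", encoding="utf-8") as f:
        expected = f.read().strip()
    return actual == expected

# Usage:
# verify_cache_zip("addon_catalog_cache.zip", "addon_catalog_cache.zip.sha256")
```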
@@ -146,17 +162,12 @@ def create_local_copy_of_single_addon(
                    "Neither git info nor zip info was specified."
                )
                continue
-            entry = self.generate_cache_entry(addon_id, index, catalog_entry)
-            if addon_id not in self._cache:
-                self._cache[addon_id] = []
-            if entry is not None:
-                self._cache[addon_id].append(asdict(entry))
-            else:
-                self._cache[addon_id].append({})
+            metadata = self.generate_cache_entry(addon_id, index, catalog_entry)
+            self.catalog.add_metadata_to_entry(addon_id, index, metadata)

    def generate_cache_entry(
        self, addon_id: str, index: int, catalog_entry: AddonCatalog.AddonCatalogEntry
-    ) -> Optional[CacheEntry]:
+    ) -> Optional[AddonCatalog.CatalogEntryMetadata]:
        """Create the cache entry for this catalog entry if there is data to cache. If there is
        nothing to cache, returns None."""
        path_to_package_xml = self.find_file("package.xml", addon_id, index, catalog_entry)
@@ -167,23 +178,23 @@ def generate_cache_entry(
167
178
path_to_requirements = self .find_file ("requirements.txt" , addon_id , index , catalog_entry )
168
179
if path_to_requirements and os .path .exists (path_to_requirements ):
169
180
if cache_entry is None :
170
- cache_entry = CacheEntry ()
181
+ cache_entry = AddonCatalog . CatalogEntryMetadata ()
171
182
with open (path_to_requirements , "r" , encoding = "utf-8" ) as f :
172
183
cache_entry .requirements_txt = f .read ()
173
184
174
185
path_to_metadata = self .find_file ("metadata.txt" , addon_id , index , catalog_entry )
175
186
if path_to_metadata and os .path .exists (path_to_metadata ):
176
187
if cache_entry is None :
177
- cache_entry = CacheEntry ()
188
+ cache_entry = AddonCatalog . CatalogEntryMetadata ()
178
189
with open (path_to_metadata , "r" , encoding = "utf-8" ) as f :
179
190
cache_entry .metadata_txt = f .read ()
180
191
181
192
return cache_entry
182
193
183
194
def generate_cache_entry_from_package_xml (
184
195
self , path_to_package_xml : str
185
- ) -> Optional [CacheEntry ]:
186
- cache_entry = CacheEntry ()
196
+ ) -> Optional [AddonCatalog . CatalogEntryMetadata ]:
197
+ cache_entry = AddonCatalog . CatalogEntryMetadata ()
187
198
with open (path_to_package_xml , "r" , encoding = "utf-8" ) as f :
188
199
cache_entry .package_xml = f .read ()
189
200
try :
0 commit comments