├── icons
└── icon.png
├── setup.cfg
├── README.md
├── HaydeeMenuIcon.py
├── timing.py
├── .gitignore
├── __init__.py
├── HaydeePanels.py
├── HaydeeUtils.py
├── HaydeeNodeMat.py
├── HaydeeExporter.py
├── addon_updater_ops.py
└── addon_updater.py
/icons/icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/johnzero7/HaydeeTools/HEAD/icons/icon.png
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [pycodestyle]
2 | ignore = E501,W503
3 | statistics = false
4 | exclude = addon_upd*.py, *_obj.py
5 |
6 | [pep8]
7 | ignore = E501,W503
8 | exclude = addon_upd*.py, *_obj.py
9 |
10 | [flake8]
11 | ignore = E501,W503
12 | exclude = addon_upd*.py, *_obj.py
13 |
14 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | Haydee Tools
2 | =========
3 | A Blender addon to export/import Haydee 1 & 2 assets.
4 |
5 | With Blender 2.80 released there were many changes.
6 |
7 | From v1.2.0 onward this addon only works with Blender 2.80+ (and vice versa).
8 | For Blender 2.79 download v1.0.6
9 |
10 | - Blender 2.80 ==> v1.2.0
11 | - Blender 2.79 ==> v1.0.6
12 |
13 | Main Features:
14 |
15 | more info at:
16 | http://johnzero7.github.io/HaydeeTools/
17 |
18 |
19 | Diffuse and Emissive textures are of the common kind.
20 | Normal maps are: RRRG (B is calculated. Normals can be converted from Edith. Tool=>Import texture. Format NormalMap)
21 | Specular maps are: R=Roughness, G=Specular intensity, B=Metallic (not properly implemented in Haydee. Leave 0)
22 |
--------------------------------------------------------------------------------
/HaydeeMenuIcon.py:
--------------------------------------------------------------------------------
1 | #
2 |
3 | import os
4 |
5 | # --------------------------------------------------------------------------------
6 | # Custom Icons
7 | # --------------------------------------------------------------------------------
# Registry of loaded preview collections, keyed by collection name ("main").
custom_icons = {}
9 |
10 |
def registerCustomIcon():
    """Load the add-on's toolbar icon into a new preview collection."""
    import bpy.utils.previews
    collection = bpy.utils.previews.new()
    icon_path = os.path.join(os.path.dirname(__file__), "icons", "icon.png")
    collection.load("haydee_icon", icon_path, 'IMAGE')
    custom_icons["main"] = collection
18 |
19 |
def unregisterCustomIcon():
    """Release every preview collection created by registerCustomIcon()."""
    import bpy.utils.previews
    while custom_icons:
        _, collection = custom_icons.popitem()
        bpy.utils.previews.remove(collection)
25 |
--------------------------------------------------------------------------------
/timing.py:
--------------------------------------------------------------------------------
1 | #
2 |
3 | import time
4 |
5 | import io
6 | import cProfile
7 | import pstats
8 |
9 |
def profile(fnc):
    """Decorator: run *fnc* under cProfile and print cumulative-time stats."""
    def inner(*args, **kwargs):
        profiler = cProfile.Profile()
        profiler.enable()
        result = fnc(*args, **kwargs)
        profiler.disable()
        report = io.StringIO()
        stats = pstats.Stats(profiler, stream=report).sort_stats('cumulative')
        stats.print_stats()
        print(report.getvalue())
        return result

    return inner
26 |
27 |
def timing(f):
    """Decorator: print the wall-clock runtime of each call to *f* in ms.

    Generalized over the original: the wrapper now forwards keyword
    arguments too (the old one only passed *args) and preserves the
    wrapped function's metadata via functools.wraps.
    """
    from functools import wraps

    @wraps(f)
    def wrap(*args, **kwargs):
        time1 = time.time()
        ret = f(*args, **kwargs)
        time2 = time.time()
        print('%s function took %0.3f ms' % (f.__name__,
                                             (time2 - time1) * 1000.0))
        return ret
    return wrap
37 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Windows image file caches
2 | Thumbs.db
3 | ehthumbs.db
4 |
5 | # Folder config file
6 | Desktop.ini
7 |
8 | # Recycle Bin used on file shares
9 | $RECYCLE.BIN/
10 |
11 | # Windows Installer files
12 | *.cab
13 | *.msi
14 | *.msm
15 | *.msp
16 |
17 | # Windows shortcuts
18 | *.lnk
19 |
20 | # =========================
21 | # Operating System Files
22 | # =========================
23 |
24 | # OSX
25 | # =========================
26 |
27 | .DS_Store
28 | .AppleDouble
29 | .LSOverride
30 |
31 | # Thumbnails
32 | ._*
33 |
34 | # Files that might appear on external disk
35 | .Spotlight-V100
36 | .Trashes
37 |
38 | # Directories potentially created on remote AFP share
39 | .AppleDB
40 | .AppleDesktop
41 | Network Trash Folder
42 | Temporary Items
43 | .apdisk
44 |
45 | # python outputs
46 | __pycache__/
47 | webspynner/__pycache__/
48 | *.py[cod]
49 | *.pyc
50 |
51 | # Eclipse IDE
52 | .pydevproject
53 | .project
54 | .settings/
55 |
56 | # Linux file backups
57 | *~
58 |
59 | .vscode
60 |
--------------------------------------------------------------------------------
/__init__.py:
--------------------------------------------------------------------------------
1 | #
2 |
3 | """Blender Addon. Haydee 1 & 2 importer/exporter."""
4 |
# Add-on metadata shown in Blender's preferences and used by the updater.
bl_info = {
    "name": "Haydee 1 & 2 I/O Scripts",
    "author": "johnzero7, Pooka, Kein",
    "version": (1, 3, 1),
    "blender": (2, 80, 0),
    "location": "File > Import-Export > HaydeeTools",
    "description": "Import-Export scripts for Haydee",
    "warning": "",
    "wiki_url": "https://github.com/johnzero7/HaydeeTools",
    "tracker_url": "https://github.com/johnzero7/HaydeeTools/issues",
    "category": "Import-Export",
}


#############################################
# support reloading sub-modules
# Names of this package's sub-modules; imported below and re-imported on
# Blender's "Reload Scripts" so edits take effect without a restart.
_modules = [
    'HaydeeUtils',
    'HaydeeMenuIcon',
    'HaydeePanels',
    'HaydeeExporter',
    'HaydeeImporter',
    'HaydeeNodeMat',
    'addon_updater_ops',
]

# Reload previously loaded modules
# "bpy" is only in locals() when this file runs a second time (script
# reload); in that case _modules_loaded already exists from the first run.
if "bpy" in locals():
    from importlib import reload
    _modules_loaded[:] = [reload(module) for module in _modules_loaded]
    del reload


# First import the modules
__import__(name=__name__, fromlist=_modules)
_namespace = globals()
# Keep module objects so the reload branch above can refresh them in place.
_modules_loaded = [_namespace[name] for name in _modules]
del _namespace
# support reloading sub-modules
#############################################
45 |
46 | import bpy
47 |
48 |
class UpdaterPreferences(bpy.types.AddonPreferences):
    """Updater Class.

    Add-on preferences holding the addon_updater check settings; the UI is
    drawn by addon_updater_ops in Blender's add-on preferences panel.
    """

    bl_idname = __package__

    # addon updater preferences from `__init__`, be sure to copy all of them
    auto_check_update: bpy.props.BoolProperty(
        name="Auto-check for Update",
        description="If enabled, auto-check for updates using an interval",
        default=False,
    )
    updater_interval_months: bpy.props.IntProperty(
        name='Months',
        description="Number of months between checking for updates",
        default=0,
        min=0
    )
    updater_interval_days: bpy.props.IntProperty(
        name='Days',
        description="Number of days between checking for updates",
        default=7,
        min=0,
    )
    updater_interval_hours: bpy.props.IntProperty(
        name='Hours',
        description="Number of hours between checking for updates",
        default=0,
        min=0,
        max=23
    )
    updater_interval_minutes: bpy.props.IntProperty(
        name='Minutes',
        description="Number of minutes between checking for updates",
        default=0,
        min=0,
        max=59
    )

    def draw(self, context):
        """Draw the updater settings UI (delegated to addon_updater_ops)."""
        addon_updater_ops.update_settings_ui(self, context)
90 |
91 | #
92 | # Registration
93 | #
94 |
95 |
# Classes registered with Blender, in dependency order: preferences first,
# then the UI panels, then the exporter/importer operators and submenus.
classesToRegister = [
    UpdaterPreferences,
    HaydeePanels.HaydeeToolsImportPanel,
    HaydeePanels.HaydeeToolsExportPanel,
    HaydeePanels.HaydeeToolsSkelPanel,

    HaydeeExporter.ExportHaydeeDSkel,
    HaydeeExporter.ExportHaydeeDPose,
    HaydeeExporter.ExportHaydeeDMotion,
    HaydeeExporter.ExportHaydeeDMesh,
    HaydeeExporter.HaydeeExportSubMenu,

    HaydeeImporter.ImportHaydeeSkel,
    HaydeeImporter.ImportHaydeeDSkel,
    HaydeeImporter.ImportHaydeeDMesh,
    HaydeeImporter.ImportHaydeeMesh,
    HaydeeImporter.ImportHaydeeMotion,
    HaydeeImporter.ImportHaydeeDMotion,
    HaydeeImporter.ImportHaydeePose,
    HaydeeImporter.ImportHaydeeDPose,
    HaydeeImporter.ImportHaydeeOutfit,
    HaydeeImporter.ImportHaydeeSkin,
    HaydeeImporter.ImportHaydeeMaterial,
    HaydeeImporter.HaydeeImportSubMenu,

    HaydeeUtils.HaydeeToolFitArmature_Op,
    HaydeeUtils.HaydeeToolFitMesh_Op,
]


# Use factory to create method to register and unregister the classes
registerClasses, unregisterClasses = bpy.utils.register_classes_factory(classesToRegister)
128 |
129 |
def register():
    """Register addon classes."""
    # Icons must exist before the panels that reference them are drawn.
    HaydeeMenuIcon.registerCustomIcon()
    registerClasses()
    HaydeeExporter.register()
    HaydeeImporter.register()
    addon_updater_ops.register(bl_info)
137 |
138 |
def unregister():
    """Unregister addon classes (mirror of register(), reverse order)."""
    addon_updater_ops.unregister()
    HaydeeExporter.unregister()
    HaydeeImporter.unregister()
    unregisterClasses()
    HaydeeMenuIcon.unregisterCustomIcon()
146 |
147 |
148 | if __name__ == "__main__":
149 | register()
150 |
151 | # call exporter
152 | # bpy.ops.xps_tools.export_model('INVOKE_DEFAULT')
153 |
154 | # call importer
155 | # bpy.ops.xps_tools.import_model('INVOKE_DEFAULT')
156 |
--------------------------------------------------------------------------------
/HaydeePanels.py:
--------------------------------------------------------------------------------
1 | #
2 |
3 | import bpy
4 |
5 |
class _HaydeeToolsPanel():
    """Mixin with the shared placement settings for all Haydee panels."""

    bl_space_type = 'VIEW_3D'
    bl_region_type = 'UI'
    bl_category = 'Haydee'
    bl_context = 'objectmode'
11 |
12 |
class HaydeeToolsImportPanel(_HaydeeToolsPanel, bpy.types.Panel):
    """Haydee Import Toolshelf."""

    bl_idname = 'OBJECT_PT_haydee_import_tools_object'
    bl_label = 'Haydee Import Tools'

    # (section label, ((operator idname, button text), ...))
    _SECTIONS = (
        ('Outfit:', (("haydee_importer.outfit", 'Outfit'),)),
        ('Mesh:', (("haydee_importer.dmesh", 'DMesh'),
                   ("haydee_importer.mesh", 'Mesh'))),
        ('Skeleton:', (("haydee_importer.dskel", 'DSkel'),
                       ("haydee_importer.skel", 'Skel'))),
        ('Motion:', (("haydee_importer.dmot", 'DMotion'),
                     ("haydee_importer.motion", 'Motion'))),
        ('Pose:', (("haydee_importer.dpose", 'DPose'),
                   ("haydee_importer.pose", 'Pose'))),
        ('Skin:', (("haydee_importer.skin", 'Skin'),)),
        ('Material:', (("haydee_importer.material", 'Material'),)),
    )

    def draw(self, context):
        """Draw one labelled row of import buttons per asset kind."""
        layout = self.layout
        for label, operators in self._SECTIONS:
            column = layout.column()
            column.label(text=label)
            row = column.row(align=True)
            for idname, text in operators:
                row.column(align=True).operator(idname, text=text)
90 |
91 |
class HaydeeToolsExportPanel(_HaydeeToolsPanel, bpy.types.Panel):
    """Haydee Export Tools."""

    bl_idname = 'OBJECT_PT_haydee_export_tools_object'
    bl_label = 'Haydee Export Tools'

    def draw(self, context):
        """Draw one labelled button per exportable asset kind."""
        layout = self.layout

        mesh_col = layout.column()
        mesh_col.label(text='Mesh:')
        mesh_row = mesh_col.row(align=True)
        mesh_row.operator("haydee_exporter.dmesh", text='DMesh', icon='NONE')

        skel_col = layout.column()
        skel_col.label(text="Skeleton:")
        skel_row = skel_col.column().row(align=True)
        skel_row.column(align=True).operator('haydee_exporter.dskel', text='DSkel')

        pose_col = layout.column()
        pose_col.label(text='Pose:')
        pose_col.column(align=True).operator('haydee_exporter.dpose', text='DPose')

        motion_col = layout.column()
        motion_col.label(text='Motion:')
        motion_col.column(align=True).operator('haydee_exporter.dmot', text='DMot')
131 |
132 |
class HaydeeToolsSkelPanel(_HaydeeToolsPanel, bpy.types.Panel):
    """Haydee Adjust Armature Toolshelf."""

    bl_idname = 'OBJECT_PT_haydee_skel_tools_object'
    bl_label = 'Haydee Skel Tools'

    def draw(self, context):
        """Two side-by-side buttons for fitting armature or mesh."""
        layout = self.layout
        column = layout.column()
        column.label(text='Fit Armature/Mesh:')
        row = column.row(align=True)
        row.column(align=True).operator("haydee_tools.fit_to_armature", text='To Armature', icon='NONE')
        row.column(align=True).operator('haydee_tools.fit_to_mesh', text='To Mesh')
150 |
--------------------------------------------------------------------------------
/HaydeeUtils.py:
--------------------------------------------------------------------------------
1 | #
2 |
3 | import bpy
4 | from bpy.props import EnumProperty
5 |
# Haydee caps bone names at 31 characters.
NAME_LIMIT = 31

# Shared EnumProperty used by the import/export operators to pick the
# Haydee file-format generation (Haydee 2 flips the UV vertically).
file_format_prop = EnumProperty(
    name="File Format",
    description="Select file format Haydee 1 / Haydee 2 (flipped UV)",
    items=(('H1', 'Haydee 1', 'Use Haydee 1 File Format'),
           ('H2', 'Haydee 2', 'Use Haydee 2 File Format'),
           ),
    default='H2',
)
16 |
17 |
def boneRenameBlender(bone_name):
    """Convert a Haydee side-prefixed bone name to Blender's suffix style.

    "SK_R_*" / "SK_L_*" become "SK_*_R" / "SK_*_L" so Blender's mirroring
    tools can recognize the side suffix; the result is sanitized.
    """
    renamed = bone_name
    for prefix, suffix in (("SK_R_", "_R"), ("SK_L_", "_L")):
        if renamed.startswith(prefix):
            renamed = "SK_" + renamed[5:] + suffix
    return stripName(renamed)
25 |
26 |
def boneRenameHaydee(bone_name):
    """Convert a Blender suffix-style bone name back to Haydee's prefix style.

    "SK_*_R" / "SK_*_L" become "SK_R_*" / "SK_L_*"; the result is
    sanitized and truncated to Haydee's NAME_LIMIT.
    """
    renamed = bone_name
    for suffix, prefix in (("_R", "SK_R_"), ("_L", "SK_L_")):
        if renamed.startswith("SK_") and renamed.endswith(suffix):
            renamed = prefix + renamed[3:-2]
    return stripName(renamed)[:NAME_LIMIT]
34 |
35 |
def stripName(name):
    """Replace characters Haydee dislikes (space, '*', '-') with underscores."""
    sanitized = name
    for forbidden in (" ", "*", "-"):
        sanitized = sanitized.replace(forbidden, "_")
    return sanitized
38 |
39 |
def decodeText(text):
    """Decode latin-1 bytes, keeping only the part before the first NUL."""
    decoded = text.decode('latin1')
    terminator = decoded.find('\0')
    return decoded if terminator < 0 else decoded[:terminator]
42 |
43 |
def d(number):
    """Format a number with up to 6 decimals, trimming trailing zeros.

    Negative zero (e.g. from tiny negative inputs) is normalized to "0".
    """
    text = ('%.6f' % number).rstrip('0').rstrip('.')
    return "0" if text == "-0" else text
49 |
50 | # --------------------------------------------------------------------------------
51 | # Finds a suitable armature in the current selection or scene
52 | # --------------------------------------------------------------------------------
53 |
54 |
def find_armature(operator, context):
    """Find a suitable armature in the current selection or scene.

    Search order: active object, then selected objects, then all scene
    objects.  Armatures are matched first; if none is found the search is
    repeated accepting meshes and following their armature modifiers.
    Reports an error through *operator* and returns None when nothing
    usable exists; also reports (but still returns one) when several
    distinct armatures match.
    """
    armature = None
    checking = "ARMATURE"
    # Guard: context.active_object can be None (nothing active); the
    # original crashed with AttributeError here.
    active = context.active_object
    obj_list = [active, ] if active is not None and active.type == checking else None
    if not obj_list:
        obj_list = context.selected_objects
    if not obj_list:
        obj_list = context.scene.objects
    while True:
        for ob in obj_list:
            if ob.type == checking:
                if checking == "MESH":
                    # Follow the mesh's armature modifier, if any.
                    armature = ob.find_armature()
                    if armature:
                        ob = armature
                        break
            if ob.type != 'ARMATURE':
                continue
            if armature is not None and armature != ob:
                operator.report({'ERROR'}, "Multiples armatures found, please select a single one and try again")
            armature = ob
        if armature is not None:
            return armature
        if checking == "ARMATURE":
            # Second pass: accept weighted meshes too.
            checking = "MESH"
        else:
            operator.report({'ERROR'}, "No armature found in scene" if obj_list == context.scene.objects else "No armature or weighted mesh selected")
            return None
83 |
84 |
def materials_list(a, b):
    """Build an EnumProperty items list of every material name in the scene.

    The first entry is the '__ALL__' catch-all; the two arguments are the
    unused (self, context) pair Blender passes to items callbacks.
    """
    seen = {}
    for obj in bpy.context.scene.objects:
        if obj.type != "MESH":
            continue
        for slot in obj.material_slots:
            seen[slot.name] = True
    entries = [('__ALL__', 'Export all materials', '')]
    entries.extend((name, name, '') for name in seen)
    return entries
95 |
96 |
def fit_to_armature():
    """Fit selected armatures to the active armature.

    Replaces selected armature with active armature.
    Also modifies the pose of the meshes.
    """
    active = bpy.context.active_object
    if not (active and active.type == 'ARMATURE'):
        return {'FINISHED'}
    # First selected armature other than the active one.
    selected = next((armature for armature in bpy.context.selected_objects if (armature.type == 'ARMATURE' and armature != active)), None)
    if not (selected and selected.type == 'ARMATURE'):
        return {'FINISHED'}
    # Constrain the old armature to the new one, bake that pose into the
    # meshes, then delete the old armature datablock (unlinks its object).
    match_to_armature(selected, active)
    apply_pose(selected, active)
    bpy.data.armatures.remove(selected.data, do_unlink=True)
    return {'FINISHED'}
113 |
114 |
def match_to_armature(armature, target):
    """Add COPY_TRANSFORMS constraints so *armature* follows *target*.

    Only bones whose names also exist in *target* are constrained.
    """
    target_bones = target.pose.bones
    for bone in armature.pose.bones:
        if not target_bones.get(bone.name):
            continue
        constraint = bone.constraints.new('COPY_TRANSFORMS')
        constraint.target = target
        constraint.subtarget = bone.name
121 |
122 |
def apply_pose(selected, active):
    """Bake *selected*'s pose into its meshes and re-home them to *active*.

    Re-parents every child of *selected* to *active*, applies a copy of
    each ARMATURE modifier that targets *selected* (freezing the pose into
    the mesh data), then re-targets the surviving modifier to *active*.
    """
    objs = [obj for obj in bpy.data.objects if (obj.parent == selected)]
    modifiers = [modif for obj in bpy.data.objects for modif in obj.modifiers if (modif.type == 'ARMATURE' and modif.object == selected)]
    for obj in objs:
        obj.parent = active
    for modif in modifiers:
        obj = modif.id_data
        bpy.context.view_layer.objects.active = obj
        index = obj.modifiers.find(modif.name)
        # Apply a duplicate so the original modifier survives; the copy
        # lands directly after the original in the stack.
        bpy.ops.object.modifier_copy(modifier=modif.name)
        new_modif_name = obj.modifiers[index + 1].name
        # NOTE(review): `apply_as` was removed from modifier_apply in
        # Blender 2.90 — confirm which Blender versions are supported.
        bpy.ops.object.modifier_apply(apply_as='DATA', modifier=new_modif_name)
        modif.object = active
    bpy.context.view_layer.objects.active = active
137 |
138 |
def fit_to_mesh():
    """Fit selected armatures to active.

    Like fit_to_armature(), but keeps the meshes' current shape: the
    ACTIVE armature is posed to match the selected one and that pose then
    becomes its new rest pose.
    """
    active = bpy.context.active_object
    if not (active and active.type == 'ARMATURE'):
        return {'FINISHED'}
    selected = next((armature for armature in bpy.context.selected_objects if (armature.type == 'ARMATURE' and armature != active)), None)
    if not (selected and selected.type == 'ARMATURE'):
        return {'FINISHED'}
    # Argument order is reversed versus fit_to_armature(): here the active
    # armature follows the selected one before its pose is baked in.
    match_to_armature(active, selected)
    new_rest_pose(selected, active)
    bpy.data.armatures.remove(selected.data, do_unlink=True)
    return {'FINISHED'}
151 |
152 |
def new_rest_pose(selected, active):
    """Make *active*'s current pose its rest pose; re-home *selected*'s children.

    Bakes the evaluated pose with pose.armature_apply, strips the
    COPY_TRANSFORMS constraints added by match_to_armature(), then moves
    child objects and ARMATURE modifiers from *selected* to *active*.
    """
    bpy.ops.object.mode_set(mode='OBJECT', toggle=False)
    bpy.context.view_layer.objects.active = active
    bpy.ops.object.mode_set(mode='POSE', toggle=False)
    bpy.ops.pose.armature_apply()
    for pose_bone in active.pose.bones:
        # Snapshot the matching constraints first: the original removed
        # items from the collection while iterating it, which skips
        # elements and can leave constraints behind.
        for constraint in [c for c in pose_bone.constraints if c.type == 'COPY_TRANSFORMS']:
            pose_bone.constraints.remove(constraint)
    bpy.ops.object.mode_set(mode='OBJECT', toggle=False)

    objs = [obj for obj in bpy.data.objects if (obj.parent == selected)]
    modifiers = [modif for obj in bpy.data.objects for modif in obj.modifiers if (modif.type == 'ARMATURE' and modif.object == selected)]
    for obj in objs:
        obj.parent = active
    for modif in modifiers:
        modif.object = active
170 |
171 |
class HaydeeToolFitArmature_Op(bpy.types.Operator):
    """Replace the mesh's armature with the selected Haydee skel, keeping the skel's pose."""

    bl_idname = 'haydee_tools.fit_to_armature'
    # Fixed the copy-pasted label ('Cycles') and the "Raplces" typo in the
    # user-visible tooltip.
    bl_label = 'Fit to Armature'
    bl_description = 'Select the mesh armature then the haydee Skel. Replaces the Armature with the skel. Uses the Skel pose'
    bl_options = {'PRESET'}

    def execute(self, context):
        """Run the fit and report completion to Blender."""
        fit_to_armature()
        return {'FINISHED'}
181 |
182 |
class HaydeeToolFitMesh_Op(bpy.types.Operator):
    """Replace the mesh's armature with the selected Haydee skel, keeping the mesh's pose."""

    bl_idname = 'haydee_tools.fit_to_mesh'
    # Fixed the copy-pasted label ('Cycles') and the "Raplces" typo in the
    # user-visible tooltip.
    bl_label = 'Fit to Mesh'
    bl_description = 'Select the mesh armature then the haydee Skel. Replaces the Armature with the skel. Uses the Armature pose'
    bl_options = {'PRESET'}

    def execute(self, context):
        """Run the fit and report completion to Blender."""
        fit_to_mesh()
        return {'FINISHED'}
192 |
--------------------------------------------------------------------------------
/HaydeeNodeMat.py:
--------------------------------------------------------------------------------
1 | #
2 |
3 | import bpy
4 | import os
5 | from mathutils import Vector
6 |
7 | COLOR_SPACE_NONE = 'Non-Color'
8 | ALPHA_MODE_CHANNEL = 'CHANNEL_PACKED'
9 |
10 | # Nodes Layout
11 | NODE_FRAME = 'NodeFrame'
12 |
13 | # Nodes Shaders
14 | BSDF_DIFFUSE_NODE = 'ShaderNodeBsdfDiffuse'
15 | BSDF_EMISSION_NODE = 'ShaderNodeEmission'
16 | BSDF_GLOSSY_NODE = 'ShaderNodeBsdfGlossy'
17 | PRINCIPLED_SHADER_NODE = 'ShaderNodeBsdfPrincipled'
18 | BSDF_TRANSPARENT_NODE = 'ShaderNodeBsdfTransparent'
19 | SHADER_ADD_NODE = 'ShaderNodeAddShader'
20 | SHADER_MIX_NODE = 'ShaderNodeMixShader'
21 |
22 | # Nodes Color
23 | RGB_MIX_NODE = 'ShaderNodeMixRGB'
24 | INVERT_NODE = 'ShaderNodeInvert'
25 |
26 | # Nodes Input
27 | TEXTURE_IMAGE_NODE = 'ShaderNodeTexImage'
28 | SHADER_NODE_FRESNEL = 'ShaderNodeFresnel'
29 | SHADER_NODE_NEW_GEOMETRY = 'ShaderNodeNewGeometry'
30 |
31 | # Nodes Outputs
32 | OUTPUT_NODE = 'ShaderNodeOutputMaterial'
33 |
34 | # Nodes Vector
35 | NORMAL_MAP_NODE = 'ShaderNodeNormalMap'
36 |
37 | # Nodes Convert
38 | SHADER_NODE_MATH = 'ShaderNodeMath'
39 | SHADER_NODE_SEPARATE_RGB = 'ShaderNodeSeparateRGB'
40 | SHADER_NODE_COMBINE_RGB = 'ShaderNodeCombineRGB'
41 |
42 | # Node Groups
43 | NODE_GROUP = 'ShaderNodeGroup'
44 | NODE_GROUP_INPUT = 'NodeGroupInput'
45 | NODE_GROUP_OUTPUT = 'NodeGroupOutput'
46 | SHADER_NODE_TREE = 'ShaderNodeTree'
47 | # Node Custom Groups
48 | HAYDEE_NORMAL_NODE = 'Haydee Normal'
49 |
50 | # Sockets
51 | NODE_SOCKET_COLOR = 'NodeSocketColor'
52 | NODE_SOCKET_FLOAT = 'NodeSocketFloat'
53 | NODE_SOCKET_FLOAT_FACTOR = 'NodeSocketFloatFactor'
54 | NODE_SOCKET_SHADER = 'NodeSocketShader'
55 | NODE_SOCKET_VECTOR = 'NodeSocketVector'
56 |
57 | DEFAULT_PBR_POWER = .5
58 |
59 |
def load_image(textureFilepath, forceNewTexture=False):
    """Load an image datablock from *textureFilepath*.

    Returns None for an empty path.  Missing files yield a 1024x1024
    placeholder image that still points at the path (channel-packed
    alpha), so the link can be repaired later.

    forceNewTexture -- when True, always create a new datablock instead of
    reusing an already-loaded image with the same filepath.
    """
    image = None
    if textureFilepath:
        textureFilename = os.path.basename(textureFilepath)
        # (dropped the unused fileRoot/fileExt splitext from the original)

        if (os.path.exists(textureFilepath)):
            print("Loading Texture: " + textureFilename)
            image = bpy.data.images.load(filepath=textureFilepath, check_existing=not forceNewTexture)
        else:
            print("Warning. Texture not found " + textureFilename)
            image = bpy.data.images.new(
                name=textureFilename, width=1024, height=1024, alpha=True,
                float_buffer=False)
            image.source = 'FILE'
            image.filepath = textureFilepath
            image.alpha_mode = ALPHA_MODE_CHANNEL

    return image
79 |
80 |
def create_material(obj, useAlpha, mat_name, diffuseFile, normalFile, specularFile, emissionFile):
    """Assign a (possibly shared) node material named *mat_name* to *obj*.

    Reuses an existing material datablock with that name when present,
    otherwise creates one; enables nodes and, for alpha materials, sets
    the blend mode to 'BLEND'.  The node graph is rebuilt every call.
    """
    obj.data.materials.clear()

    material = bpy.data.materials.get(mat_name)
    if not material:
        material = bpy.data.materials.new(mat_name)
    material.use_nodes = True
    if useAlpha:
        material.blend_method = 'BLEND'
    obj.data.materials.append(material)

    create_cycle_node_material(material, useAlpha, diffuseFile, normalFile, specularFile, emissionFile)
93 |
94 |
def create_cycle_node_material(material, useAlpha, diffuseFile, normalFile, specularFile, emissionFile):
    """Build the Cycles/Eevee node graph for a Haydee material.

    Diffuse/specular/normal/emission image nodes feed a Principled BSDF;
    the Haydee RRRG normal map is decoded by the custom node group from
    haydee_normal_map().  An Emission shader is always added on top, and
    alpha materials get a Transparent BSDF mixed in by the diffuse alpha.
    """
    # Nodes
    node_tree = material.node_tree
    node_tree.nodes.clear()
    col_width = 200  # horizontal spacing between node columns

    diffuseTextureNode = node_tree.nodes.new(TEXTURE_IMAGE_NODE)
    diffuseTextureNode.label = 'Diffuse'
    diffuseTextureNode.image = load_image(diffuseFile)
    diffuseTextureNode.location = Vector((0, 0))

    specularTextureNode = node_tree.nodes.new(TEXTURE_IMAGE_NODE)
    specularTextureNode.label = 'Roughness Specular Metalic'
    specularTextureNode.image = load_image(specularFile)
    if (specularTextureNode.image):
        try:
            # Data textures must not be color-managed.
            specularTextureNode.image.colorspace_settings.name = COLOR_SPACE_NONE
        except TypeError:
            print('Warning : Could not set Image property')
    specularTextureNode.location = diffuseTextureNode.location + Vector((0, -450))

    normalTextureRgbNode = node_tree.nodes.new(TEXTURE_IMAGE_NODE)
    normalTextureRgbNode.label = 'Haydee Normal'
    normalTextureRgbNode.image = load_image(normalFile)
    if normalTextureRgbNode.image:
        try:
            normalTextureRgbNode.image.colorspace_settings.name = COLOR_SPACE_NONE
        except TypeError:
            print('Warning : Could not set Image property')
    normalTextureRgbNode.location = specularTextureNode.location + Vector((0, -300))

    # Second copy of the normal texture loaded with forceNewTexture=True so
    # its alpha channel can be sampled independently of the RGB node above.
    normalTextureAlphaNode = node_tree.nodes.new(TEXTURE_IMAGE_NODE)
    normalTextureAlphaNode.label = 'Haydee Normal Alpha'
    normalTextureAlphaNode.image = load_image(normalFile, True)
    if normalTextureAlphaNode.image:
        try:
            normalTextureAlphaNode.image.colorspace_settings.name = COLOR_SPACE_NONE
        except TypeError:
            print('Warning : Could not set Image property')
    normalTextureAlphaNode.location = specularTextureNode.location + Vector((0, -600))

    haydeeNormalMapNode = node_tree.nodes.new(NODE_GROUP)
    haydeeNormalMapNode.label = 'Haydee Normal Converter'
    haydeeNormalMapNode.node_tree = haydee_normal_map()
    haydeeNormalMapNode.location = normalTextureRgbNode.location + Vector((col_width * 1.5, 0))
    normalMapNode = node_tree.nodes.new(NORMAL_MAP_NODE)
    normalMapNode.location = haydeeNormalMapNode.location + Vector((col_width, 100))

    emissionTextureNode = node_tree.nodes.new(TEXTURE_IMAGE_NODE)
    emissionTextureNode.label = 'Emission'
    emissionTextureNode.image = load_image(emissionFile)
    emissionTextureNode.location = diffuseTextureNode.location + Vector((0, 260))

    separateRgbNode = node_tree.nodes.new(SHADER_NODE_SEPARATE_RGB)
    separateRgbNode.location = specularTextureNode.location + Vector((col_width * 1.5, 60))

    # R/G/B of the specular map are each raised to DEFAULT_PBR_POWER before
    # feeding roughness / specular / metallic.
    roughnessPowerNode = node_tree.nodes.new(SHADER_NODE_MATH)
    roughnessPowerNode.operation = 'POWER'
    roughnessPowerNode.inputs[1].default_value = DEFAULT_PBR_POWER
    roughnessPowerNode.location = separateRgbNode.location + Vector((col_width, 200))
    specPowerNode = node_tree.nodes.new(SHADER_NODE_MATH)
    specPowerNode.operation = 'POWER'
    specPowerNode.inputs[1].default_value = DEFAULT_PBR_POWER
    specPowerNode.location = separateRgbNode.location + Vector((col_width, 50))
    metallicPowerNode = node_tree.nodes.new(SHADER_NODE_MATH)
    metallicPowerNode.operation = 'POWER'
    metallicPowerNode.inputs[1].default_value = DEFAULT_PBR_POWER
    metallicPowerNode.location = separateRgbNode.location + Vector((col_width, -100))

    # alphaMixNode/transparencyNode stay None unless useAlpha is set.
    # (Dropped the original's redundant None pre-initializations of the
    # pbr* names — they were unconditionally reassigned right below.)
    alphaMixNode = None
    transparencyNode = None
    pbrShaderNode = node_tree.nodes.new(PRINCIPLED_SHADER_NODE)
    pbrShaderNode.location = diffuseTextureNode.location + Vector((col_width * 4, -100))
    pbrColorInput = 'Base Color'
    pbrRoughnessInput = 'Roughness'
    pbrReflectionInput = 'Specular'
    pbrMetallicInput = 'Metallic'

    emissionNode = node_tree.nodes.new(BSDF_EMISSION_NODE)
    emissionNode.inputs['Color'].default_value = (0, 0, 0, 1)
    emissionNode.location = pbrShaderNode.location + Vector((100, 100))
    addShaderNode = node_tree.nodes.new(SHADER_ADD_NODE)
    addShaderNode.location = emissionNode.location + Vector((250, 0))
    outputNode = node_tree.nodes.new(OUTPUT_NODE)
    outputNode.location = addShaderNode.location + Vector((500, 200))
    if useAlpha:
        alphaMixNode = node_tree.nodes.new(SHADER_MIX_NODE)
        alphaMixNode.location = pbrShaderNode.location + Vector((600, 300))
        transparencyNode = node_tree.nodes.new(BSDF_TRANSPARENT_NODE)
        transparencyNode.location = alphaMixNode.location + Vector((-250, -100))

    # Links Input
    links = node_tree.links
    if emissionFile and os.path.exists(emissionFile):
        links.new(emissionTextureNode.outputs['Color'], emissionNode.inputs['Color'])
    links.new(diffuseTextureNode.outputs['Color'], pbrShaderNode.inputs[pbrColorInput])
    links.new(specularTextureNode.outputs['Color'], separateRgbNode.inputs['Image'])
    if normalFile and os.path.exists(normalFile):
        links.new(normalTextureRgbNode.outputs['Color'], haydeeNormalMapNode.inputs['Color'])
        links.new(normalTextureAlphaNode.outputs['Alpha'], haydeeNormalMapNode.inputs['Alpha'])
        links.new(haydeeNormalMapNode.outputs['Normal'], normalMapNode.inputs['Color'])

    links.new(emissionNode.outputs['Emission'], addShaderNode.inputs[0])
    links.new(addShaderNode.outputs['Shader'], outputNode.inputs['Surface'])

    if useAlpha:
        # Diffuse alpha chooses between transparency (0) and the shaded
        # result (1); this output link supersedes the one created above.
        links.new(diffuseTextureNode.outputs['Alpha'], alphaMixNode.inputs['Fac'])
        links.new(transparencyNode.outputs['BSDF'], alphaMixNode.inputs[1])
        links.new(addShaderNode.outputs['Shader'], alphaMixNode.inputs[2])
        links.new(alphaMixNode.outputs['Shader'], outputNode.inputs['Surface'])

    # (Removed a duplicate specular->SeparateRGB link; it was already
    # created above.)
    links.new(separateRgbNode.outputs['R'], roughnessPowerNode.inputs[0])
    links.new(separateRgbNode.outputs['G'], specPowerNode.inputs[0])
    links.new(separateRgbNode.outputs['B'], metallicPowerNode.inputs[0])

    if specularFile and os.path.exists(specularFile):
        links.new(roughnessPowerNode.outputs[0], pbrShaderNode.inputs[pbrRoughnessInput])
        links.new(specPowerNode.outputs[0], pbrShaderNode.inputs[pbrReflectionInput])
    if pbrMetallicInput:
        links.new(metallicPowerNode.outputs[0], pbrShaderNode.inputs[pbrMetallicInput])
    links.new(normalMapNode.outputs['Normal'], pbrShaderNode.inputs['Normal'])

    links.new(pbrShaderNode.outputs[0], addShaderNode.inputs[1])
225 |
226 |
def haydee_normal_map():
    """Return the shared Haydee normal-map node group, building it on first use.

    The group takes the packed Haydee normal texture (R channel via 'Color',
    G channel via 'Alpha'), remaps both from [0, 1] to [-1, 1], reconstructs
    B as sqrt(1 - R^2 - G^2) and outputs the combined normal color.
    """
    # Reuse the group if it was already created by a previous import.
    existing = bpy.data.node_groups.get(HAYDEE_NORMAL_NODE)
    if existing is not None:
        return existing

    group = bpy.data.node_groups.new(HAYDEE_NORMAL_NODE, SHADER_NODE_TREE)
    nodes = group.nodes

    def new_math(operation, parent, location, slot=None, value=None):
        # Helper: math node with a preset operation and optional constant input.
        node = nodes.new(SHADER_NODE_MATH)
        node.parent = parent
        node.operation = operation
        if slot is not None:
            node.inputs[slot].default_value = value
        node.location = location
        return node

    split = nodes.new(SHADER_NODE_SEPARATE_RGB)
    split.location = Vector((0, 0))

    # Fac=0 leaves R untouched; Fac=1 inverts the alpha-packed G channel.
    invert_r = nodes.new(INVERT_NODE)
    invert_r.inputs[0].default_value = 0
    invert_r.location = split.location + Vector((200, 40))
    invert_g = nodes.new(INVERT_NODE)
    invert_g.inputs[0].default_value = 1
    invert_g.location = split.location + Vector((200, -60))

    # Remap R and G from [0, 1] texture space to [-1, 1]: x * 2 - 1.
    space_frame = nodes.new(NODE_FRAME)
    space_frame.name = 'R & G Space Change'
    space_frame.label = 'R & G Space Change'
    mul_r = new_math('MULTIPLY', space_frame, invert_g.location + Vector((250, -100)), 1, 2)
    mul_g = new_math('MULTIPLY', space_frame, invert_g.location + Vector((250, -250)), 1, 2)
    sub_r = new_math('SUBTRACT', space_frame, mul_r.location + Vector((200, 0)), 1, 1)
    sub_g = new_math('SUBTRACT', space_frame, mul_g.location + Vector((200, 0)), 1, 1)

    # Reconstruct B = (1 - R^2 - G^2) ** 0.5 for a unit-length normal.
    b_frame = nodes.new(NODE_FRAME)
    b_frame.name = 'B Calc'
    b_frame.label = 'B Calc'
    pow_r = new_math('POWER', b_frame, sub_r.location + Vector((200, 0)), 1, 2)
    pow_g = new_math('POWER', b_frame, sub_g.location + Vector((200, 0)), 1, 2)
    add_rg = new_math('ADD', b_frame, pow_g.location + Vector((200, 60)))
    one_minus = new_math('SUBTRACT', b_frame, add_rg.location + Vector((200, 0)), 0, 1)
    root = new_math('POWER', b_frame, one_minus.location + Vector((200, 0)), 1, .5)

    combine = nodes.new(SHADER_NODE_COMBINE_RGB)
    combine.location = root.location + Vector((200, 230))

    # Group interface nodes.
    group_in = nodes.new(NODE_GROUP_INPUT)
    group_in.location = split.location + Vector((-200, -100))
    group_out = nodes.new(NODE_GROUP_OUTPUT)
    group_out.location = combine.location + Vector((200, 0))

    # Group sockets: packed color + alpha in, reconstructed normal color out.
    color_in = group.inputs.new(NODE_SOCKET_COLOR, 'Color')
    color_in.default_value = (.5, .5, .5, 1)
    alpha_in = group.inputs.new(NODE_SOCKET_COLOR, 'Alpha')
    alpha_in.default_value = (.5, .5, .5, 1)
    group.outputs.new(NODE_SOCKET_COLOR, 'Normal')

    links = group.links
    links.new(group_in.outputs['Color'], split.inputs['Image'])
    links.new(group_in.outputs['Alpha'], invert_g.inputs['Color'])
    links.new(split.outputs['R'], invert_r.inputs['Color'])

    links.new(invert_r.outputs['Color'], mul_r.inputs[0])
    links.new(invert_g.outputs['Color'], mul_g.inputs[0])
    links.new(mul_r.outputs[0], sub_r.inputs[0])
    links.new(mul_g.outputs[0], sub_g.inputs[0])
    links.new(sub_r.outputs[0], pow_r.inputs[0])
    links.new(sub_g.outputs[0], pow_g.inputs[0])
    links.new(pow_r.outputs['Value'], add_rg.inputs[0])
    links.new(pow_g.outputs['Value'], add_rg.inputs[1])
    links.new(add_rg.outputs['Value'], one_minus.inputs[1])
    links.new(one_minus.outputs['Value'], root.inputs[0])

    links.new(invert_r.outputs['Color'], combine.inputs['R'])
    links.new(invert_g.outputs['Color'], combine.inputs['G'])
    links.new(root.outputs['Value'], combine.inputs['B'])

    links.new(combine.outputs['Image'], group_out.inputs['Normal'])

    return group
339 |
--------------------------------------------------------------------------------
/HaydeeExporter.py:
--------------------------------------------------------------------------------
1 | #
2 |
3 | import bpy
4 | import os
5 | import re
6 | from .HaydeeUtils import d, find_armature, file_format_prop
7 | from .HaydeeUtils import boneRenameHaydee, materials_list, stripName, NAME_LIMIT
8 | from . import HaydeeMenuIcon
9 | from bpy_extras.wm_utils.progress_report import (
10 | ProgressReport,
11 | ProgressReportSubstep,
12 | )
13 |
14 | # ExportHelper is a helper class, defines filename and
15 | # invoke() function which calls the file selector.
16 | from bpy_extras.io_utils import ExportHelper
17 | from bpy.props import StringProperty, BoolProperty, EnumProperty
18 | from bpy.types import Operator
19 | from mathutils import Quaternion, Vector, Matrix
20 | from math import pi
21 |
22 |
23 | # ------------------------------------------------------------------------------
24 | # .dskel exporter
25 | # ------------------------------------------------------------------------------
26 |
def write_dskel(operator, context, filepath):
    """Write the found armature to *filepath* as a text Haydee .dskel file.

    Emits an HD_DATA_TXT 300 "skeleton" block with one "bone" entry per
    bone (width/height/length, optional parent, then origin and axis
    remapped from Blender's axes into Haydee's convention).  Errors are
    reported through *operator*; always returns {'FINISHED'} so the
    operator terminates cleanly even when no armature is found.
    """
    armature = find_armature(operator, context)
    if armature is None:
        return {'FINISHED'}

    bones = armature.data.bones

    # Context manager guarantees the file handle is closed even if an
    # export step raises (the original open()/close() pair leaked on error).
    with open(filepath, 'w', encoding='utf-8') as f:
        f.write("HD_DATA_TXT 300\n\n")
        f.write("skeleton %d\n{\n" % len(bones))
        r = Quaternion([0, 0, 1], -pi / 2)
        for bone in bones:
            head = bone.head_local.xzy
            q = bone.matrix_local.to_quaternion()
            q = (q @ r)
            q = Quaternion([-q.w, q.x, q.y, -q.z])

            bone_name = boneRenameHaydee(bone.name)

            # Haydee bone box dimensions are derived from the bone length.
            bone_side = bone.length / 4
            f.write("\tbone %s\n\t{\n" % bone_name)
            f.write("\t\twidth %s;\n" % d(bone_side))
            f.write("\t\theight %s;\n" % d(bone_side))
            f.write("\t\tlength %s;\n" % d(bone.length))

            if bone.parent:
                parent_name = boneRenameHaydee(bone.parent.name)
                f.write("\t\tparent %s;\n" % parent_name)
                head = bone.head_local
                head = Vector((head.x, head.z, head.y))

            # Axis swizzle/sign flips into Haydee's coordinate convention.
            head = Vector((-head.x, head.y, -head.z))
            q = Quaternion([q.x, q.z, q.y, q.w])
            f.write("\t\torigin %s %s %s;\n" % (d(head.x), d(head.y), d(head.z)))
            f.write("\t\taxis %s %s %s %s;\n" % (d(q.w), d(q.x), d(q.y), d(q.z)))
            f.write("\t}\n")

        f.write("}\n")
    return {'FINISHED'}
67 |
68 |
class ExportHaydeeDSkel(Operator, ExportHelper):
    """Export the active armature skeleton to a Haydee .dskel file"""

    bl_idname = "haydee_exporter.dskel"
    bl_label = "Export Haydee DSkel (.dskel)"
    bl_options = {'REGISTER'}
    filename_ext = ".dskel"
    # Limit the file browser display to .dskel files.
    filter_glob: StringProperty(
        default="*.dskel",
        options={'HIDDEN'},
        maxlen=255,  # Max internal buffer length, longer would be clamped.
    )

    def invoke(self, context, event):
        # Open the file selector; execute() runs once a path is confirmed.
        context.window_manager.fileselect_add(self)
        return {'RUNNING_MODAL'}

    def execute(self, context):
        # Delegate the actual serialization to the module-level writer.
        return write_dskel(self, context, self.filepath)
86 |
87 |
88 | # --------------------------------------------------------------------------------
89 | # .dpose exporter
90 | # --------------------------------------------------------------------------------
91 |
def write_dpose(operator, context, filepath):
    """Write the found armature's current pose to *filepath* as a .dpose file.

    Emits an HD_DATA_TXT 300 "pose" block with one "transform" line per
    pose bone: position and quaternion, computed relative to the parent
    bone when one exists, remapped into Haydee's axis convention.
    Always returns {'FINISHED'}.
    """
    armature = find_armature(operator, context)
    if armature is None:
        return {'FINISHED'}

    bones = armature.pose.bones

    # Context manager guarantees the file handle is closed even if an
    # export step raises (the original open()/close() pair leaked on error).
    with open(filepath, 'w', encoding='utf-8') as f:
        f.write("HD_DATA_TXT 300\n\n")
        f.write("pose\n{\n\tnumTransforms %d;\n" % len(bones))
        r = Quaternion([0, 0, 1], pi / 2)
        for bone in bones:
            # Default: armature-space transform; replaced below for parented
            # bones with the parent-relative transform Haydee expects.
            head = bone.head.xzy
            q = bone.matrix.to_quaternion()
            q = -(q @ r)
            if bone.parent:
                head = bone.parent.matrix.inverted().to_quaternion() @ (bone.head - bone.parent.head)
                head = Vector((-head.y, head.z, head.x))
                q = (bone.parent.matrix.to_3x3().inverted() @ bone.matrix.to_3x3()).to_quaternion()
                q = Quaternion([q.z, -q.y, q.x, -q.w])

            f.write("\ttransform %s %s %s %s %s %s %s %s;\n" % (
                boneRenameHaydee(bone.name),
                d(-head.x), d(head.y), d(-head.z),
                d(q.x), d(-q.w), d(q.y), d(q.z)))

        f.write("}\n")
    return {'FINISHED'}
121 |
122 |
class ExportHaydeeDPose(Operator, ExportHelper):
    """Export the active armature's current pose to a Haydee .dpose file"""

    bl_idname = "haydee_exporter.dpose"
    bl_label = "Export Haydee DPose (.dpose)"
    bl_options = {'REGISTER'}
    filename_ext = ".dpose"
    # Limit the file browser display to .dpose files.
    filter_glob: StringProperty(
        default="*.dpose",
        options={'HIDDEN'},
        maxlen=255,  # Max internal buffer length, longer would be clamped.
    )

    def invoke(self, context, event):
        # Open the file selector; execute() runs once a path is confirmed.
        context.window_manager.fileselect_add(self)
        return {'RUNNING_MODAL'}

    def execute(self, context):
        # Delegate the actual serialization to the module-level writer.
        return write_dpose(self, context, self.filepath)
140 |
141 |
142 | # --------------------------------------------------------------------------------
143 | # .dmot exporter
144 | # --------------------------------------------------------------------------------
145 |
def write_dmot(operator, context, filepath):
    """Sample the scene's frame range and write a Haydee .dmot motion file.

    Steps through every frame between frame_start and frame_end, records
    one "key" line per bone per frame (parent-relative when the bone has a
    parent), then writes one "track" block per bone.  The current frame is
    restored afterwards.  Always returns {'FINISHED'}.
    """
    armature = find_armature(operator, context)
    if armature is None:
        return {'FINISHED'}

    bones = armature.pose.bones
    keyframeCount = bpy.context.scene.frame_end - bpy.context.scene.frame_start + 1
    previousFrame = bpy.context.scene.frame_current
    wm = bpy.context.window_manager

    # One list of key lines per (renamed) bone track.
    lines = {}
    for bone in bones:
        name = boneRenameHaydee(bone.name)
        lines[name] = []

    r = Quaternion([0, 0, 1], pi / 2)
    wm.progress_begin(0, keyframeCount)
    try:
        for frame in range(keyframeCount):
            wm.progress_update(frame)
            context.scene.frame_set(frame + bpy.context.scene.frame_start)
            for bone in bones:

                head = bone.head.xzy
                q = bone.matrix.to_quaternion()
                q = -(q @ r)
                if bone.parent:
                    head = bone.parent.matrix.inverted().to_quaternion() @ (bone.head - bone.parent.head)
                    head = Vector((-head.y, head.z, head.x))
                    q = (bone.parent.matrix.to_3x3().inverted() @ bone.matrix.to_3x3()).to_quaternion()
                    q = Quaternion([-q.z, -q.y, q.x, -q.w])

                name = boneRenameHaydee(bone.name)
                lines[name].append("\t\tkey %s %s %s %s %s %s %s;\n" % (
                    d(-head.x), d(head.y), d(-head.z),
                    d(q.x), d(q.w), d(q.y), d(q.z)))
    finally:
        # Always close the progress indicator and restore the user's frame,
        # even if sampling raised (the original skipped both on error).
        wm.progress_end()
        context.scene.frame_set(previousFrame)

    # Context manager guarantees the file handle is closed on error too.
    with open(filepath, 'w', encoding='utf-8') as f:
        f.write("HD_DATA_TXT 300\n\n")
        f.write("motion\n{\n")
        f.write("\tnumTracks %d;\n" % len(bones))
        f.write("\tnumFrames %d;\n" % keyframeCount)
        f.write("\tframeRate %g;\n" % context.scene.render.fps)
        for bone in bones:
            name = boneRenameHaydee(bone.name)
            f.write("\ttrack %s\n\t{\n" % name)
            f.write("".join(lines[name]))
            f.write("\t}\n")
        f.write("}\n")
    return {'FINISHED'}
199 |
200 |
class ExportHaydeeDMotion(Operator, ExportHelper):
    """Export the scene's frame range as a Haydee .dmot motion file"""

    bl_idname = "haydee_exporter.dmot"
    bl_label = "Export Haydee DMotion (.dmot)"
    bl_options = {'REGISTER'}
    filename_ext = ".dmot"
    # Limit the file browser display to .dmot files.
    filter_glob: StringProperty(
        default="*.dmot",
        options={'HIDDEN'},
        maxlen=255,  # Max internal buffer length, longer would be clamped.
    )

    def invoke(self, context, event):
        # Open the file selector; execute() runs once a path is confirmed.
        context.window_manager.fileselect_add(self)
        return {'RUNNING_MODAL'}

    def execute(self, context):
        # Delegate the actual serialization to the module-level writer.
        return write_dmot(self, context, self.filepath)
218 |
219 |
220 | # --------------------------------------------------------------------------------
221 | # .dmesh exporter
222 | # --------------------------------------------------------------------------------
223 |
def write_dmesh(operator, context, filepath, export_skeleton,
                apply_modifiers, selected_only, separate_files,
                ignore_hidden, SELECTED_MATERIAL, file_format):
    """Collect scene meshes and write them as Haydee text .dmesh file(s).

    Gathers vertices, UVs, face groups and (optionally) skeleton joints
    and vertex weights from every exportable MESH object, then delegates
    the actual writing to to_file().  SELECTED_MATERIAL is either the
    sentinel '__ALL__' (export everything) or a material name used to
    filter objects and faces.  file_format 'H2' flips the V coordinate.
    Problems are reported through *operator*; always returns {'FINISHED'}.
    """
    print("Exporting mesh, material: %s" % SELECTED_MATERIAL)

    # Count exportable meshes (informational; not used below).
    mesh_count = 0
    for ob in context.scene.objects:
        if ob.type == "MESH":
            if SELECTED_MATERIAL == '__ALL__':
                mesh_count += 1
            else:
                for n in range(len(ob.material_slots)):
                    if ob.material_slots[n].name == SELECTED_MATERIAL:
                        mesh_count += 1
                        break

    # NOTE(review): "list" shadows the builtin name inside this function;
    # kept as-is here to avoid touching behavior.
    if selected_only:
        list = context.selected_objects
        if len(list) == 0:
            list = context.scene.objects
    else:
        list = context.scene.objects

    # Accumulators shared across all meshes (re-reset per object below when
    # exporting to separate files).
    (vertex_output, uvs_output, groups_output, groups_count,
     joints_output, weights_output, weights_count, group_count,
     base_vertex_index, base_uv_index, armature, unique_uvs_pos,
     uvs_dict) = reset_variables()
    group_name = None

    # Deterministic export order: mesh objects sorted by name.
    for ob in sorted([x for x in list if x.type == 'MESH'], key=lambda ob: ob.name):
        if ob.type == "MESH":
            # TODO ignore hidden objects
            if ignore_hidden and ob.hide_viewport:
                continue

            if separate_files:
                (vertex_output, uvs_output, groups_output, groups_count,
                 joints_output, weights_output, weights_count, group_count,
                 base_vertex_index, base_uv_index, armature, unique_uvs_pos,
                 uvs_dict) = reset_variables()

            # NOTE(review): "settings" is unused.
            settings = 'PREVIEW'
            # XXX TO MESH
            # mesh = ob.to_mesh(context.depsgraph, apply_modifiers)
            depsgraph = context.evaluated_depsgraph_get()
            ob_for_convert = ob.evaluated_get(depsgraph) if apply_modifiers else ob.original
            mesh = ob_for_convert.to_mesh()
            mat = ob.matrix_world
            vertices = mesh.vertices
            materials = mesh.materials
            polygons = mesh.polygons
            material_index = -1
            # Only the first UV layer is exported.
            if len(mesh.uv_layers) >= 1:
                uvs_data = mesh.uv_layers[0].data
            else:
                uvs_data = None

            # Per-mesh bookkeeping: map local vertex/uv indices to global ones.
            first_uv_index = base_uv_index
            first_vertex_index = base_vertex_index
            vertex_map = {}
            new_mesh_uvs = []
            if SELECTED_MATERIAL == '__ALL__':
                # Export every vertex; deduplicate UV coordinates globally.
                for n in range(len(vertices)):
                    vertex_map[n] = base_vertex_index + n
                if uvs_data is not None:
                    for n, uv in enumerate(uvs_data):
                        uv_pos = uv.uv
                        if uv_pos in unique_uvs_pos:
                            idx = unique_uvs_pos.index(uv_pos)
                        else:
                            idx = len(unique_uvs_pos)
                            unique_uvs_pos.append(uv_pos)
                            new_mesh_uvs.append(uv_pos)
                        uvs_dict.append(idx)
                    base_uv_index += len(uvs_data)
                base_vertex_index += len(vertices)
            else:
                # Export only faces/vertices using the selected material.
                for n in range(len(ob.material_slots)):
                    if ob.material_slots[n].name == SELECTED_MATERIAL:
                        material_index = n
                        break
                if material_index == -1:
                    print("Ignoring mesh %s since no material %s found" % (ob.name, SELECTED_MATERIAL))
                    continue
                for polygon in mesh.polygons:
                    if polygon.material_index == material_index:
                        if uvs_data is not None:
                            for uvIdx in polygon.loop_indices:
                                uv_pos = uvs_data[uvIdx].uv
                                if uv_pos in unique_uvs_pos:
                                    idx = unique_uvs_pos.index(uv_pos)
                                else:
                                    idx = len(unique_uvs_pos)
                                    unique_uvs_pos.append(uv_pos)
                                    new_mesh_uvs.append(uv_pos)
                                uvs_dict.append(idx)
                                base_uv_index += 1
                        for vertex in polygon.vertices:
                            if not (vertex in vertex_map):
                                vertex_map[vertex] = base_vertex_index
                                base_vertex_index += 1
                if len(vertex_map) == 0:
                    print("Ignoring mesh %s since no vertices found with material %s" % (ob.name, SELECTED_MATERIAL))
                    continue

            if uvs_data is None:
                operator.report({'ERROR'}, "Mesh " + ob.name + " is missing UV information")
                continue
            # Export vertices
            vertex_count = base_vertex_index - first_vertex_index
            print("Exporting %d vertices" % vertex_count)
            vertex_indexes = [0] * vertex_count
            for key, value in vertex_map.items():
                vertex_indexes[value - first_vertex_index] = key
            for index in vertex_indexes:
                v = vertices[index]
                # World-space position, swizzled into Haydee's convention.
                co = mat @ v.co
                vertex_output.append("\t\tvert %s %s %s;\n" % (d(-co.x), d(co.z), d(-co.y)))

            # Export UV map
            uv_count = base_uv_index - first_uv_index
            print("Exporting %d uvs" % uv_count)
            # NOTE(review): "uv_indexes" is built but never used.
            uv_indexes = [-1] * uv_count
            if len(mesh.uv_layers) >= 1:
                for uv in new_mesh_uvs:
                    uv_coord = Vector(uv)
                    # Haydee 2 uses a flipped V axis.
                    if (file_format == 'H2'):
                        uv_coord = Vector((uv_coord.x, 1-uv_coord.y))
                    uvs_output.append("\t\tuv %s %s;\n" % (d(uv_coord.x), d(uv_coord.y)))

            # Smooth-group export mode: bitflag groups are always on here.
            EXPORT_SMOOTH_GROUPS = False
            EXPORT_SMOOTH_GROUPS_BITFLAGS = True
            if (EXPORT_SMOOTH_GROUPS or EXPORT_SMOOTH_GROUPS_BITFLAGS):
                smooth_groups, smooth_groups_tot = mesh.calc_smooth_groups(use_bitflags=EXPORT_SMOOTH_GROUPS_BITFLAGS)
                if smooth_groups_tot <= 1:
                    smooth_groups, smooth_groups_tot = (), 0
            else:
                smooth_groups, smooth_groups_tot = (), 0

            # Export faces (by material)
            current_material_index = -1

            if SELECTED_MATERIAL == '__ALL__':
                # NOTE(review): this branch only handles material index 0;
                # it does not loop over materials like the else branch does.
                current_material_index += 1
                if material_index != -1 and material_index != current_material_index:
                    continue
                count = 0
                for polygon in polygons:
                    if polygon.material_index == current_material_index:
                        count += 1
                if count == 0:
                    continue

                # NOTE(review): "mat" here is ob.matrix_world (set above), not
                # a material, so mat.name looks like a latent AttributeError
                # for multi-material meshes in this branch — TODO confirm.
                if len(materials) > 1:
                    group_name = ob.name + '_' + mat.name
                else:
                    group_name = ob.name
                # Haydee group names cannot start with a digit.
                regex = re.compile('^[0-9]')
                if regex.match(group_name):
                    group_name = 'x' + group_name
                group_name = stripName(group_name)
                group_name = group_name[:NAME_LIMIT]
                # if not group_name:
                #     operator.report({'ERROR'}, "Mesh " + ob.name + ", no group name")
                #     continue
                print(group_name, 'count', count)
                if group_name in groups_output:
                    group_output = groups_output[group_name]
                else:
                    group_output = []
                    groups_count[group_name] = 0

                # Face winding and loop order are reversed for Haydee.
                for polygon in polygons:
                    if polygon.material_index == current_material_index:
                        groups_count[group_name] += 1
                        group_output.append("\t\t\tface\n\t\t\t{\n")
                        group_output.append("\t\t\t\tcount %d;\n" % len(polygon.vertices))
                        group_output.append("\t\t\t\tverts ")
                        for v in tuple(polygon.vertices)[::-1]:
                            group_output.append(" %d" % vertex_map[v])
                        group_output.append(";\n")
                        if uvs_data is not None:
                            group_output.append("\t\t\t\tuvs ")
                            for v in tuple(polygon.loop_indices)[::-1]:
                                group_output.append(" %d" % uvs_dict[v + first_uv_index])
                            group_output.append(";\n")
                        if smooth_groups_tot:
                            group_output.append("\t\t\t\tsmoothGroup %d;\n\t\t\t}\n" % smooth_groups[polygon.index])
                        else:
                            group_output.append("\t\t\t\tsmoothGroup %d;\n\t\t\t}\n" % 0)
                groups_output[group_name] = group_output

            else:

                # One group per material slot; "mat" is rebound to a material
                # here, shadowing the matrix_world binding above.
                for mat in materials:
                    current_material_index += 1
                    if material_index != -1 and material_index != current_material_index:
                        continue
                    count = 0
                    for polygon in polygons:
                        if polygon.material_index == current_material_index:
                            count += 1
                    if count == 0:
                        continue

                    if len(materials) > 1:
                        group_name = ob.name + '_' + mat.name
                    else:
                        group_name = ob.name
                    # Haydee group names cannot start with a digit.
                    regex = re.compile('^[0-9]')
                    if regex.match(group_name):
                        group_name = 'x' + group_name
                    group_name = stripName(group_name)
                    group_name = group_name[:NAME_LIMIT]

                    # if not group_name:
                    #     operator.report({'ERROR'}, "Mesh " + ob.name + ", no group name")
                    #     continue

                    print(group_name, 'count', count)
                    if group_name in groups_output:
                        group_output = groups_output[group_name]
                    else:
                        group_output = []
                        groups_count[group_name] = 0

                    # Face winding and loop order are reversed for Haydee.
                    for polygon in polygons:
                        if polygon.material_index == current_material_index:
                            groups_count[group_name] += 1
                            group_output.append("\t\t\tface\n\t\t\t{\n")
                            group_output.append("\t\t\t\tcount %d;\n" % len(polygon.vertices))
                            group_output.append("\t\t\t\tverts ")
                            for v in tuple(polygon.vertices)[::-1]:
                                group_output.append(" %d" % vertex_map[v])
                            group_output.append(";\n")
                            if uvs_data is not None:
                                group_output.append("\t\t\t\tuvs ")
                                for v in tuple(polygon.loop_indices)[::-1]:
                                    group_output.append(" %d" % uvs_dict[v + first_uv_index])
                                group_output.append(";\n")
                            if smooth_groups_tot:
                                group_output.append("\t\t\t\tsmoothGroup %d;\n\t\t\t}\n" % smooth_groups[polygon.index])
                            else:
                                group_output.append("\t\t\t\tsmoothGroup %d;\n\t\t\t}\n" % 0)
                    groups_output[group_name] = group_output

            # Export skeleton
            if export_skeleton:
                # The single-iteration loop exists so "continue" can be used
                # as an early exit from the skeleton-export step.
                for x in range(1):
                    if ob.find_armature():

                        print("Exporting armature: " + ob.find_armature().name)

                        if armature is None:
                            armature = ob.find_armature()
                        bones = armature.data.bones
                        mat = armature.matrix_world

                        joints_output.append("\tjoints %d\n\t{\n" % len(bones))
                        bone_indexes = {}
                        bone_index = 0
                        r = Quaternion([0, 0, 1], -pi / 2)
                        for bone in bones:
                            head = mat @ bone.head.xyz
                            q = bone.matrix_local.to_quaternion()
                            q = q @ r
                            bone_indexes[bone.name[:NAME_LIMIT]] = bone_index
                            bone_index += 1

                            bone_name = boneRenameHaydee(bone.name)

                            # print("Bone %s quaternion: %s" % (bone.name, bone.matrix.to_quaternion() @ r))
                            joints_output.append("\t\tjoint %s\n\t\t{\n" % bone_name)
                            if bone.parent:
                                parent_name = boneRenameHaydee(bone.parent.name)
                                joints_output.append("\t\t\tparent %s;\n" % parent_name)
                                q = (bone.parent.matrix_local.to_3x3().inverted() @ bone.matrix_local.to_3x3()).to_quaternion()
                                q = Quaternion([q.w, -q.y, q.x, q.z])
                                print("%s head: %s parent head: %s" % (bone.name[:NAME_LIMIT], bone.head, bone.parent.head_local))
                                head = (mat @ bone.parent.matrix_local.inverted()).to_quaternion() @ (bone.head_local - bone.parent.head_local)
                                head = Vector((-head.y, head.x, head.z))

                            # Remap origin/axis into Haydee's convention.
                            head = Vector((-head.x, -head.y, head.z))
                            head = Vector((head.x, head.z, head.y))
                            q = Quaternion([-q.w, q.x, -q.z, q.y])
                            q = Quaternion([q.x, q.y, q.z, q.w])
                            joints_output.append("\t\t\torigin %s %s %s;\n" % (d(head.x), d(head.y), d(head.z)))
                            joints_output.append("\t\t\taxis %s %s %s %s;\n" % (d(q.w), d(q.x), d(q.y), d(q.z)))
                            joints_output.append("\t\t}\n")
                        joints_output.append("\t}\n")

                    # NOTE(review): this elif only runs when ob.find_armature()
                    # is falsy, so calling .name on it looks like it would
                    # raise — TODO confirm against upstream.
                    elif armature.name != ob.find_armature().name:
                        operator.report({'ERROR'}, "Multiple armatures present, please select only one")
                        continue

                        # NOTE(review): everything below sits after "continue"
                        # in this suite and therefore appears unreachable, so
                        # weights would never be gathered — TODO confirm the
                        # intended indentation against upstream.
                        vertex_weights = {}
                        vertex_groups = ob.vertex_groups
                        invalid_weighting = False
                        for v in mesh.vertices:
                            for g in v.groups:
                                group = vertex_groups[g.group]
                                if not (group.name[:NAME_LIMIT] in bone_indexes):
                                    continue
                                bone = bone_indexes[group.name[:NAME_LIMIT]]
                                if v.index in vertex_map:
                                    if g.weight > 0:
                                        i = vertex_map[v.index]
                                        if not (i in vertex_weights):
                                            vertex_weights[i] = []
                                        vertex_weights[i].append((vertex_map[v.index], bone, g.weight))
                        for i in sorted(vertex_weights.keys()):
                            weight_list = vertex_weights[i]
                            # sort bone names first?
                            # weight_list = sorted(weight_list, key=lambda bw: bw[1], reverse=True)
                            weight_list = sorted(weight_list, key=lambda bw: bw[2], reverse=True)
                            # if len(weight_list) > 4:
                            #     weight_list = weight_list[0:3]
                            # NOTE(review): "sum" shadows the builtin here.
                            sum = 0
                            for w in weight_list:
                                sum += w[2]
                            for w in weight_list:
                                normalized_weight = w[2] / sum
                                weights_output.append("\t\tweight %d %d %s;\n" % (w[0], w[1], d(normalized_weight)))
                                weights_count += 1
            # clean up
            ob_for_convert.to_mesh_clear()

            if separate_files:
                to_file(separate_files, filepath, group_name, base_vertex_index, vertex_output,
                        unique_uvs_pos, uvs_output, groups_output, groups_count,
                        joints_output, weights_count, weights_output)

    if not separate_files:
        if base_vertex_index == 0 or not group_name:
            operator.report({'ERROR'}, "Nothing to export")
            return {'FINISHED'}

        to_file(separate_files, filepath, group_name, base_vertex_index, vertex_output,
                unique_uvs_pos, uvs_output, groups_output, groups_count,
                joints_output, weights_count, weights_output)

    return {'FINISHED'}
566 |
567 |
def reset_variables():
    """Return a fresh 13-tuple of accumulator state for a dmesh export pass.

    Order: vertex_output, uvs_output, groups_output, groups_count,
    joints_output, weights_output, weights_count, group_count,
    base_vertex_index, base_uv_index, armature, unique_uvs_pos, uvs_dict.
    New container objects are created on every call so successive export
    passes never share state.
    """
    return (
        [],    # vertex_output: "vert x y z;" lines
        [],    # uvs_output: "uv u v;" lines
        {},    # groups_output: group name -> list of face lines
        {},    # groups_count: group name -> face count
        [],    # joints_output: skeleton joint lines
        [],    # weights_output: "weight v b w;" lines
        0,     # weights_count
        0,     # group_count
        0,     # base_vertex_index: next global vertex index
        0,     # base_uv_index: next global uv index
        None,  # armature: discovered lazily during export
        [],    # unique_uvs_pos: deduplicated uv coordinates
        [],    # uvs_dict: loop index -> unique uv index
    )
586 |
587 |
def to_file(separate_files, filepath, group_name, base_vertex_index,
            vertex_output, unique_uvs_pos, uvs_output, groups_output,
            groups_count, joints_output, weights_count, weights_output):
    """Assemble the collected dmesh sections and write them to disk.

    When *separate_files* is true the file is written next to *filepath*
    but named after *group_name* (keeping the original extension).  The
    output is the HD_DATA_TXT 300 text format: verts, uvs, groups (with
    per-group face counts), optional joints and optional weights blocks.
    """
    if separate_files:
        folder_path, _basename = os.path.split(filepath)
        _name, extension = os.path.splitext(filepath)
        filepath = os.path.join(folder_path, "{}{}".format(group_name, extension))

    # Build the whole document in memory, then write it in one call.
    chunks = ["HD_DATA_TXT 300\n\n", "mesh\n{\n"]
    chunks.append("\tverts %d\n\t{\n" % base_vertex_index)
    chunks.extend(vertex_output)
    chunks.append("\t}\n")
    chunks.append("\tuvs %d\n\t{\n" % len(unique_uvs_pos))
    chunks.extend(uvs_output)
    chunks.append("\t}\n")
    chunks.append("\tgroups %d\n\t{\n" % len(groups_output))
    for name, contents in groups_output.items():
        chunks.append("\t\tgroup %s %d\n\t\t{\n" % (name, groups_count[name]))
        chunks.extend(contents)
        chunks.append("\t\t}\n")
    chunks.append("\t}\n")
    chunks.extend(joints_output)
    if weights_count > 0:
        chunks.append("\tweights %d\n\t{\n" % weights_count)
        chunks.extend(weights_output)
        chunks.append("\t}\n")
    chunks.append("}\n")

    with open(filepath, 'w', encoding='utf-8') as f:
        f.write("".join(chunks))
620 |
class ExportHaydeeDMesh(Operator, ExportHelper):
    """Export scene meshes (and optionally the skeleton) to Haydee .dmesh"""

    bl_idname = "haydee_exporter.dmesh"
    bl_label = "Export Haydee dmesh"
    bl_options = {'REGISTER'}
    filename_ext = ".dmesh"
    # Limit the file browser display to .dmesh files.
    filter_glob: StringProperty(
        default="*.dmesh",
        options={'HIDDEN'},
        maxlen=255,  # Max internal buffer length, longer would be clamped.
    )

    # List of operator properties, the attributes will be assigned
    # to the class instance from the operator settings before calling.
    file_format: file_format_prop

    selected_only: BoolProperty(
        name="Selected only",
        description="Export only selected objects (if nothing is selected, full scene will be exported regardless of this setting)",
        default=True,
    )
    separate_files: BoolProperty(
        name="Export to Separate Files",
        description="Export each object to a separate file",
        default=False,
    )
    ignore_hidden: BoolProperty(
        name="Ignore hidden",
        description="Ignore hidden objects",
        default=True,
    )
    apply_modifiers: BoolProperty(
        name="Apply modifiers",
        description="Apply modifiers before exporting",
        default=True,
    )
    export_skeleton: BoolProperty(
        name="Export skeleton",
        description="Export skeleton and vertex weights",
        default=True,
    )
    # Populated dynamically from the scene's materials (plus the '__ALL__'
    # sentinel) by HaydeeUtils.materials_list.
    material: EnumProperty(
        name="Material",
        description="Material to export",
        items=materials_list
    )

    def invoke(self, context, event):
        # Open the file selector; execute() runs once a path is confirmed.
        context.window_manager.fileselect_add(self)
        return {'RUNNING_MODAL'}

    def execute(self, context):
        # Delegate the actual serialization to the module-level writer.
        return write_dmesh(self, context, self.filepath, self.export_skeleton,
                           self.apply_modifiers, self.selected_only, self.separate_files,
                           self.ignore_hidden, self.material, self.file_format)
676 |
677 | # --------------------------------------------------------------------------------
678 | # Initialization & menu
679 | # --------------------------------------------------------------------------------
class HaydeeExportSubMenu(bpy.types.Menu):
    """File > Export submenu grouping all Haydee export operators."""

    bl_idname = "OBJECT_MT_haydee_export_submenu"
    bl_label = "Haydee"

    def draw(self, context):
        # One entry per exporter, in the original menu order.
        entries = (
            (ExportHaydeeDMesh, "Haydee DMesh (.dmesh)"),
            (ExportHaydeeDSkel, "Haydee DSkel (.dskel)"),
            (ExportHaydeeDPose, "Haydee DPose (.dpose)"),
            (ExportHaydeeDMotion, "Haydee DMotion (.dmot)"),
        )
        for operator_cls, label in entries:
            self.layout.operator(operator_cls.bl_idname, text=label)
690 |
691 |
def menu_func_export(self, context):
    """Draw the Haydee entry (with its custom icon) in File > Export."""
    icon_id = HaydeeMenuIcon.custom_icons["main"]["haydee_icon"].icon_id
    self.layout.menu(HaydeeExportSubMenu.bl_idname, icon_value=icon_id)
695 |
696 |
697 | # --------------------------------------------------------------------------------
698 | # Register
699 | # --------------------------------------------------------------------------------
def register():
    """Append the Haydee submenu to Blender's File > Export menu."""
    bpy.types.TOPBAR_MT_file_export.append(menu_func_export)
702 |
703 |
def unregister():
    """Remove the Haydee submenu from Blender's File > Export menu."""
    bpy.types.TOPBAR_MT_file_export.remove(menu_func_export)
706 |
707 |
# Register when the file is run directly as a script (e.g. from Blender's
# text editor) rather than loaded as part of the add-on package.
if __name__ == "__main__":
    register()

# test call
# bpy.ops.haydee_exporter.motion('INVOKE_DEFAULT')
713 |
--------------------------------------------------------------------------------
/addon_updater_ops.py:
--------------------------------------------------------------------------------
1 | # ##### BEGIN GPL LICENSE BLOCK #####
2 | #
3 | # This program is free software; you can redistribute it and/or
4 | # modify it under the terms of the GNU General Public License
5 | # as published by the Free Software Foundation; either version 2
6 | # of the License, or (at your option) any later version.
7 | #
8 | # This program is distributed in the hope that it will be useful,
9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 | # GNU General Public License for more details.
12 | #
13 | # You should have received a copy of the GNU General Public License
14 | # along with this program; if not, write to the Free Software Foundation,
15 | # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
16 | #
17 | # ##### END GPL LICENSE BLOCK #####
18 |
19 | """Blender UI integrations for the addon updater.
20 |
21 | Implements draw calls, popups, and operators that use the addon_updater.
22 | """
23 |
24 | import os
25 | import traceback
26 |
27 | import bpy
28 | from bpy.app.handlers import persistent
29 |
30 | # Safely import the updater.
31 | # Prevents popups for users with invalid python installs e.g. missing libraries
32 | # and will replace with a fake class instead if it fails (so UI draws work).
# Import the real updater; on any failure fall back to an inert stub so the
# add-on (and its UI draw code) keeps working in broken installs.
try:
    from .addon_updater import Updater as updater
except Exception as e:
    print("ERROR INITIALIZING UPDATER")
    print(str(e))
    traceback.print_exc()

    class SingletonUpdaterNone(object):
        """Fake, bare minimum fields and functions for the updater object."""

        def __init__(self):
            self.invalid_updater = True  # Used to distinguish bad install.

            self.addon = None
            self.verbose = False
            self.use_print_traces = True
            self.error = None
            self.error_msg = None
            self.async_checking = None

        def clear_state(self):
            # Reset to the same inert defaults as __init__.
            self.addon = None
            self.verbose = False
            self.invalid_updater = True
            self.error = None
            self.error_msg = None
            self.async_checking = None

        def run_update(self, force, callback, clean):
            # No-op: updates are impossible with a broken install.
            pass

        def check_for_update(self, now):
            # No-op: updates are impossible with a broken install.
            pass

    # Replace the module-level updater with the stub and record the failure
    # so the UI can surface it.
    updater = SingletonUpdaterNone()
    updater.error = "Error initializing updater module"
    updater.error_msg = str(e)

# Must declare this before classes are loaded, otherwise the bl_idname's will
# not match and have errors. Must be all lowercase and no spaces! Should also
# be unique among any other addons that could exist (using this updater code),
# to avoid clashes in operator registration.
updater.addon = "haydee_tools"
76 |
77 |
78 | # -----------------------------------------------------------------------------
79 | # Blender version utils
80 | # -----------------------------------------------------------------------------
def make_annotations(cls):
    """Move bpy property fields into __annotations__ (Blender 2.8+ style).

    Pre-2.8 Blender is left untouched. Returns the (possibly modified) class
    so the call can be used inline during registration.
    """
    if not hasattr(bpy.app, "version") or bpy.app.version < (2, 80):
        return cls
    # The deferred-property representation changed in Blender 2.93.
    if bpy.app.version < (2, 93, 0):
        prop_type = tuple
    else:
        prop_type = bpy.props._PropertyDeferred
    deferred = {name: value for name, value in cls.__dict__.items()
                if isinstance(value, prop_type)}
    if deferred:
        if '__annotations__' not in cls.__dict__:
            setattr(cls, '__annotations__', {})
        annotations = cls.__dict__['__annotations__']
        for name, value in deferred.items():
            annotations[name] = value
            delattr(cls, name)
    return cls
99 |
100 |
def layout_split(layout, factor=0.0, align=False):
    """Compatibility wrapper: split()'s keyword was renamed in Blender 2.80."""
    legacy = not hasattr(bpy.app, "version") or bpy.app.version < (2, 80)
    if legacy:
        return layout.split(percentage=factor, align=align)
    return layout.split(factor=factor, align=align)
106 |
107 |
def get_user_preferences(context=None):
    """Fetch this addon's preferences across Blender versions.

    Returns the preferences object, or None when it cannot be resolved
    (kept exception-free so UI draw calls never raise).
    """
    context = context or bpy.context
    addon = None
    if hasattr(context, "user_preferences"):    # Blender 2.7x
        addon = context.user_preferences.addons.get(__package__, None)
    elif hasattr(context, "preferences"):       # Blender 2.8+
        addon = context.preferences.addons.get(__package__, None)
    return addon.preferences if addon else None
122 |
123 |
124 | # -----------------------------------------------------------------------------
125 | # Updater operators
126 | # -----------------------------------------------------------------------------
127 |
128 |
129 | # Simple popup to prompt use to check for update & offer install if available.
class AddonUpdaterInstallPopup(bpy.types.Operator):
    """Check and install update if available"""
    bl_label = "Update {x} addon".format(x=updater.addon)
    bl_idname = updater.addon + ".updater_install_popup"
    bl_description = "Popup to check and display current updates available"
    bl_options = {'REGISTER', 'INTERNAL'}

    # If true, run clean install - ie remove all files before adding new,
    # equivalent to deleting the addon and reinstalling, except the
    # updater folder/backup folder remains.
    clean_install = bpy.props.BoolProperty(
        name="Clean install",
        description=("If enabled, completely clear the addon's folder before "
                     "installing new update, creating a fresh install"),
        default=False,
        options={'HIDDEN'}
    )

    ignore_enum = bpy.props.EnumProperty(
        name="Process update",
        description="Decide to install, ignore, or defer new addon update",
        items=[
            ("install", "Update Now", "Install update now"),
            ("ignore", "Ignore", "Ignore this update to prevent future popups"),
            ("defer", "Defer", "Defer choice till next blender session")
        ],
        options={'HIDDEN'}
    )

    def check(self, context):
        # Returning True forces the dialog to redraw on property change.
        return True

    def invoke(self, context, event):
        return context.window_manager.invoke_props_dialog(self)

    def draw(self, context):
        layout = self.layout
        if updater.invalid_updater:
            layout.label(text="Updater module error")
            return
        elif updater.update_ready is None:
            # Not yet checked for an update; offer to check now.
            # Note: this must be tested BEFORE the truthiness branches below.
            # The original code put it in a final `else` after
            # `elif not updater.update_ready:`, but `not None` is True, so
            # that branch was unreachable and the "check now" prompt never
            # showed.
            layout.label(text="Check for update now?")
        elif updater.update_ready:
            col = layout.column()
            col.scale_y = 0.7
            col.label(text="Update {} ready!".format(updater.update_version),
                      icon="LOOP_FORWARDS")
            col.label(text="Choose 'Update Now' & press OK to install, ",
                      icon="BLANK1")
            col.label(text="or click outside window to defer", icon="BLANK1")
            row = col.row()
            row.prop(self, "ignore_enum", expand=True)
            col.split()
        else:
            col = layout.column()
            col.scale_y = 0.7
            col.label(text="No updates available")
            col.label(text="Press okay to dismiss dialog")
            # add option to force install

        # Potentially in future, UI to 'check to select/revert to old version'.

    def execute(self, context):
        """Act on the dialog choice: install, ignore, defer, or check."""
        # In case of error importing updater.
        if updater.invalid_updater:
            return {'CANCELLED'}

        if updater.manual_only:
            bpy.ops.wm.url_open(url=updater.website)
        elif updater.update_ready:

            # Action based on enum selection.
            if self.ignore_enum == 'defer':
                return {'FINISHED'}
            elif self.ignore_enum == 'ignore':
                updater.ignore_update()
                return {'FINISHED'}

            res = updater.run_update(force=False,
                                     callback=post_update_callback,
                                     clean=self.clean_install)

            # Should return 0, if not something happened.
            if updater.verbose:
                if res == 0:
                    print("Updater returned successful")
                else:
                    print("Updater returned {}, error occurred".format(res))
        elif updater.update_ready is None:
            _ = updater.check_for_update(now=True)

            # Re-launch this dialog.
            atr = AddonUpdaterInstallPopup.bl_idname.split(".")
            getattr(getattr(bpy.ops, atr[0]), atr[1])('INVOKE_DEFAULT')
        else:
            updater.print_verbose("Doing nothing, not ready for update")
        return {'FINISHED'}
229 |
230 |
231 | # User preference check-now operator
class AddonUpdaterCheckNow(bpy.types.Operator):
    """Preferences operator: launch an asynchronous check for a new release."""
    bl_label = "Check now for " + updater.addon + " update"
    bl_idname = updater.addon + ".updater_check_now"
    bl_description = "Check now for an update to the {} addon".format(
        updater.addon)
    bl_options = {'REGISTER', 'INTERNAL'}

    def execute(self, context):
        # Broken updater import: nothing to check with.
        if updater.invalid_updater:
            return {'CANCELLED'}

        if updater.async_checking and updater.error is None:
            # Check already happened.
            # Used here to just avoid constant applying settings below.
            # Ignoring if error, to prevent being stuck on the error screen.
            return {'CANCELLED'}

        # apply the UI settings
        settings = get_user_preferences(context)
        if not settings:
            updater.print_verbose(
                "Could not get {} preferences, update check skipped".format(
                    __package__))
            return {'CANCELLED'}

        updater.set_check_interval(
            enabled=settings.auto_check_update,
            months=settings.updater_interval_months,
            days=settings.updater_interval_days,
            hours=settings.updater_interval_hours,
            minutes=settings.updater_interval_minutes)

        # Input is an optional callback function. This function should take a
        # bool input. If true: update ready, if false: no update ready.
        updater.check_for_update_now(ui_refresh)

        return {'FINISHED'}
269 |
270 |
class AddonUpdaterUpdateNow(bpy.types.Operator):
    """Operator: install the already-detected update immediately."""
    bl_label = "Update " + updater.addon + " addon now"
    bl_idname = updater.addon + ".updater_update_now"
    bl_description = "Update to the latest version of the {x} addon".format(
        x=updater.addon)
    bl_options = {'REGISTER', 'INTERNAL'}

    # If true, run clean install - ie remove all files before adding new
    # equivalent to deleting the addon and reinstalling, except the updater
    # folder/backup folder remains.
    clean_install = bpy.props.BoolProperty(
        name="Clean install",
        description=("If enabled, completely clear the addon's folder before "
                     "installing new update, creating a fresh install"),
        default=False,
        options={'HIDDEN'}
    )

    def execute(self, context):

        # in case of error importing updater
        if updater.invalid_updater:
            return {'CANCELLED'}

        if updater.manual_only:
            bpy.ops.wm.url_open(url=updater.website)
        # NOTE(review): deliberately not an elif above? When manual_only is
        # set, the chain below still runs after opening the website — confirm
        # this fall-through is intended.
        if updater.update_ready:
            # if it fails, offer to open the website instead
            try:
                res = updater.run_update(force=False,
                                         callback=post_update_callback,
                                         clean=self.clean_install)

                # Should return 0, if not something happened.
                if updater.verbose:
                    if res == 0:
                        print("Updater returned successful")
                    else:
                        print("Updater error response: {}".format(res))
            except Exception as expt:
                # Record the failure and fall back to manual install dialog.
                updater._error = "Error trying to run update"
                updater._error_msg = str(expt)
                updater.print_trace()
                atr = AddonUpdaterInstallManually.bl_idname.split(".")
                getattr(getattr(bpy.ops, atr[0]), atr[1])('INVOKE_DEFAULT')
        elif updater.update_ready is None:
            (update_ready, version, link) = updater.check_for_update(now=True)
            # Re-launch this dialog.
            atr = AddonUpdaterInstallPopup.bl_idname.split(".")
            getattr(getattr(bpy.ops, atr[0]), atr[1])('INVOKE_DEFAULT')

        elif not updater.update_ready:
            self.report({'INFO'}, "Nothing to update")
            return {'CANCELLED'}
        else:
            self.report(
                {'ERROR'}, "Encountered a problem while trying to update")
            return {'CANCELLED'}

        return {'FINISHED'}
331 |
332 |
class AddonUpdaterUpdateTarget(bpy.types.Operator):
    bl_label = updater.addon + " version target"
    bl_idname = updater.addon + ".updater_update_target"
    bl_description = "Install a targeted version of the {x} addon".format(
        x=updater.addon)
    bl_options = {'REGISTER', 'INTERNAL'}

    def target_version(self, context):
        """EnumProperty items callback: one (id, name, tip) entry per tag.

        Returns an empty list when the updater failed to import — the
        original fell through and then crashed with AttributeError, because
        the fallback updater object has no `tags` attribute.
        """
        if updater.invalid_updater:
            return []
        return [(tag, tag, "Select to install " + tag)
                for tag in updater.tags]

    target = bpy.props.EnumProperty(
        name="Target version to install",
        description="Select the version to install",
        items=target_version
    )

    # If true, run clean install - ie remove all files before adding new
    # equivalent to deleting the addon and reinstalling, except the
    # updater folder/backup folder remains.
    clean_install = bpy.props.BoolProperty(
        name="Clean install",
        description=("If enabled, completely clear the addon's folder before "
                     "installing new update, creating a fresh install"),
        default=False,
        options={'HIDDEN'}
    )

    @classmethod
    def poll(cls, context):
        # Needs a valid updater, a completed check, and at least one tag.
        if updater.invalid_updater:
            return False
        return updater.update_ready is not None and len(updater.tags) > 0

    def invoke(self, context, event):
        return context.window_manager.invoke_props_dialog(self)

    def draw(self, context):
        layout = self.layout
        if updater.invalid_updater:
            layout.label(text="Updater error")
            return
        split = layout_split(layout, factor=0.5)
        sub_col = split.column()
        sub_col.label(text="Select install version")
        sub_col = split.column()
        sub_col.prop(self, "target", text="")

    def execute(self, context):
        """Install (or revert to) the selected release tag."""
        # In case of error importing updater.
        if updater.invalid_updater:
            return {'CANCELLED'}

        res = updater.run_update(
            force=False,
            revert_tag=self.target,
            callback=post_update_callback,
            clean=self.clean_install)

        # Should return 0, if not something happened.
        if res == 0:
            updater.print_verbose("Updater returned successful")
        else:
            # (Fixed doubled comma in the original log message.)
            updater.print_verbose(
                "Updater returned {}, error occurred".format(res))
            return {'CANCELLED'}

        return {'FINISHED'}
409 |
410 |
class AddonUpdaterInstallManually(bpy.types.Operator):
    """As a fallback, direct the user to download the addon manually"""
    bl_label = "Install update manually"
    bl_idname = updater.addon + ".updater_install_manually"
    bl_description = "Proceed to manually install update"
    bl_options = {'REGISTER', 'INTERNAL'}

    # Error detail from a failed auto-install; empty string if none occurred.
    error = bpy.props.StringProperty(
        name="Error Occurred",
        default="",
        options={'HIDDEN'}
    )

    def invoke(self, context, event):
        return context.window_manager.invoke_popup(self)

    def draw(self, context):
        """Show download links, plus the failure reason if auto-install broke."""
        layout = self.layout

        if updater.invalid_updater:
            layout.label(text="Updater error")
            return

        # Display error if a prior automated install failed.
        if self.error != "":
            col = layout.column()
            col.scale_y = 0.7
            col.label(text="There was an issue trying to auto-install",
                      icon="ERROR")
            col.label(text="Press the download button below and install",
                      icon="BLANK1")
            col.label(text="the zip file like a normal addon.", icon="BLANK1")
        else:
            col = layout.column()
            col.scale_y = 0.7
            col.label(text="Install the addon manually")
            col.label(text="Press the download button below and install")
            col.label(text="the zip file like a normal addon.")

        # If check hasn't happened, i.e. accidentally called this menu,
        # allow to check here.

        row = layout.row()

        if updater.update_link is not None:
            row.operator(
                "wm.url_open",
                text="Direct download").url = updater.update_link
        else:
            # No direct link known; show a disabled placeholder button.
            row.operator(
                "wm.url_open",
                text="(failed to retrieve direct download)")
            row.enabled = False

        if updater.website is not None:
            row = layout.row()
            ops = row.operator("wm.url_open", text="Open website")
            ops.url = updater.website
        else:
            row = layout.row()
            row.label(text="See source website to download the update")

    def execute(self, context):
        return {'FINISHED'}
475 |
476 |
class AddonUpdaterUpdatedSuccessful(bpy.types.Operator):
    """Addon in place, popup telling user it completed or what went wrong"""
    bl_label = "Installation Report"
    bl_idname = updater.addon + ".updater_update_successful"
    bl_description = "Update installation response"
    bl_options = {'REGISTER', 'INTERNAL', 'UNDO'}

    # Error detail passed in by the caller; empty string means success.
    error = bpy.props.StringProperty(
        name="Error Occurred",
        default="",
        options={'HIDDEN'}
    )

    def invoke(self, context, event):
        return context.window_manager.invoke_props_popup(self, event)

    def draw(self, context):
        """Report success/failure and how to finish applying the update."""
        layout = self.layout

        if updater.invalid_updater:
            layout.label(text="Updater error")
            return

        saved = updater.json
        if self.error != "":
            # Install failed: show the reason plus a manual-download fallback.
            col = layout.column()
            col.scale_y = 0.7
            col.label(text="Error occurred, did not install", icon="ERROR")
            if updater.error_msg:
                msg = updater.error_msg
            else:
                msg = self.error
            col.label(text=str(msg), icon="BLANK1")
            rw = col.row()
            rw.scale_y = 2
            rw.operator(
                "wm.url_open",
                text="Click for manual download.",
                icon="BLANK1").url = updater.website
        elif not updater.auto_reload_post_update:
            # Tell user to restart blender after an update/restore!
            if "just_restored" in saved and saved["just_restored"]:
                col = layout.column()
                col.label(text="Addon restored", icon="RECOVER_LAST")
                alert_row = col.row()
                alert_row.alert = True
                alert_row.operator(
                    "wm.quit_blender",
                    text="Restart blender to reload",
                    icon="BLANK1")
                updater.json_reset_restore()
            else:
                col = layout.column()
                col.label(
                    text="Addon successfully installed", icon="FILE_TICK")
                alert_row = col.row()
                alert_row.alert = True
                alert_row.operator(
                    "wm.quit_blender",
                    text="Restart blender to reload",
                    icon="BLANK1")

        else:
            # reload addon, but still recommend they restart blender
            if "just_restored" in saved and saved["just_restored"]:
                col = layout.column()
                col.scale_y = 0.7
                col.label(text="Addon restored", icon="RECOVER_LAST")
                col.label(
                    text="Consider restarting blender to fully reload.",
                    icon="BLANK1")
                updater.json_reset_restore()
            else:
                col = layout.column()
                col.scale_y = 0.7
                col.label(
                    text="Addon successfully installed", icon="FILE_TICK")
                col.label(
                    text="Consider restarting blender to fully reload.",
                    icon="BLANK1")

    def execute(self, context):
        return {'FINISHED'}
560 |
561 |
class AddonUpdaterRestoreBackup(bpy.types.Operator):
    """Restore addon from backup"""
    bl_label = "Restore backup"
    bl_idname = updater.addon + ".updater_restore_backup"
    bl_description = "Restore addon from backup"
    bl_options = {'REGISTER', 'INTERNAL'}

    @classmethod
    def poll(cls, context):
        # Only enabled when a backup folder actually exists. Narrowed from a
        # bare `except:` to `except Exception` so an invalid updater object
        # (no stage_path) or a bad path just disables the operator, while
        # KeyboardInterrupt/SystemExit still propagate.
        try:
            return os.path.isdir(os.path.join(updater.stage_path, "backup"))
        except Exception:
            return False

    def execute(self, context):
        """Swap the installed addon files back to the saved backup copy."""
        # in case of error importing updater
        if updater.invalid_updater:
            return {'CANCELLED'}
        updater.restore_backup()
        return {'FINISHED'}
582 |
583 |
class AddonUpdaterIgnore(bpy.types.Operator):
    """Mark the pending update as ignored so it stops prompting the user."""
    bl_label = "Ignore update"
    bl_idname = updater.addon + ".updater_ignore"
    bl_description = "Ignore update to prevent future popups"
    bl_options = {'REGISTER', 'INTERNAL'}

    @classmethod
    def poll(cls, context):
        # Available only while a working updater has an update pending.
        return (not updater.invalid_updater) and bool(updater.update_ready)

    def execute(self, context):
        # Bail out when the updater module failed to import.
        if updater.invalid_updater:
            return {'CANCELLED'}
        updater.ignore_update()
        self.report({"INFO"}, "Open addon preferences for updater options")
        return {'FINISHED'}
607 |
608 |
class AddonUpdaterEndBackground(bpy.types.Operator):
    """Stop checking for update in the background"""
    bl_label = "End background check"
    bl_idname = updater.addon + ".end_background_check"
    bl_description = "Stop checking for update in the background"
    bl_options = {'REGISTER', 'INTERNAL'}

    def execute(self, context):
        # in case of error importing updater
        if updater.invalid_updater:
            return {'CANCELLED'}
        # Signal the async check thread to stop.
        updater.stop_async_check_update()
        return {'FINISHED'}
622 |
623 |
624 | # -----------------------------------------------------------------------------
625 | # Handler related, to create popups
626 | # -----------------------------------------------------------------------------
627 |
628 |
# Global flags preventing the same popup handler being registered twice in a
# session (the handlers self-remove after firing, see below).
ran_auto_check_install_popup = False
ran_update_success_popup = False

# Ensures the background check is only kicked off once per session.
ran_background_check = False
635 |
636 |
@persistent
def updater_run_success_popup_handler(scene):
    """One-shot scene handler: show the 'update successful' popup, then
    detach itself from the handler list it was registered on."""
    global ran_update_success_popup
    ran_update_success_popup = True

    # in case of error importing updater
    if updater.invalid_updater:
        return

    try:
        if "scene_update_post" in dir(bpy.app.handlers):  # Blender 2.7x
            bpy.app.handlers.scene_update_post.remove(
                updater_run_success_popup_handler)
        else:  # Blender 2.8+
            bpy.app.handlers.depsgraph_update_post.remove(
                updater_run_success_popup_handler)
    except ValueError:
        # Narrowed from a bare except: list.remove raises ValueError when
        # the handler is already gone; anything else should surface.
        pass

    atr = AddonUpdaterUpdatedSuccessful.bl_idname.split(".")
    getattr(getattr(bpy.ops, atr[0]), atr[1])('INVOKE_DEFAULT')
658 |
659 |
@persistent
def updater_run_install_popup_handler(scene):
    """One-shot scene handler: show the install popup (unless ignored or the
    user already updated manually), then detach itself."""
    global ran_auto_check_install_popup
    ran_auto_check_install_popup = True
    updater.print_verbose("Running the install popup handler.")

    # in case of error importing updater
    if updater.invalid_updater:
        return

    try:
        if "scene_update_post" in dir(bpy.app.handlers):  # Blender 2.7x
            bpy.app.handlers.scene_update_post.remove(
                updater_run_install_popup_handler)
        else:  # Blender 2.8+
            bpy.app.handlers.depsgraph_update_post.remove(
                updater_run_install_popup_handler)
    except ValueError:
        # Narrowed from a bare except: list.remove raises ValueError when
        # the handler is already gone; anything else should surface.
        pass

    if "ignore" in updater.json and updater.json["ignore"]:
        return  # Don't do popup if ignore pressed.
    elif "version_text" in updater.json and updater.json["version_text"].get("version"):
        version = updater.json["version_text"]["version"]
        ver_tuple = updater.version_tuple_from_text(version)

        if ver_tuple < updater.current_version:
            # User probably manually installed to get the up to date addon
            # in here. Clear out the update flag using this function.
            updater.print_verbose(
                "{} updater: appears user updated, clearing flag".format(
                    updater.addon))
            updater.json_reset_restore()
            return
    atr = AddonUpdaterInstallPopup.bl_idname.split(".")
    getattr(getattr(bpy.ops, atr[0]), atr[1])('INVOKE_DEFAULT')
696 |
697 |
def background_update_callback(update_ready):
    """Passed into the updater, background thread updater"""
    global ran_auto_check_install_popup
    updater.print_verbose("Running background update callback")

    # Nothing to do when the updater is broken, popups are disabled,
    # or no update was found.
    if updater.invalid_updater:
        return
    if not updater.show_popups:
        return
    if not update_ready:
        return

    # Resolve the handler list for this Blender version (2.7x vs 2.8+) once,
    # then use it for both the duplicate check and the registration.
    if "scene_update_post" in dir(bpy.app.handlers):
        handler_list = bpy.app.handlers.scene_update_post
    else:
        handler_list = bpy.app.handlers.depsgraph_update_post

    # Avoid queueing the popup more than once per session.
    already_queued = updater_run_install_popup_handler in handler_list
    if already_queued or ran_auto_check_install_popup:
        return

    handler_list.append(updater_run_install_popup_handler)
    ran_auto_check_install_popup = True
    updater.print_verbose("Attempted popup prompt")
730 |
731 |
def post_update_callback(module_name, res=None):
    """Handle completion of updater.run_update.

    Only meaningful when "auto_reload_post_update" is False, i.e. the addon
    is not automatically restarted. Pops the report dialog on success, or
    the same dialog carrying the error detail on failure.

    Arguments:
        module_name: module name reported by the updater, unused here.
        res: None on success, otherwise a string describing the error.
    """
    # Bail out if the updater module never imported correctly.
    if updater.invalid_updater:
        return

    op_path = AddonUpdaterUpdatedSuccessful.bl_idname.split(".")
    popup_op = getattr(getattr(bpy.ops, op_path[0]), op_path[1])

    if res is None:
        # Success path — mirrors the register()-time handling used when
        # "auto_reload_post_update" is True.
        updater.print_verbose(
            "{} updater: Running post update callback".format(updater.addon))
        popup_op('INVOKE_DEFAULT')
        global ran_update_success_popup
        ran_update_success_popup = True
    else:
        # Install failed; show the dialog with the error so the user can
        # fall back to a manual download.
        popup_op('INVOKE_DEFAULT', error=res)
    return
764 |
def ui_refresh(update_status):
    """Force a redraw of every area in every open window.

    Used as the callback after an async update check completes, so the UI
    reflects the new state; `update_status` is accepted but not needed.
    """
    for wm in bpy.data.window_managers:
        for win in wm.windows:
            for area in win.screen.areas:
                area.tag_redraw()
771 |
772 |
def check_for_update_background():
    """Start an asynchronous update check, at most once per session.

    *Could* be called on register, but would be bad practice as the bare
    minimum code should run at the moment of registration (addon ticked).
    """
    global ran_background_check
    if updater.invalid_updater:
        return
    if ran_background_check:
        # Global flag guarantees a single background check per session.
        return
    if updater.update_ready is not None or updater.async_checking:
        # A check already finished (or is in flight); skip re-applying
        # the interval settings below.
        return

    # Apply the UI settings.
    settings = get_user_preferences(bpy.context)
    if not settings:
        return
    updater.set_check_interval(enabled=settings.auto_check_update,
                               months=settings.updater_interval_months,
                               days=settings.updater_interval_days,
                               hours=settings.updater_interval_hours,
                               minutes=settings.updater_interval_minutes)

    # The callback receives a single bool: True means an update is ready.
    updater.check_for_update_async(background_update_callback)
    ran_background_check = True
804 |
805 |
def check_for_update_nonthreaded(self, context):
    """Can be placed in front of other operators to launch when pressed.

    NOTE: `self` is the calling operator (this is a helper, not a method),
    so self.report() targets the operator that invoked the check.
    """
    if updater.invalid_updater:
        return

    # Only check if it's ready, ie after the time interval specified should
    # be the async wrapper call here.
    settings = get_user_preferences(bpy.context)
    if not settings:
        if updater.verbose:
            print("Could not get {} preferences, update check skipped".format(
                __package__))
        return
    updater.set_check_interval(enabled=settings.auto_check_update,
                               months=settings.updater_interval_months,
                               days=settings.updater_interval_days,
                               hours=settings.updater_interval_hours,
                               minutes=settings.updater_interval_minutes)

    (update_ready, version, link) = updater.check_for_update(now=False)
    if update_ready:
        # Prompt the user with the install popup.
        atr = AddonUpdaterInstallPopup.bl_idname.split(".")
        getattr(getattr(bpy.ops, atr[0]), atr[1])('INVOKE_DEFAULT')
    else:
        updater.print_verbose("No update ready")
        self.report({'INFO'}, "No update ready")
832 |
833 |
def show_reload_popup():
    """For use in register only, to show popup after re-enabling the addon.

    Must be enabled by developer.
    """
    if updater.invalid_updater:
        return
    saved_state = updater.json
    global ran_update_success_popup

    # Bail out unless the saved state records a just-completed update.
    # Guarded lookups: the original indexed saved_state["just_updated"]
    # unconditionally, raising KeyError when no update had ever run (and
    # TypeError when the state was None).
    if not saved_state:
        return
    if not saved_state.get("just_updated"):
        return

    updater.json_reset_postupdate()  # So this only runs once.

    # No handlers in this case.
    if not updater.auto_reload_post_update:
        return

    # See if we need add to the update handler to trigger the popup.
    handlers = []
    if "scene_update_post" in dir(bpy.app.handlers):  # 2.7x
        handlers = bpy.app.handlers.scene_update_post
    else:  # 2.8+
        handlers = bpy.app.handlers.depsgraph_update_post
    in_handles = updater_run_success_popup_handler in handlers

    if in_handles or ran_update_success_popup:
        return

    if "scene_update_post" in dir(bpy.app.handlers):  # 2.7x
        bpy.app.handlers.scene_update_post.append(
            updater_run_success_popup_handler)
    else:  # 2.8+
        bpy.app.handlers.depsgraph_update_post.append(
            updater_run_success_popup_handler)
    ran_update_success_popup = True
875 |
876 |
877 | # -----------------------------------------------------------------------------
878 | # Example UI integrations
879 | # -----------------------------------------------------------------------------
def update_notice_box_ui(self, context):
    """Update notice draw, to add to the end or beginning of a panel.

    After a check for update has occurred, this function will draw a box
    saying an update is ready, and give a button for: update now, open website,
    or ignore popup. Ideal to be placed at the end / beginning of a panel.
    """

    if updater.invalid_updater:
        return

    saved_state = updater.json
    if not updater.auto_reload_post_update:
        # Update already installed this session: only offer a restart.
        if "just_updated" in saved_state and saved_state["just_updated"]:
            layout = self.layout
            box = layout.box()
            col = box.column()
            alert_row = col.row()
            alert_row.alert = True
            alert_row.operator(
                "wm.quit_blender",
                text="Restart blender",
                icon="ERROR")
            col.label(text="to complete update")
            return

    # If user pressed ignore, don't draw the box.
    if "ignore" in updater.json and updater.json["ignore"]:
        return
    if not updater.update_ready:
        return

    # Draw the "update ready" box with ignore/update/website actions.
    layout = self.layout
    box = layout.box()
    col = box.column(align=True)
    col.alert = True
    col.label(text="Update ready!", icon="ERROR")
    col.alert = False
    col.separator()
    row = col.row(align=True)
    split = row.split(align=True)
    colL = split.column(align=True)
    colL.scale_y = 1.5
    colL.operator(AddonUpdaterIgnore.bl_idname, icon="X", text="Ignore")
    colR = split.column(align=True)
    colR.scale_y = 1.5
    if not updater.manual_only:
        colR.operator(AddonUpdaterUpdateNow.bl_idname,
                      text="Update", icon="LOOP_FORWARDS")
        col.operator("wm.url_open", text="Open website").url = updater.website
        # ops = col.operator("wm.url_open",text="Direct download")
        # ops.url=updater.update_link
        col.operator(AddonUpdaterInstallManually.bl_idname,
                     text="Install manually")
    else:
        # ops = col.operator("wm.url_open", text="Direct download")
        # ops.url=updater.update_link
        col.operator("wm.url_open", text="Get it now").url = updater.website
938 |
939 |
def update_settings_ui(self, context, element=None):
    """Preferences - for drawing with full width inside user preferences

    A function that can be run inside user preferences panel for prefs UI.
    Place inside UI draw using:
        addon_updater_ops.update_settings_ui(self, context)
    or by:
        addon_updater_ops.update_settings_ui(context)

    Args:
        self: Panel or preferences instance; self.layout is used when no
            element is passed in.
        context: The blender context.
        element: Optional UI element (a layout, row, column, or box) to
            draw into instead of self.layout.
    """

    # Element is a UI element, such as layout, a row, column, or box.
    if element is None:
        element = self.layout
    box = element.box()

    # In case of error importing updater.
    if updater.invalid_updater:
        box.label(text="Error initializing updater code:")
        box.label(text=updater.error_msg)
        return
    settings = get_user_preferences(context)
    if not settings:
        box.label(text="Error getting updater preferences", icon='ERROR')
        return

    # auto-update settings
    box.label(text="Updater Settings")
    row = box.row()

    # special case to tell user to restart blender, if set that way
    if not updater.auto_reload_post_update:
        saved_state = updater.json
        if "just_updated" in saved_state and saved_state["just_updated"]:
            row.alert = True
            row.operator("wm.quit_blender",
                         text="Restart blender to complete update",
                         icon="ERROR")
            return

    # Left column: the auto-check toggle; right column: the check interval,
    # greyed out while auto-check is disabled.
    split = layout_split(row, factor=0.4)
    sub_col = split.column()
    sub_col.prop(settings, "auto_check_update")
    sub_col = split.column()

    if not settings.auto_check_update:
        sub_col.enabled = False
    sub_row = sub_col.row()
    sub_row.label(text="Interval between checks")
    sub_row = sub_col.row(align=True)
    check_col = sub_row.column(align=True)
    check_col.prop(settings, "updater_interval_months")
    check_col = sub_row.column(align=True)
    check_col.prop(settings, "updater_interval_days")
    check_col = sub_row.column(align=True)

    # Consider un-commenting for local dev (e.g. to set shorter intervals)
    # check_col.prop(settings,"updater_interval_hours")
    # check_col = sub_row.column(align=True)
    # check_col.prop(settings,"updater_interval_minutes")

    # Checking / managing updates.
    row = box.row()
    col = row.column()
    if updater.error is not None:
        # Error state: show the error text on a button. Only SSL errors keep
        # the button enabled, pointing at manual install as the workaround.
        sub_col = col.row(align=True)
        sub_col.scale_y = 1
        split = sub_col.split(align=True)
        split.scale_y = 2
        if "ssl" in updater.error_msg.lower():
            split.enabled = True
            split.operator(AddonUpdaterInstallManually.bl_idname,
                           text=updater.error)
        else:
            split.enabled = False
            split.operator(AddonUpdaterCheckNow.bl_idname,
                           text=updater.error)
        split = sub_col.split(align=True)
        split.scale_y = 2
        split.operator(AddonUpdaterCheckNow.bl_idname,
                       text="", icon="FILE_REFRESH")

    elif updater.update_ready is None and not updater.async_checking:
        # No check has been run yet: offer a "check now" button.
        col.scale_y = 2
        col.operator(AddonUpdaterCheckNow.bl_idname)
    elif updater.update_ready is None:  # async is running
        sub_col = col.row(align=True)
        sub_col.scale_y = 1
        split = sub_col.split(align=True)
        split.enabled = False
        split.scale_y = 2
        split.operator(AddonUpdaterCheckNow.bl_idname, text="Checking...")
        split = sub_col.split(align=True)
        split.scale_y = 2
        split.operator(AddonUpdaterEndBackground.bl_idname, text="", icon="X")

    elif updater.include_branches and \
            len(updater.tags) == len(updater.include_branch_list) and not \
            updater.manual_only:
        # No releases found, but still show the appropriate branch.
        sub_col = col.row(align=True)
        sub_col.scale_y = 1
        split = sub_col.split(align=True)
        split.scale_y = 2
        update_now_txt = "Update directly to {}".format(
            updater.include_branch_list[0])
        split.operator(AddonUpdaterUpdateNow.bl_idname, text=update_now_txt)
        split = sub_col.split(align=True)
        split.scale_y = 2
        split.operator(AddonUpdaterCheckNow.bl_idname,
                       text="", icon="FILE_REFRESH")

    elif updater.update_ready and not updater.manual_only:
        # An update is available and can be installed directly.
        sub_col = col.row(align=True)
        sub_col.scale_y = 1
        split = sub_col.split(align=True)
        split.scale_y = 2
        split.operator(AddonUpdaterUpdateNow.bl_idname,
                       text="Update now to " + str(updater.update_version))
        split = sub_col.split(align=True)
        split.scale_y = 2
        split.operator(AddonUpdaterCheckNow.bl_idname,
                       text="", icon="FILE_REFRESH")

    elif updater.update_ready and updater.manual_only:
        # Manual-only mode: just link to the website for downloading.
        col.scale_y = 2
        dl_now_txt = "Download " + str(updater.update_version)
        col.operator("wm.url_open",
                     text=dl_now_txt).url = updater.website
    else:  # i.e. that updater.update_ready == False.
        sub_col = col.row(align=True)
        sub_col.scale_y = 1
        split = sub_col.split(align=True)
        split.enabled = False
        split.scale_y = 2
        split.operator(AddonUpdaterCheckNow.bl_idname,
                       text="Addon is up to date")
        split = sub_col.split(align=True)
        split.scale_y = 2
        split.operator(AddonUpdaterCheckNow.bl_idname,
                       text="", icon="FILE_REFRESH")

    if not updater.manual_only:
        col = row.column(align=True)
        if updater.include_branches and len(updater.include_branch_list) > 0:
            branch = updater.include_branch_list[0]
            col.operator(AddonUpdaterUpdateTarget.bl_idname,
                         text="Install {} / old version".format(branch))
        else:
            col.operator(AddonUpdaterUpdateTarget.bl_idname,
                         text="(Re)install addon version")
        # Offer restoring the automatic backup, labeled with its date if any.
        last_date = "none found"
        backup_path = os.path.join(updater.stage_path, "backup")
        if "backup_date" in updater.json and os.path.isdir(backup_path):
            if updater.json["backup_date"] == "":
                last_date = "Date not found"
            else:
                last_date = updater.json["backup_date"]
        backup_text = "Restore addon backup ({})".format(last_date)
        col.operator(AddonUpdaterRestoreBackup.bl_idname, text=backup_text)

    # Footer: show the last check timestamp (trimmed to whole seconds at the
    # "." of the fractional part) or the pending error message.
    row = box.row()
    row.scale_y = 0.7
    last_check = updater.json["last_check"]
    if updater.error is not None and updater.error_msg is not None:
        row.label(text=updater.error_msg)
    elif last_check:
        last_check = last_check[0: last_check.index(".")]
        row.label(text="Last update check: " + last_check)
    else:
        row.label(text="Last update check: Never")
1111 |
def update_settings_ui_condensed(self, context, element=None):
    """Preferences - Condensed drawing within preferences.

    Alternate draw for user preferences or other places, does not draw a box.

    Args:
        self: Panel or preferences instance; self.layout is used when no
            element is passed in.
        context: The blender context.
        element: Optional UI element (a layout, row, column, or box) to
            draw into instead of self.layout.
    """

    # Element is a UI element, such as layout, a row, column, or box.
    if element is None:
        element = self.layout
    row = element.row()

    # In case of error importing updater.
    if updater.invalid_updater:
        row.label(text="Error initializing updater code:")
        row.label(text=updater.error_msg)
        return
    settings = get_user_preferences(context)
    if not settings:
        row.label(text="Error getting updater preferences", icon='ERROR')
        return

    # Special case to tell user to restart blender, if set that way.
    if not updater.auto_reload_post_update:
        saved_state = updater.json
        if "just_updated" in saved_state and saved_state["just_updated"]:
            row.alert = True  # mark red
            row.operator(
                "wm.quit_blender",
                text="Restart blender to complete update",
                icon="ERROR")
            return

    col = row.column()
    if updater.error is not None:
        # Error state: show the error text on a button. Only SSL errors keep
        # the button enabled, pointing at manual install as the workaround.
        sub_col = col.row(align=True)
        sub_col.scale_y = 1
        split = sub_col.split(align=True)
        split.scale_y = 2
        if "ssl" in updater.error_msg.lower():
            split.enabled = True
            split.operator(AddonUpdaterInstallManually.bl_idname,
                           text=updater.error)
        else:
            split.enabled = False
            split.operator(AddonUpdaterCheckNow.bl_idname,
                           text=updater.error)
        split = sub_col.split(align=True)
        split.scale_y = 2
        split.operator(AddonUpdaterCheckNow.bl_idname,
                       text="", icon="FILE_REFRESH")

    elif updater.update_ready is None and not updater.async_checking:
        # No check has been run yet: offer a "check now" button.
        col.scale_y = 2
        col.operator(AddonUpdaterCheckNow.bl_idname)
    elif updater.update_ready is None:  # Async is running.
        sub_col = col.row(align=True)
        sub_col.scale_y = 1
        split = sub_col.split(align=True)
        split.enabled = False
        split.scale_y = 2
        split.operator(AddonUpdaterCheckNow.bl_idname, text="Checking...")
        split = sub_col.split(align=True)
        split.scale_y = 2
        split.operator(AddonUpdaterEndBackground.bl_idname, text="", icon="X")

    elif updater.include_branches and \
            len(updater.tags) == len(updater.include_branch_list) and not \
            updater.manual_only:
        # No releases found, but still show the appropriate branch.
        sub_col = col.row(align=True)
        sub_col.scale_y = 1
        split = sub_col.split(align=True)
        split.scale_y = 2
        now_txt = "Update directly to " + str(updater.include_branch_list[0])
        split.operator(AddonUpdaterUpdateNow.bl_idname, text=now_txt)
        split = sub_col.split(align=True)
        split.scale_y = 2
        split.operator(AddonUpdaterCheckNow.bl_idname,
                       text="", icon="FILE_REFRESH")

    elif updater.update_ready and not updater.manual_only:
        # An update is available and can be installed directly.
        sub_col = col.row(align=True)
        sub_col.scale_y = 1
        split = sub_col.split(align=True)
        split.scale_y = 2
        split.operator(AddonUpdaterUpdateNow.bl_idname,
                       text="Update now to " + str(updater.update_version))
        split = sub_col.split(align=True)
        split.scale_y = 2
        split.operator(AddonUpdaterCheckNow.bl_idname,
                       text="", icon="FILE_REFRESH")

    elif updater.update_ready and updater.manual_only:
        # Manual-only mode: just link to the website for downloading.
        col.scale_y = 2
        dl_txt = "Download " + str(updater.update_version)
        col.operator("wm.url_open", text=dl_txt).url = updater.website
    else:  # i.e. that updater.update_ready == False.
        sub_col = col.row(align=True)
        sub_col.scale_y = 1
        split = sub_col.split(align=True)
        split.enabled = False
        split.scale_y = 2
        split.operator(AddonUpdaterCheckNow.bl_idname,
                       text="Addon is up to date")
        split = sub_col.split(align=True)
        split.scale_y = 2
        split.operator(AddonUpdaterCheckNow.bl_idname,
                       text="", icon="FILE_REFRESH")

    row = element.row()
    row.prop(settings, "auto_check_update")

    # Footer: show the last check timestamp (trimmed to whole seconds at the
    # "." of the fractional part) or the pending error message.
    row = element.row()
    row.scale_y = 0.7
    last_check = updater.json["last_check"]
    if updater.error is not None and updater.error_msg is not None:
        row.label(text=updater.error_msg)
    elif last_check != "" and last_check is not None:
        last_check = last_check[0: last_check.index(".")]
        row.label(text="Last check: " + last_check)
    else:
        row.label(text="Last check: Never")
1235 |
def skip_tag_function(self, tag):
    """A global function for tag skipping.

    A way to filter which tags are displayed, e.g. to limit downgrading too
    long ago.

    Args:
        self: The instance of the singleton addon update.
        tag: the text content of a tag from the repo, e.g. "v1.2.3".

    Returns:
        bool: True to skip this tag name (ie don't allow for downloading this
        version), or False if the tag is allowed.
    """

    # If the updater module itself failed to load, never filter anything.
    if self.invalid_updater:
        return False

    # ---- write any custom code here, return true to disallow version ---- #
    #
    # # Filter out e.g. if 'beta' is in name of release
    # if 'beta' in tag.lower():
    #     return True
    # ---- write any custom code above, return true to disallow version --- #

    # Branch "tags" are always allowed through when branch installs are on.
    if self.include_branches:
        tag_name = tag["name"].lower()
        if any(tag_name == branch for branch in self.include_branch_list):
            return False

    # Parse "v1.2.3"-style text into a comparable tuple; anything that does
    # not parse to a tuple is treated as unknown and skipped. Be aware this
    # strips out text you might otherwise want to be kept and accounted for
    # when checking tags (e.g. v1.1a vs 1.1b).
    parsed = self.version_tuple_from_text(tag["name"])
    if not isinstance(parsed, tuple):
        return True

    # Enforce the optional minimum installable version (inclusive)...
    if self.version_min_update is not None and parsed < self.version_min_update:
        return True

    # ...and the optional maximum installable version (exclusive).
    if self.version_max_update is not None and parsed >= self.version_max_update:
        return True

    # In all other cases, allow showing the tag for updating/reverting.
    # To simply and always show all tags, this return False could be moved
    # to the start of the function definition so all tags are allowed.
    return False
1288 |
1289 |
def select_link_function(self, tag):
    """Only customize if trying to leverage "attachments" in *GitHub* releases.

    A way to select from one or multiple attached downloadable files from the
    server, instead of downloading the default release/tag source code.
    """

    # Default, universal case: download the auto-generated source zip for the
    # tag (this is also the only option for GitLab/Bitbucket).
    #
    # To instead use a release-attached asset, replace the return below,
    # for example with the first (or only) attachment:
    #   if "assets" in tag and "browser_download_url" in tag["assets"][0]:
    #       return tag["assets"][0]["browser_download_url"]
    # Or pick an asset per-OS (no error checking, adjust to your needs)
    # assuming builds release_windows.zip / release_OSX.zip / release_linux.zip
    # (this would logically not be used with "branches" enabled):
    #   if platform.system() == "Darwin":  # ie OSX
    #       return [a for a in tag["assets"] if 'OSX' in a][0]
    #   elif platform.system() == "Windows":
    #       return [a for a in tag["assets"] if 'windows' in a][0]
    #   elif platform.system() == "Linux":
    #       return [a for a in tag["assets"] if 'linux' in a][0]
    return tag["zipball_url"]
1317 |
1318 |
1319 | # -----------------------------------------------------------------------------
1320 | # Register, should be run in the register module itself
1321 | # -----------------------------------------------------------------------------
# All operator classes this module registers; register() iterates this tuple
# and unregister() walks it in reverse.
classes = (
    AddonUpdaterInstallPopup,
    AddonUpdaterCheckNow,
    AddonUpdaterUpdateNow,
    AddonUpdaterUpdateTarget,
    AddonUpdaterInstallManually,
    AddonUpdaterUpdatedSuccessful,
    AddonUpdaterRestoreBackup,
    AddonUpdaterIgnore,
    AddonUpdaterEndBackground
)
1333 |
1334 |
def register(bl_info):
    """Register the updater operators and configure the updater singleton.

    Args:
        bl_info: The addon's bl_info dict; its "version" tuple is used as
            the currently installed version for update comparisons.
    """
    # Safer failure in case of issue loading module.
    if updater.error:
        print("Exiting updater registration, " + updater.error)
        return
    updater.clear_state()  # Clear internal vars, avoids reloading oddities.

    # Confirm your updater "engine" (Github is default if not specified).
    updater.engine = "Github"
    # updater.engine = "GitLab"
    # updater.engine = "Bitbucket"

    # If using private repository, indicate the token here
    # Must be set after assigning the engine.
    # **WARNING** Depending on the engine, this token can act like a password!!
    # Only provide a token if the project is *non-public*, see readme for
    # other considerations and suggestions from a security standpoint.
    updater.private_token = None  # "tokenstring"

    # Choose your own username, must match website (not needed for GitLab).
    updater.user = "johnzero7"

    # Choose your own repository, must match git name for GitHub and Bitbucket,
    # for GitLab use project ID (numbers only).
    updater.repo = "HaydeeTools"

    # updater.addon = ...  # define at top of module, MUST be done first

    # Website for manual addon download, optional but recommended to set
    updater.website = "https://github.com/johnzero7/HaydeeTools/"

    # Addon subfolder path
    # "sample/path/to/addon"
    # default is "" or None, meaning root
    updater.subfolder_path = ""

    # Used to check/compare versions.
    updater.current_version = bl_info["version"]

    # Optional, to hard-set update frequency, use this here - however, this
    # demo has this set via UI properties.
    # updater.set_check_interval(enabled=False, months=0, days=0, hours=0, minutes=2)

    # Optional, consider turning off for production or allow as an option
    # This will print out additional debugging info to the console
    updater.verbose = True  # make False for production default

    # Optional, customize where the addon updater processing subfolder is,
    # essentially a staging folder used by the updater on its own
    # Needs to be within the same folder as the addon itself
    # Need to supply a full, absolute path to folder
    # updater.updater_path = # set path of updater folder, by default:
    # /addons/{__package__}/{__package__}_updater

    # Auto create a backup of the addon when installing other versions.
    updater.backup_current = True  # True by default

    # Sample ignore patterns for when creating backup of current during update.
    updater.backup_ignore_patterns = ["__pycache__"]
    # Alternate example patterns:
    # updater.backup_ignore_patterns = [".git", "__pycache__", "*.bat", ".gitignore", "*.exe"]

    # Patterns for files to actively overwrite if found in new update file and
    # are also found in the currently installed addon. Note that by default
    # (ie if set to []), updates are installed in the same way as blender:
    # .py files are replaced, but other file types (e.g. json, txt, blend)
    # will NOT be overwritten if already present in current install. Thus
    # if you want to automatically update resources/non py files, add them
    # as a part of the pattern list below so they will always be overwritten by an
    # update. If a pattern file is not found in new update, no action is taken
    # NOTE: This does NOT delete anything proactively, rather only defines what
    # is allowed to be overwritten during an update execution.
    updater.overwrite_patterns = ["*.png", "*.jpg", "README.md", "LICENSE.txt"]
    # updater.overwrite_patterns = []
    # other examples:
    # ["*"] means ALL files/folders will be overwritten by update, was the
    # behavior pre updater v1.0.4.
    # [] or ["*.py","*.pyc"] matches default blender behavior, ie same effect
    # if user installs update manually without deleting the existing addon
    # first e.g. if existing install and update both have a resource.blend
    # file, the existing installed one will remain.
    # ["some.py"] means if some.py is found in addon update, it will overwrite
    # any existing some.py in current addon install, if any.
    # ["*.json"] means all json files found in addon update will overwrite
    # those of same name in current install.
    # ["*.png","README.md","LICENSE.txt"] means the readme, license, and all
    # pngs will be overwritten by update.

    # Patterns for files to actively remove prior to running update.
    # Useful if wanting to remove old code due to changes in filenames
    # that otherwise would accumulate. Note: this runs after taking
    # a backup (if enabled) but before placing in new update. If the same
    # file name removed exists in the update, then it acts as if pattern
    # is placed in the overwrite_patterns property. Note this is effectively
    # ignored if clean=True in the run_update method.
    updater.remove_pre_update_patterns = ["*.py", "*.pyc"]
    # Note setting ["*"] here is equivalent to always running updates with
    # clean = True in the run_update method, ie the equivalent of a fresh,
    # new install. This would also delete any resources or user-made/modified
    # files setting ["__pycache__"] ensures the pycache folder always removed.
    # The configuration of ["*.py","*.pyc"] is a safe option as this
    # will ensure no old python files/caches remain in event different addon
    # versions have different filenames or structures

    # Allow branches like 'master' as an option to update to, regardless
    # of release or version.
    # Default behavior: releases will still be used for auto check (popup),
    # but the user has the option from user preferences to directly
    # update to the master branch or any other branches specified using
    # the "install {branch}/older version" operator.
    updater.include_branches = True

    # (GitHub only) This option allows using "releases" instead of "tags",
    # which enables pulling down release logs/notes, as well as installs update
    # from release-attached zips (instead of the auto-packaged code generated
    # with a release/tag). Setting has no impact on BitBucket or GitLab repos.
    updater.use_releases = False
    # Note: Releases always have a tag, but a tag may not always be a release.
    # Therefore, setting True above will filter out any non-annotated tags.
    # Note 2: Using this option will also display (and filter by) the release
    # name instead of the tag name, bear this in mind given the
    # skip_tag_function filtering above.

    # Populate if using "include_branches" option above.
    # Note: updater.include_branch_list defaults to ['master'] branch if set to
    # none. Example targeting another multiple branches allowed to pull from:
    # updater.include_branch_list = ['master', 'dev']
    updater.include_branch_list = None  # None is the equivalent = ['master']

    # Only allow manual install, thus prompting the user to open
    # the addon's web page to download, specifically: updater.website
    # Useful if only wanting to get notification of updates but not
    # directly install.
    updater.manual_only = False

    # Used for development only, "pretend" to install an update to test
    # reloading conditions.
    updater.fake_install = False  # Set to true to test callback/reloading.

    # Show popups, ie if auto-check for update is enabled or a previous
    # check for update in user preferences found a new version, show a popup
    # (at most once per blender session, and it provides an option to ignore
    # for future sessions); default behavior is set to True.
    updater.show_popups = True
    # note: if set to false, there will still be an "update ready" box drawn
    # using the `update_notice_box_ui` panel function.

    # Override with a custom function on what tags
    # to skip showing for updater; see code for function above.
    # Set the min and max versions allowed to install.
    # Optional, default None
    # min install (>=) will install this and higher
    updater.version_min_update = (1,2,0)
    # updater.version_min_update = None # if not wanting to define a min

    # Max install (<) will install strictly anything lower than this version
    # number, useful to limit the max version a given user can install (e.g.
    # if support for a future version of blender is going away, and you don't
    # want users to be prompted to install a non-functioning addon)
    # updater.version_max_update = (9,9,9)
    updater.version_max_update = None  # None or default for no max.

    # Function defined above, customize as appropriate per repository
    updater.skip_tag = skip_tag_function  # min and max used in this function

    # Function defined above, optionally customize as needed per repository.
    updater.select_link = select_link_function

    # Recommended false to encourage blender restarts on update completion
    # Setting this option to True is NOT as stable as false (could cause
    # blender crashes).
    updater.auto_reload_post_update = False

    # The register line items for all operators/panels.
    # If using bpy.utils.register_module(__name__) to register elsewhere
    # in the addon, delete these lines (also from unregister).
    for cls in classes:
        # Apply annotations to remove Blender 2.8+ warnings, no effect on 2.7
        make_annotations(cls)
        # Comment out this line if using bpy.utils.register_module(__name__)
        bpy.utils.register_class(cls)

    # Special situation: we just updated the addon, show a popup to tell the
    # user it worked. Could be enclosed in try/catch in case other issues arise.
    show_reload_popup()
1521 |
1522 |
def unregister():
    """Unregister the updater operators and reset module-level run state."""
    # Walk the tuple backwards so classes come off in reverse register order.
    # Comment out the unregister_class call if using
    # bpy.utils.unregister_module(__name__) instead.
    for cls in reversed(classes):
        bpy.utils.unregister_class(cls)

    # Clear global vars since they may persist if not restarting blender.
    updater.clear_state()  # Clear internal vars, avoids reloading oddities.

    global ran_auto_check_install_popup, ran_update_success_popup
    global ran_background_check
    ran_auto_check_install_popup = False
    ran_update_success_popup = False
    ran_background_check = False
1539 |
--------------------------------------------------------------------------------
/addon_updater.py:
--------------------------------------------------------------------------------
1 | # ##### BEGIN GPL LICENSE BLOCK #####
2 | #
3 | # This program is free software; you can redistribute it and/or
4 | # modify it under the terms of the GNU General Public License
5 | # as published by the Free Software Foundation; either version 2
6 | # of the License, or (at your option) any later version.
7 | #
8 | # This program is distributed in the hope that it will be useful,
9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 | # GNU General Public License for more details.
12 | #
13 | # You should have received a copy of the GNU General Public License
14 | # along with this program; if not, write to the Free Software Foundation,
15 | # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
16 | #
17 | # ##### END GPL LICENSE BLOCK #####
18 |
19 |
20 | """
21 | See documentation for usage
22 | https://github.com/CGCookie/blender-addon-updater
23 | """
24 |
25 | __version__ = "1.1.0"
26 |
27 | import errno
28 | import traceback
29 | import platform
30 | import ssl
31 | import urllib.request
32 | import urllib
33 | import os
34 | import json
35 | import zipfile
36 | import shutil
37 | import threading
38 | import fnmatch
39 | from datetime import datetime, timedelta
40 |
41 | # Blender imports, used in limited cases.
42 | import bpy
43 | import addon_utils
44 |
45 | # -----------------------------------------------------------------------------
46 | # The main class
47 | # -----------------------------------------------------------------------------
48 |
49 |
50 | class SingletonUpdater:
51 | """Addon updater service class.
52 |
53 | This is the singleton class to instance once and then reference where
54 | needed throughout the addon. It implements all the interfaces for running
55 | updates.
56 | """
57 | def __init__(self):
58 |
59 | self._engine = GithubEngine()
60 | self._user = None
61 | self._repo = None
62 | self._website = None
63 | self._current_version = None
64 | self._subfolder_path = None
65 | self._tags = list()
66 | self._tag_latest = None
67 | self._tag_names = list()
68 | self._latest_release = None
69 | self._use_releases = False
70 | self._include_branches = False
71 | self._include_branch_list = ['master']
72 | self._include_branch_auto_check = False
73 | self._manual_only = False
74 | self._version_min_update = None
75 | self._version_max_update = None
76 |
77 | # By default, backup current addon on update/target install.
78 | self._backup_current = True
79 | self._backup_ignore_patterns = None
80 |
81 | # Set patterns the files to overwrite during an update.
82 | self._overwrite_patterns = ["*.py", "*.pyc"]
83 | self._remove_pre_update_patterns = list()
84 |
85 | # By default, don't auto disable+re-enable the addon after an update,
86 | # as this is less stable/often won't fully reload all modules anyways.
87 | self._auto_reload_post_update = False
88 |
89 | # Settings for the frequency of automated background checks.
90 | self._check_interval_enabled = False
91 | self._check_interval_months = 0
92 | self._check_interval_days = 7
93 | self._check_interval_hours = 0
94 | self._check_interval_minutes = 0
95 |
96 | # runtime variables, initial conditions
97 | self._verbose = False
98 | self._use_print_traces = True
99 | self._fake_install = False
100 | self._async_checking = False # only true when async daemon started
101 | self._update_ready = None
102 | self._update_link = None
103 | self._update_version = None
104 | self._source_zip = None
105 | self._check_thread = None
106 | self._select_link = None
107 | self.skip_tag = None
108 |
109 | # Get data from the running blender module (addon).
110 | self._addon = __package__.lower()
111 | self._addon_package = __package__ # Must not change.
112 | self._updater_path = os.path.join(
113 | os.path.dirname(__file__), self._addon + "_updater")
114 | self._addon_root = os.path.dirname(__file__)
115 | self._json = dict()
116 | self._error = None
117 | self._error_msg = None
118 | self._prefiltered_tag_count = 0
119 |
120 | # UI properties, not used within this module but still useful to have.
121 |
122 | # to verify a valid import, in place of placeholder import
123 | self.show_popups = True # UI uses to show popups or not.
124 | self.invalid_updater = False
125 |
126 | # pre-assign basic select-link function
127 | def select_link_function(self, tag):
128 | return tag["zipball_url"]
129 |
130 | self._select_link = select_link_function
131 |
132 | def print_trace(self):
133 | """Print handled exception details when use_print_traces is set"""
134 | if self._use_print_traces:
135 | traceback.print_exc()
136 |
137 | def print_verbose(self, msg):
138 | """Print out a verbose logging message if verbose is true."""
139 | if not self._verbose:
140 | return
141 | print("{} addon: ".format(self.addon) + msg)
142 |
143 | # -------------------------------------------------------------------------
144 | # Getters and setters
145 | # -------------------------------------------------------------------------
146 | @property
147 | def addon(self):
148 | return self._addon
149 |
150 | @addon.setter
151 | def addon(self, value):
152 | self._addon = str(value)
153 |
154 | @property
155 | def api_url(self):
156 | return self._engine.api_url
157 |
158 | @api_url.setter
159 | def api_url(self, value):
160 | if not self.check_is_url(value):
161 | raise ValueError("Not a valid URL: " + value)
162 | self._engine.api_url = value
163 |
164 | @property
165 | def async_checking(self):
166 | return self._async_checking
167 |
168 | @property
169 | def auto_reload_post_update(self):
170 | return self._auto_reload_post_update
171 |
172 | @auto_reload_post_update.setter
173 | def auto_reload_post_update(self, value):
174 | try:
175 | self._auto_reload_post_update = bool(value)
176 | except:
177 | raise ValueError("auto_reload_post_update must be a boolean value")
178 |
179 | @property
180 | def backup_current(self):
181 | return self._backup_current
182 |
183 | @backup_current.setter
184 | def backup_current(self, value):
185 | if value is None:
186 | self._backup_current = False
187 | else:
188 | self._backup_current = value
189 |
190 | @property
191 | def backup_ignore_patterns(self):
192 | return self._backup_ignore_patterns
193 |
194 | @backup_ignore_patterns.setter
195 | def backup_ignore_patterns(self, value):
196 | if value is None:
197 | self._backup_ignore_patterns = None
198 | elif not isinstance(value, list):
199 | raise ValueError("Backup pattern must be in list format")
200 | else:
201 | self._backup_ignore_patterns = value
202 |
203 | @property
204 | def check_interval(self):
205 | return (self._check_interval_enabled,
206 | self._check_interval_months,
207 | self._check_interval_days,
208 | self._check_interval_hours,
209 | self._check_interval_minutes)
210 |
211 | @property
212 | def current_version(self):
213 | return self._current_version
214 |
215 | @current_version.setter
216 | def current_version(self, tuple_values):
217 | if tuple_values is None:
218 | self._current_version = None
219 | return
220 | elif type(tuple_values) is not tuple:
221 | try:
222 | tuple(tuple_values)
223 | except:
224 | raise ValueError(
225 | "current_version must be a tuple of integers")
226 | for i in tuple_values:
227 | if type(i) is not int:
228 | raise ValueError(
229 | "current_version must be a tuple of integers")
230 | self._current_version = tuple(tuple_values)
231 |
    @property
    def engine(self):
        # Name of the active backend engine.
        return self._engine.name

    @engine.setter
    def engine(self, value):
        # Swap the backend implementation by (case-insensitive) name.
        engine = value.lower()
        if engine == "github":
            self._engine = GithubEngine()
        elif engine == "gitlab":
            self._engine = GitlabEngine()
        elif engine == "bitbucket":
            self._engine = BitbucketEngine()
        else:
            raise ValueError("Invalid engine selection")

    @property
    def error(self):
        # Read-only short error label, None when no error occurred.
        return self._error

    @property
    def error_msg(self):
        # Read-only detailed error message, None when no error occurred.
        return self._error_msg

    @property
    def fake_install(self):
        return self._fake_install

    @fake_install.setter
    def fake_install(self, value):
        if not isinstance(value, bool):
            raise ValueError("fake_install must be a boolean value")
        self._fake_install = bool(value)

    # not currently used
    @property
    def include_branch_auto_check(self):
        return self._include_branch_auto_check

    @include_branch_auto_check.setter
    def include_branch_auto_check(self, value):
        try:
            self._include_branch_auto_check = bool(value)
        except:
            raise ValueError("include_branch_autocheck must be a boolean")

    @property
    def include_branch_list(self):
        return self._include_branch_list

    @include_branch_list.setter
    def include_branch_list(self, value):
        # None falls back to the default branch list; otherwise a
        # non-empty list is required.
        try:
            if value is None:
                self._include_branch_list = ['master']
            elif not isinstance(value, list) or len(value) == 0:
                raise ValueError(
                    "include_branch_list should be a list of valid branches")
            else:
                self._include_branch_list = value
        except:
            raise ValueError(
                "include_branch_list should be a list of valid branches")

    @property
    def include_branches(self):
        return self._include_branches

    @include_branches.setter
    def include_branches(self, value):
        try:
            self._include_branches = bool(value)
        except:
            raise ValueError("include_branches must be a boolean value")
306 |
    @property
    def json(self):
        # Persisted updater state; populated via set_updater_json on
        # first access.
        if len(self._json) == 0:
            self.set_updater_json()
        return self._json

    @property
    def latest_release(self):
        if self._latest_release is None:
            return None
        return self._latest_release

    @property
    def manual_only(self):
        return self._manual_only

    @manual_only.setter
    def manual_only(self, value):
        try:
            self._manual_only = bool(value)
        except:
            raise ValueError("manual_only must be a boolean value")

    @property
    def overwrite_patterns(self):
        return self._overwrite_patterns

    @overwrite_patterns.setter
    def overwrite_patterns(self, value):
        # None restores the default: only python files get overwritten.
        if value is None:
            self._overwrite_patterns = ["*.py", "*.pyc"]
        elif not isinstance(value, list):
            raise ValueError("overwrite_patterns needs to be in a list format")
        else:
            self._overwrite_patterns = value

    @property
    def private_token(self):
        # Token is stored on the engine, which attaches it to requests.
        return self._engine.token

    @private_token.setter
    def private_token(self, value):
        if value is None:
            self._engine.token = None
        else:
            self._engine.token = str(value)

    @property
    def remove_pre_update_patterns(self):
        return self._remove_pre_update_patterns

    @remove_pre_update_patterns.setter
    def remove_pre_update_patterns(self, value):
        # None means: remove nothing before merging in the update.
        if value is None:
            self._remove_pre_update_patterns = list()
        elif not isinstance(value, list):
            raise ValueError(
                "remove_pre_update_patterns needs to be in a list format")
        else:
            self._remove_pre_update_patterns = value

    @property
    def repo(self):
        return self._repo

    @repo.setter
    def repo(self, value):
        try:
            self._repo = str(value)
        except:
            raise ValueError("repo must be a string value")

    @property
    def select_link(self):
        return self._select_link

    @select_link.setter
    def select_link(self, value):
        # ensure it is a function assignment, with signature:
        # input self, tag; returns link name
        if not hasattr(value, "__call__"):
            raise ValueError("select_link must be a function")
        self._select_link = value
390 |
    @property
    def stage_path(self):
        # Working directory used for staging downloads and backups.
        return self._updater_path

    @stage_path.setter
    def stage_path(self, value):
        if value is None:
            self.print_verbose("Aborting assigning stage_path, it's null")
            return
        elif value is not None and not os.path.exists(value):
            # Create the folder on assignment; abort (keep old path) on
            # failure rather than raising.
            try:
                os.makedirs(value)
            except:
                self.print_verbose("Error trying to staging path")
                self.print_trace()
                return
        self._updater_path = value

    @property
    def subfolder_path(self):
        return self._subfolder_path

    @subfolder_path.setter
    def subfolder_path(self, value):
        self._subfolder_path = value

    @property
    def tags(self):
        # List of tag name strings; empty until get_tags has populated
        # self._tags.
        if len(self._tags) == 0:
            return list()
        tag_names = list()
        for tag in self._tags:
            tag_names.append(tag["name"])
        return tag_names

    @property
    def tag_latest(self):
        # Name of the most recent usable tag, or None if none found.
        if self._tag_latest is None:
            return None
        return self._tag_latest["name"]

    @property
    def update_link(self):
        return self._update_link

    @property
    def update_ready(self):
        return self._update_ready

    @property
    def update_version(self):
        return self._update_version

    @property
    def use_releases(self):
        return self._use_releases

    @use_releases.setter
    def use_releases(self, value):
        try:
            self._use_releases = bool(value)
        except:
            raise ValueError("use_releases must be a boolean value")

    @property
    def user(self):
        return self._user

    @user.setter
    def user(self, value):
        try:
            self._user = str(value)
        except:
            raise ValueError("User must be a string value")

    @property
    def verbose(self):
        return self._verbose

    @verbose.setter
    def verbose(self, value):
        try:
            self._verbose = bool(value)
            # Only prints when the new value is truthy, since
            # print_verbose itself checks self._verbose.
            self.print_verbose("Verbose is enabled")
        except:
            raise ValueError("Verbose must be a boolean value")

    @property
    def use_print_traces(self):
        return self._use_print_traces

    @use_print_traces.setter
    def use_print_traces(self, value):
        try:
            self._use_print_traces = bool(value)
        except:
            raise ValueError("use_print_traces must be a boolean value")

    @property
    def version_max_update(self):
        return self._version_max_update

    @version_max_update.setter
    def version_max_update(self, value):
        # None disables the maximum-version cap.
        if value is None:
            self._version_max_update = None
            return
        if not isinstance(value, tuple):
            raise ValueError("Version maximum must be a tuple")
        for subvalue in value:
            if type(subvalue) is not int:
                raise ValueError("Version elements must be integers")
        self._version_max_update = value

    @property
    def version_min_update(self):
        return self._version_min_update

    @version_min_update.setter
    def version_min_update(self, value):
        # None disables the minimum-version floor.
        if value is None:
            self._version_min_update = None
            return
        if not isinstance(value, tuple):
            raise ValueError("Version minimum must be a tuple")
        for subvalue in value:
            if type(subvalue) != int:
                raise ValueError("Version elements must be integers")
        self._version_min_update = value

    @property
    def website(self):
        return self._website

    @website.setter
    def website(self, value):
        if not self.check_is_url(value):
            raise ValueError("Not a valid URL: " + value)
        self._website = value
531 | # -------------------------------------------------------------------------
532 | # Parameter validation related functions
533 | # -------------------------------------------------------------------------
534 | @staticmethod
535 | def check_is_url(url):
536 | if not ("http://" in url or "https://" in url):
537 | return False
538 | if "." not in url:
539 | return False
540 | return True
541 |
542 | def _get_tag_names(self):
543 | tag_names = list()
544 | self.get_tags()
545 | for tag in self._tags:
546 | tag_names.append(tag["name"])
547 | return tag_names
548 |
549 | def set_check_interval(self, enabled=False,
550 | months=0, days=14, hours=0, minutes=0):
551 | """Set the time interval between automated checks, and if enabled.
552 |
553 | Has enabled = False as default to not check against frequency,
554 | if enabled, default is 2 weeks.
555 | """
556 |
557 | if type(enabled) is not bool:
558 | raise ValueError("Enable must be a boolean value")
559 | if type(months) is not int:
560 | raise ValueError("Months must be an integer value")
561 | if type(days) is not int:
562 | raise ValueError("Days must be an integer value")
563 | if type(hours) is not int:
564 | raise ValueError("Hours must be an integer value")
565 | if type(minutes) is not int:
566 | raise ValueError("Minutes must be an integer value")
567 |
568 | if not enabled:
569 | self._check_interval_enabled = False
570 | else:
571 | self._check_interval_enabled = True
572 |
573 | self._check_interval_months = months
574 | self._check_interval_days = days
575 | self._check_interval_hours = hours
576 | self._check_interval_minutes = minutes
577 |
578 | def __repr__(self):
579 | return "".format(a=__file__)
580 |
581 | def __str__(self):
582 | return "Updater, with user: {a}, repository: {b}, url: {c}".format(
583 | a=self._user, b=self._repo, c=self.form_repo_url())
584 |
585 | # -------------------------------------------------------------------------
586 | # API-related functions
587 | # -------------------------------------------------------------------------
588 | def form_repo_url(self):
589 | return self._engine.form_repo_url(self)
590 |
591 | def form_tags_url(self):
592 | return self._engine.form_tags_url(self)
593 |
594 | def form_branch_url(self, branch):
595 | return self._engine.form_branch_url(branch, self)
596 |
    def get_tags(self):
        """Fetch tags/releases from the server and populate self._tags.

        Prepends configured branches when enabled, applies the optional
        skip_tag filter, and sets self._tag_latest (or an error state when
        nothing usable was found).
        """
        request = self.form_tags_url()
        self.print_verbose("Getting tags from server")

        # get all tags, internet call
        all_tags = self._engine.parse_tags(self.get_api(request), self)
        if all_tags is not None:
            self._prefiltered_tag_count = len(all_tags)
        else:
            self._prefiltered_tag_count = 0
            all_tags = list()

        # pre-process to skip tags
        if self.skip_tag is not None:
            self._tags = [tg for tg in all_tags if not self.skip_tag(self, tg)]
        else:
            self._tags = all_tags

        # get additional branches too, if needed, and place in front
        # Does NO checking here whether branch is valid
        if self._include_branches:
            temp_branches = self._include_branch_list.copy()
            temp_branches.reverse()
            for branch in temp_branches:
                request = self.form_branch_url(branch)
                include = {
                    "name": branch.title(),
                    "zipball_url": request
                }
                self._tags = [include] + self._tags  # append to front

        if self._tags is None:
            # some error occurred
            self._tag_latest = None
            self._tags = list()

        elif self._prefiltered_tag_count == 0 and not self._include_branches:
            # Server returned nothing and no branches to fall back on.
            self._tag_latest = None
            if self._error is None:  # if not None, could have had no internet
                self._error = "No releases found"
                self._error_msg = "No releases or tags found in repository"
                self.print_verbose("No releases or tags found in repository")

        elif self._prefiltered_tag_count == 0 and self._include_branches:
            # No releases, but branches were requested: use first branch.
            if not self._error:
                self._tag_latest = self._tags[0]
                branch = self._include_branch_list[0]
                self.print_verbose("{} branch found, no releases: {}".format(
                    branch, self._tags[0]))

        elif ((len(self._tags) - len(self._include_branch_list) == 0
                and self._include_branches)
                or (len(self._tags) == 0 and not self._include_branches)
                and self._prefiltered_tag_count > 0):
            # Tags existed server-side but all were filtered out.
            self._tag_latest = None
            self._error = "No releases available"
            self._error_msg = "No versions found within compatible version range"
            self.print_verbose(self._error_msg)

        else:
            if not self._include_branches:
                self._tag_latest = self._tags[0]
                self.print_verbose(
                    "Most recent tag found:" + str(self._tags[0]['name']))
            else:
                # Don't return branch if in list.
                n = len(self._include_branch_list)
                self._tag_latest = self._tags[n]  # guaranteed at least len()=n+1
                self.print_verbose(
                    "Most recent tag found:" + str(self._tags[n]['name']))
667 |
    def get_raw(self, url):
        """All API calls to base url.

        Returns the decoded response body as a string, or None on an
        HTTP/URL error (details recorded in self._error / self._error_msg).
        """
        request = urllib.request.Request(url)
        try:
            context = ssl._create_unverified_context()
        except:
            # Some blender packaged python versions don't have this, largely
            # useful for local network setups otherwise minimal impact.
            context = None

        # Setup private request headers if appropriate.
        if self._engine.token is not None:
            if self._engine.name == "gitlab":
                request.add_header('PRIVATE-TOKEN', self._engine.token)
            else:
                self.print_verbose("Tokens not setup for engine yet")

        # Always set user agent.
        request.add_header(
            'User-Agent', "Python/" + str(platform.python_version()))

        # Run the request.
        try:
            if context:
                result = urllib.request.urlopen(request, context=context)
            else:
                result = urllib.request.urlopen(request)
        except urllib.error.HTTPError as e:
            if str(e.code) == "403":
                self._error = "HTTP error (access denied)"
                self._error_msg = str(e.code) + " - server error response"
                print(self._error, self._error_msg)
            else:
                self._error = "HTTP error"
                self._error_msg = str(e.code)
                print(self._error, self._error_msg)
            self.print_trace()
            self._update_ready = None
            # NOTE: falls through to an implicit None return here.
        except urllib.error.URLError as e:
            reason = str(e.reason)
            if "TLSV1_ALERT" in reason or "SSL" in reason.upper():
                self._error = "Connection rejected, download manually"
                self._error_msg = reason
                print(self._error, self._error_msg)
            else:
                self._error = "URL error, check internet connection"
                self._error_msg = reason
                print(self._error, self._error_msg)
            self.print_trace()
            self._update_ready = None
            return None
        else:
            result_string = result.read()
            result.close()
            return result_string.decode()
723 |
724 | def get_api(self, url):
725 | """Result of all api calls, decoded into json format."""
726 | get = None
727 | get = self.get_raw(url)
728 | if get is not None:
729 | try:
730 | return json.JSONDecoder().decode(get)
731 | except Exception as e:
732 | self._error = "API response has invalid JSON format"
733 | self._error_msg = str(e.reason)
734 | self._update_ready = None
735 | print(self._error, self._error_msg)
736 | self.print_trace()
737 | return None
738 | else:
739 | return None
740 |
    def stage_repository(self, url):
        """Create a working directory and download the new files.

        Returns True when the zip was downloaded into the staging folder,
        False on any failure (error state recorded on self).
        """

        local = os.path.join(self._updater_path, "update_staging")
        error = None

        # Make/clear the staging folder, to ensure the folder is always clean.
        self.print_verbose(
            "Preparing staging folder for download:\n" + str(local))
        if os.path.isdir(local):
            try:
                shutil.rmtree(local)
                os.makedirs(local)
            except:
                error = "failed to remove existing staging directory"
                self.print_trace()
        else:
            try:
                os.makedirs(local)
            except:
                error = "failed to create staging directory"
                self.print_trace()

        if error is not None:
            self.print_verbose("Error: Aborting update, " + error)
            self._error = "Update aborted, staging path error"
            self._error_msg = "Error: {}".format(error)
            return False

        # Optionally snapshot the current install before downloading.
        if self._backup_current:
            self.create_backup()

        self.print_verbose("Now retrieving the new source zip")
        self._source_zip = os.path.join(local, "source.zip")
        self.print_verbose("Starting download update zip")
        try:
            request = urllib.request.Request(url)
            context = ssl._create_unverified_context()

            # Setup private token if appropriate.
            if self._engine.token is not None:
                if self._engine.name == "gitlab":
                    request.add_header('PRIVATE-TOKEN', self._engine.token)
                else:
                    self.print_verbose(
                        "Tokens not setup for selected engine yet")

            # Always set user agent
            request.add_header(
                'User-Agent', "Python/" + str(platform.python_version()))

            self.url_retrieve(urllib.request.urlopen(request, context=context),
                              self._source_zip)
            # Add additional checks on file size being non-zero.
            self.print_verbose("Successfully downloaded update zip")
            return True
        except Exception as e:
            self._error = "Error retrieving download, bad link?"
            self._error_msg = "Error: {}".format(e)
            print("Error retrieving download, bad link?")
            print("Error: {}".format(e))
            self.print_trace()
            return False
804 |
    def create_backup(self):
        """Save a backup of the current installed addon prior to an update."""
        self.print_verbose("Backing up current addon folder")
        local = os.path.join(self._updater_path, "backup")
        tempdest = os.path.join(
            self._addon_root, os.pardir, self._addon + "_updater_backup_temp")

        self.print_verbose("Backup destination path: " + str(local))

        if os.path.isdir(local):
            try:
                shutil.rmtree(local)
            except:
                self.print_verbose(
                    "Failed to removed previous backup folder, continuing")
                self.print_trace()

        # Remove the temp folder.
        # Shouldn't exist but could if previously interrupted.
        if os.path.isdir(tempdest):
            try:
                shutil.rmtree(tempdest)
            except:
                self.print_verbose(
                    "Failed to remove existing temp folder, continuing")
                self.print_trace()

        # Make a full addon copy, temporarily placed outside the addon folder.
        # Copying to a temp sibling first (then moving) means a failed copy
        # never leaves a half-written backup in the final location.
        if self._backup_ignore_patterns is not None:
            try:
                shutil.copytree(self._addon_root, tempdest,
                                ignore=shutil.ignore_patterns(
                                    *self._backup_ignore_patterns))
            except:
                print("Failed to create backup, still attempting update.")
                self.print_trace()
                return
        else:
            try:
                shutil.copytree(self._addon_root, tempdest)
            except:
                print("Failed to create backup, still attempting update.")
                self.print_trace()
                return
        shutil.move(tempdest, local)

        # Save the date for future reference.
        now = datetime.now()
        self._json["backup_date"] = "{m}-{d}-{yr}".format(
            m=now.strftime("%B"), d=now.day, yr=now.year)
        self.save_updater_json()
856 |
    def restore_backup(self):
        """Restore the last backed up addon version, user initiated only.

        Moves the backup over the current addon root (via a temp sibling
        folder), clears the backup state in the saved json, and reloads.
        """
        self.print_verbose("Restoring backup, backing up current addon folder")
        backuploc = os.path.join(self._updater_path, "backup")
        tempdest = os.path.join(
            self._addon_root, os.pardir, self._addon + "_updater_backup_temp")
        tempdest = os.path.abspath(tempdest)

        # Move instead contents back in place, instead of copy.
        shutil.move(backuploc, tempdest)
        shutil.rmtree(self._addon_root)
        os.rename(tempdest, self._addon_root)

        self._json["backup_date"] = ""
        self._json["just_restored"] = True
        self._json["just_updated"] = True
        self.save_updater_json()

        self.reload_addon()
876 |
877 | def unpack_staged_zip(self, clean=False):
878 | """Unzip the downloaded file, and validate contents"""
879 | if not os.path.isfile(self._source_zip):
880 | self.print_verbose("Error, update zip not found")
881 | self._error = "Install failed"
882 | self._error_msg = "Downloaded zip not found"
883 | return -1
884 |
885 | # Clear the existing source folder in case previous files remain.
886 | outdir = os.path.join(self._updater_path, "source")
887 | try:
888 | shutil.rmtree(outdir)
889 | self.print_verbose("Source folder cleared")
890 | except:
891 | self.print_trace()
892 |
893 | # Create parent directories if needed, would not be relevant unless
894 | # installing addon into another location or via an addon manager.
895 | try:
896 | os.mkdir(outdir)
897 | except Exception as err:
898 | print("Error occurred while making extract dir:")
899 | print(str(err))
900 | self.print_trace()
901 | self._error = "Install failed"
902 | self._error_msg = "Failed to make extract directory"
903 | return -1
904 |
905 | if not os.path.isdir(outdir):
906 | print("Failed to create source directory")
907 | self._error = "Install failed"
908 | self._error_msg = "Failed to create extract directory"
909 | return -1
910 |
911 | self.print_verbose(
912 | "Begin extracting source from zip:" + str(self._source_zip))
913 | zfile = zipfile.ZipFile(self._source_zip, "r")
914 |
915 | if not zfile:
916 | self._error = "Install failed"
917 | self._error_msg = "Resulting file is not a zip, cannot extract"
918 | self.print_verbose(self._error_msg)
919 | return -1
920 |
921 | # Now extract directly from the first subfolder (not root)
922 | # this avoids adding the first subfolder to the path length,
923 | # which can be too long if the download has the SHA in the name.
924 | zsep = '/' # Not using os.sep, always the / value even on windows.
925 | for name in zfile.namelist():
926 | if zsep not in name:
927 | continue
928 | top_folder = name[:name.index(zsep) + 1]
929 | if name == top_folder + zsep:
930 | continue # skip top level folder
931 | sub_path = name[name.index(zsep) + 1:]
932 | if name.endswith(zsep):
933 | try:
934 | os.mkdir(os.path.join(outdir, sub_path))
935 | self.print_verbose(
936 | "Extract - mkdir: " + os.path.join(outdir, sub_path))
937 | except OSError as exc:
938 | if exc.errno != errno.EEXIST:
939 | self._error = "Install failed"
940 | self._error_msg = "Could not create folder from zip"
941 | self.print_trace()
942 | return -1
943 | else:
944 | with open(os.path.join(outdir, sub_path), "wb") as outfile:
945 | data = zfile.read(name)
946 | outfile.write(data)
947 | self.print_verbose(
948 | "Extract - create: " + os.path.join(outdir, sub_path))
949 |
950 | self.print_verbose("Extracted source")
951 |
952 | unpath = os.path.join(self._updater_path, "source")
953 | if not os.path.isdir(unpath):
954 | self._error = "Install failed"
955 | self._error_msg = "Extracted path does not exist"
956 | print("Extracted path does not exist: ", unpath)
957 | return -1
958 |
959 | if self._subfolder_path:
960 | self._subfolder_path.replace('/', os.path.sep)
961 | self._subfolder_path.replace('\\', os.path.sep)
962 |
963 | # Either directly in root of zip/one subfolder, or use specified path.
964 | if not os.path.isfile(os.path.join(unpath, "__init__.py")):
965 | dirlist = os.listdir(unpath)
966 | if len(dirlist) > 0:
967 | if self._subfolder_path == "" or self._subfolder_path is None:
968 | unpath = os.path.join(unpath, dirlist[0])
969 | else:
970 | unpath = os.path.join(unpath, self._subfolder_path)
971 |
972 | # Smarter check for additional sub folders for a single folder
973 | # containing the __init__.py file.
974 | if not os.path.isfile(os.path.join(unpath, "__init__.py")):
975 | print("Not a valid addon found")
976 | print("Paths:")
977 | print(dirlist)
978 | self._error = "Install failed"
979 | self._error_msg = "No __init__ file found in new source"
980 | return -1
981 |
982 | # Merge code with the addon directory, using blender default behavior,
983 | # plus any modifiers indicated by user (e.g. force remove/keep).
984 | self.deep_merge_directory(self._addon_root, unpath, clean)
985 |
986 | # Now save the json state.
987 | # Change to True to trigger the handler on other side if allowing
988 | # reloading within same blender session.
989 | self._json["just_updated"] = True
990 | self.save_updater_json()
991 | self.reload_addon()
992 | self._update_ready = False
993 | return 0
994 |
995 | def deep_merge_directory(self, base, merger, clean=False):
996 | """Merge folder 'merger' into 'base' without deleting existing"""
997 | if not os.path.exists(base):
998 | self.print_verbose("Base path does not exist:" + str(base))
999 | return -1
1000 | elif not os.path.exists(merger):
1001 | self.print_verbose("Merger path does not exist")
1002 | return -1
1003 |
1004 | # Path to be aware of and not overwrite/remove/etc.
1005 | staging_path = os.path.join(self._updater_path, "update_staging")
1006 |
1007 | # If clean install is enabled, clear existing files ahead of time
1008 | # note: will not delete the update.json, update folder, staging, or
1009 | # staging but will delete all other folders/files in addon directory.
1010 | error = None
1011 | if clean:
1012 | try:
1013 | # Implement clearing of all folders/files, except the updater
1014 | # folder and updater json.
1015 | # Careful, this deletes entire subdirectories recursively...
1016 | # Make sure that base is not a high level shared folder, but
1017 | # is dedicated just to the addon itself.
1018 | self.print_verbose(
1019 | "clean=True, clearing addon folder to fresh install state")
1020 |
1021 | # Remove root files and folders (except update folder).
1022 | files = [f for f in os.listdir(base)
1023 | if os.path.isfile(os.path.join(base, f))]
1024 | folders = [f for f in os.listdir(base)
1025 | if os.path.isdir(os.path.join(base, f))]
1026 |
1027 | for f in files:
1028 | os.remove(os.path.join(base, f))
1029 | self.print_verbose(
1030 | "Clean removing file {}".format(os.path.join(base, f)))
1031 | for f in folders:
1032 | if os.path.join(base, f) is self._updater_path:
1033 | continue
1034 | shutil.rmtree(os.path.join(base, f))
1035 | self.print_verbose(
1036 | "Clean removing folder and contents {}".format(
1037 | os.path.join(base, f)))
1038 |
1039 | except Exception as err:
1040 | error = "failed to create clean existing addon folder"
1041 | print(error, str(err))
1042 | self.print_trace()
1043 |
1044 | # Walk through the base addon folder for rules on pre-removing
1045 | # but avoid removing/altering backup and updater file.
1046 | for path, dirs, files in os.walk(base):
1047 | # Prune ie skip updater folder.
1048 | dirs[:] = [d for d in dirs
1049 | if os.path.join(path, d) not in [self._updater_path]]
1050 | for file in files:
1051 | for pattern in self.remove_pre_update_patterns:
1052 | if fnmatch.filter([file], pattern):
1053 | try:
1054 | fl = os.path.join(path, file)
1055 | os.remove(fl)
1056 | self.print_verbose("Pre-removed file " + file)
1057 | except OSError:
1058 | print("Failed to pre-remove " + file)
1059 | self.print_trace()
1060 |
1061 | # Walk through the temp addon sub folder for replacements
1062 | # this implements the overwrite rules, which apply after
1063 | # the above pre-removal rules. This also performs the
1064 | # actual file copying/replacements.
1065 | for path, dirs, files in os.walk(merger):
1066 | # Verify structure works to prune updater sub folder overwriting.
1067 | dirs[:] = [d for d in dirs
1068 | if os.path.join(path, d) not in [self._updater_path]]
1069 | rel_path = os.path.relpath(path, merger)
1070 | dest_path = os.path.join(base, rel_path)
1071 | if not os.path.exists(dest_path):
1072 | os.makedirs(dest_path)
1073 | for file in files:
1074 | # Bring in additional logic around copying/replacing.
1075 | # Blender default: overwrite .py's, don't overwrite the rest.
1076 | dest_file = os.path.join(dest_path, file)
1077 | srcFile = os.path.join(path, file)
1078 |
1079 | # Decide to replace if file already exists, and copy new over.
1080 | if os.path.isfile(dest_file):
1081 | # Otherwise, check each file for overwrite pattern match.
1082 | replaced = False
1083 | for pattern in self._overwrite_patterns:
1084 | if fnmatch.filter([file], pattern):
1085 | replaced = True
1086 | break
1087 | if replaced:
1088 | os.remove(dest_file)
1089 | os.rename(srcFile, dest_file)
1090 | self.print_verbose(
1091 | "Overwrote file " + os.path.basename(dest_file))
1092 | else:
1093 | self.print_verbose(
1094 | "Pattern not matched to {}, not overwritten".format(
1095 | os.path.basename(dest_file)))
1096 | else:
1097 | # File did not previously exist, simply move it over.
1098 | os.rename(srcFile, dest_file)
1099 | self.print_verbose(
1100 | "New file " + os.path.basename(dest_file))
1101 |
1102 | # now remove the temp staging folder and downloaded zip
1103 | try:
1104 | shutil.rmtree(staging_path)
1105 | except:
1106 | error = ("Error: Failed to remove existing staging directory, "
1107 | "consider manually removing ") + staging_path
1108 | self.print_verbose(error)
1109 | self.print_trace()
1110 |
    def reload_addon(self):
        """Disable, refresh and re-enable the addon inside Blender.

        Skipped entirely (user must restart Blender) unless
        auto_reload_post_update is enabled.
        """
        # if post_update false, skip this function
        # else, unload/reload addon & trigger popup
        if not self._auto_reload_post_update:
            print("Restart blender to reload addon and complete update")
            return

        self.print_verbose("Reloading addon...")
        addon_utils.modules(refresh=True)
        bpy.utils.refresh_script_paths()

        # not allowed in restricted context, such as register module
        # toggle to refresh
        if "addon_disable" in dir(bpy.ops.wm):  # 2.7
            bpy.ops.wm.addon_disable(module=self._addon_package)
            bpy.ops.wm.addon_refresh()
            bpy.ops.wm.addon_enable(module=self._addon_package)
            print("2.7 reload complete")
        else:  # 2.8
            bpy.ops.preferences.addon_disable(module=self._addon_package)
            bpy.ops.preferences.addon_refresh()
            bpy.ops.preferences.addon_enable(module=self._addon_package)
            print("2.8 reload complete")
1134 |
1135 | # -------------------------------------------------------------------------
1136 | # Other non-api functions and setups
1137 | # -------------------------------------------------------------------------
1138 | def clear_state(self):
1139 | self._update_ready = None
1140 | self._update_link = None
1141 | self._update_version = None
1142 | self._source_zip = None
1143 | self._error = None
1144 | self._error_msg = None
1145 |
1146 | def url_retrieve(self, url_file, filepath):
1147 | """Custom urlretrieve implementation"""
1148 | chunk = 1024 * 8
1149 | f = open(filepath, "wb")
1150 | while 1:
1151 | data = url_file.read(chunk)
1152 | if not data:
1153 | # print("done.")
1154 | break
1155 | f.write(data)
1156 | # print("Read %s bytes" % len(data))
1157 | f.close()
1158 |
1159 | def version_tuple_from_text(self, text):
1160 | """Convert text into a tuple of numbers (int).
1161 |
1162 | Should go through string and remove all non-integers, and for any
1163 | given break split into a different section.
1164 | """
1165 | if text is None:
1166 | return ()
1167 |
1168 | segments = list()
1169 | tmp = ''
1170 | for char in str(text):
1171 | if not char.isdigit():
1172 | if len(tmp) > 0:
1173 | segments.append(int(tmp))
1174 | tmp = ''
1175 | else:
1176 | tmp += char
1177 | if len(tmp) > 0:
1178 | segments.append(int(tmp))
1179 |
1180 | if len(segments) == 0:
1181 | self.print_verbose("No version strings found text: " + str(text))
1182 | if not self._include_branches:
1183 | return ()
1184 | else:
1185 | return (text)
1186 | return tuple(segments)
1187 |
1188 | def check_for_update_async(self, callback=None):
1189 | """Called for running check in a background thread"""
1190 | is_ready = (
1191 | self._json is not None
1192 | and "update_ready" in self._json
1193 | and self._json["version_text"] != dict()
1194 | and self._json["update_ready"])
1195 |
1196 | if is_ready:
1197 | self._update_ready = True
1198 | self._update_link = self._json["version_text"]["link"]
1199 | self._update_version = str(self._json["version_text"]["version"])
1200 | # Cached update.
1201 | callback(True)
1202 | return
1203 |
1204 | # do the check
1205 | if not self._check_interval_enabled:
1206 | return
1207 | elif self._async_checking:
1208 | self.print_verbose("Skipping async check, already started")
1209 | # already running the bg thread
1210 | elif self._update_ready is None:
1211 | print("{} updater: Running background check for update".format(
1212 | self.addon))
1213 | self.start_async_check_update(False, callback)
1214 |
1215 | def check_for_update_now(self, callback=None):
1216 | self._error = None
1217 | self._error_msg = None
1218 | self.print_verbose(
1219 | "Check update pressed, first getting current status")
1220 | if self._async_checking:
1221 | self.print_verbose("Skipping async check, already started")
1222 | return # already running the bg thread
1223 | elif self._update_ready is None:
1224 | self.start_async_check_update(True, callback)
1225 | else:
1226 | self._update_ready = None
1227 | self.start_async_check_update(True, callback)
1228 |
    def check_for_update(self, now=False):
        """Check for an update in a synchronous manner.

        This function is not async, will always return in sequential fashion
        but should have a parent which calls it in another thread.

        Args:
            now: If True, bypass the cached result and the check interval.

        Returns:
            Tuple of (update_ready, version, link); also cached on self.

        Raises:
            ValueError: if current_version, repo, or username is unset, or
                if include_branch_auto_check is enabled (not implemented).
        """
        self.print_verbose("Checking for update function")

        # clear the errors if any
        self._error = None
        self._error_msg = None

        # avoid running again in, just return past result if found
        # but if force now check, then still do it
        if self._update_ready is not None and not now:
            return (self._update_ready,
                    self._update_version,
                    self._update_link)

        # Required configuration must have been set by the caller beforehand.
        if self._current_version is None:
            raise ValueError("current_version not yet defined")

        if self._repo is None:
            raise ValueError("repo not yet defined")

        if self._user is None:
            raise ValueError("username not yet defined")

        self.set_updater_json()  # self._json

        # Respect the configured check interval unless forced with now=True.
        if not now and not self.past_interval_timestamp():
            self.print_verbose(
                "Aborting check for updated, check interval not reached")
            return (False, None, None)

        # check if using tags or releases
        # note that if called the first time, this will pull tags from online
        if self._fake_install:
            self.print_verbose(
                "fake_install = True, setting fake version as ready")
            self._update_ready = True
            self._update_version = "(999,999,999)"
            self._update_link = "http://127.0.0.1"

            return (self._update_ready,
                    self._update_version,
                    self._update_link)

        # Primary internet call, sets self._tags and self._tag_latest.
        self.get_tags()

        self._json["last_check"] = str(datetime.now())
        self.save_updater_json()

        # Can be () or ('master') in addition to branches, and version tag.
        new_version = self.version_tuple_from_text(self.tag_latest)

        if len(self._tags) == 0:
            self._update_ready = False
            self._update_version = None
            self._update_link = None
            return (False, None, None)

        # NOTE(review): select_link is called with an explicit self argument,
        # which suggests it is stored as a plain (unbound) function attribute
        # rather than a bound method -- confirm where select_link is assigned.
        if not self._include_branches:
            link = self.select_link(self, self._tags[0])
        else:
            # When branches are included, the first n entries of self._tags
            # are the included branches, followed by actual version tags.
            n = len(self._include_branch_list)
            if len(self._tags) == n:
                # effectively means no tags found on repo
                # so provide the first one as default
                link = self.select_link(self, self._tags[0])
            else:
                link = self.select_link(self, self._tags[n])

        if new_version == ():
            self._update_ready = False
            self._update_version = None
            self._update_link = None
            return (False, None, None)
        elif str(new_version).lower() in self._include_branch_list:
            # Handle situation where master/whichever branch is included
            # however, this code effectively is not triggered now
            # as new_version will only be tag names, not branch names.
            if not self._include_branch_auto_check:
                # Don't offer update as ready, but set the link for the
                # default branch for installing.
                self._update_ready = False
                self._update_version = new_version
                self._update_link = link
                self.save_updater_json()
                return (True, new_version, link)
            else:
                # Bypass releases and look at timestamp of last update from a
                # branch compared to now, see if commit values match or not.
                raise ValueError("include_branch_autocheck: NOT YET DEVELOPED")

        else:
            # Situation where branches not included.
            # NOTE(review): tuple comparison assumes both versions have
            # comparable shapes (ints only) -- verify tag naming scheme.
            if new_version > self._current_version:

                self._update_ready = True
                self._update_version = new_version
                self._update_link = link
                self.save_updater_json()
                return (True, new_version, link)

        # If no update, set ready to False from None to show it was checked.
        self._update_ready = False
        self._update_version = None
        self._update_link = None
        return (False, None, None)
1340 |
1341 | def set_tag(self, name):
1342 | """Assign the tag name and url to update to"""
1343 | tg = None
1344 | for tag in self._tags:
1345 | if name == tag["name"]:
1346 | tg = tag
1347 | break
1348 | if tg:
1349 | new_version = self.version_tuple_from_text(self.tag_latest)
1350 | self._update_version = new_version
1351 | self._update_link = self.select_link(self, tg)
1352 | elif self._include_branches and name in self._include_branch_list:
1353 | # scenario if reverting to a specific branch name instead of tag
1354 | tg = name
1355 | link = self.form_branch_url(tg)
1356 | self._update_version = name # this will break things
1357 | self._update_link = link
1358 | if not tg:
1359 | raise ValueError("Version tag not found: " + name)
1360 |
    def run_update(self, force=False, revert_tag=None, clean=False, callback=None):
        """Runs an install, update, or reversion of an addon from online source

        Arguments:
            force: Install assigned link, even if self.update_ready is False
            revert_tag: Version to install, if none uses detected update link
            clean: not used, but in future could use to totally refresh addon
            callback: used to run function on update completion

        Returns:
            0 on success; otherwise an error string, or the negative error
            code propagated from unpack_staged_zip.
        """
        # Reset the persisted state before attempting any install.
        self._json["update_ready"] = False
        self._json["ignore"] = False  # clear ignore flag
        self._json["version_text"] = dict()

        if revert_tag is not None:
            # Reversion: point the update link/version at the requested tag.
            self.set_tag(revert_tag)
            self._update_ready = True

        # clear the errors if any
        self._error = None
        self._error_msg = None

        self.print_verbose("Running update")

        if self._fake_install:
            # Change to True, to trigger the reload/"update installed" handler.
            self.print_verbose("fake_install=True")
            self.print_verbose(
                "Just reloading and running any handler triggers")
            self._json["just_updated"] = True
            self.save_updater_json()
            if self._backup_current is True:
                self.create_backup()
            self.reload_addon()
            self._update_ready = False
            res = True  # fake "success" zip download flag

        elif not force:
            # Normal path: require a detected, ready update with a link.
            if not self._update_ready:
                self.print_verbose("Update stopped, new version not ready")
                if callback:
                    callback(
                        self._addon_package,
                        "Update stopped, new version not ready")
                return "Update stopped, new version not ready"
            elif self._update_link is None:
                # this shouldn't happen if update is ready
                self.print_verbose("Update stopped, update link unavailable")
                if callback:
                    callback(self._addon_package,
                             "Update stopped, update link unavailable")
                return "Update stopped, update link unavailable"

            if revert_tag is None:
                self.print_verbose("Staging update")
            else:
                self.print_verbose("Staging install")

            # Download and extract into the staging area, then merge in place.
            res = self.stage_repository(self._update_link)
            if not res:
                print("Error in staging repository: " + str(res))
                if callback is not None:
                    callback(self._addon_package, self._error_msg)
                return self._error_msg
            res = self.unpack_staged_zip(clean)
            if res < 0:
                if callback:
                    callback(self._addon_package, self._error_msg)
                return res

        else:
            # Forced path: skip the update_ready check, but a link is still
            # required to know what to download.
            if self._update_link is None:
                self.print_verbose("Update stopped, could not get link")
                return "Update stopped, could not get link"
            self.print_verbose("Forcing update")

            res = self.stage_repository(self._update_link)
            if not res:
                print("Error in staging repository: " + str(res))
                if callback:
                    callback(self._addon_package, self._error_msg)
                return self._error_msg
            res = self.unpack_staged_zip(clean)
            if res < 0:
                return res
            # would need to compare against other versions held in tags

        # run the front-end's callback if provided
        if callback:
            callback(self._addon_package)

        # return something meaningful, 0 means it worked
        return 0
1453 |
1454 | def past_interval_timestamp(self):
1455 | if not self._check_interval_enabled:
1456 | return True # ie this exact feature is disabled
1457 |
1458 | if "last_check" not in self._json or self._json["last_check"] == "":
1459 | return True
1460 |
1461 | now = datetime.now()
1462 | last_check = datetime.strptime(
1463 | self._json["last_check"], "%Y-%m-%d %H:%M:%S.%f")
1464 | offset = timedelta(
1465 | days=self._check_interval_days + 30 * self._check_interval_months,
1466 | hours=self._check_interval_hours,
1467 | minutes=self._check_interval_minutes)
1468 |
1469 | delta = (now - offset) - last_check
1470 | if delta.total_seconds() > 0:
1471 | self.print_verbose("Time to check for updates!")
1472 | return True
1473 |
1474 | self.print_verbose("Determined it's not yet time to check for updates")
1475 | return False
1476 |
1477 | def get_json_path(self):
1478 | """Returns the full path to the JSON state file used by this updater.
1479 |
1480 | Will also rename old file paths to addon-specific path if found.
1481 | """
1482 | json_path = os.path.join(
1483 | self._updater_path,
1484 | "{}_updater_status.json".format(self._addon_package))
1485 | old_json_path = os.path.join(self._updater_path, "updater_status.json")
1486 |
1487 | # Rename old file if it exists.
1488 | try:
1489 | os.rename(old_json_path, json_path)
1490 | except FileNotFoundError:
1491 | pass
1492 | except Exception as err:
1493 | print("Other OS error occurred while trying to rename old JSON")
1494 | print(err)
1495 | self.print_trace()
1496 | return json_path
1497 |
1498 | def set_updater_json(self):
1499 | """Load or initialize JSON dictionary data for updater state"""
1500 | if self._updater_path is None:
1501 | raise ValueError("updater_path is not defined")
1502 | elif not os.path.isdir(self._updater_path):
1503 | os.makedirs(self._updater_path)
1504 |
1505 | jpath = self.get_json_path()
1506 | if os.path.isfile(jpath):
1507 | with open(jpath) as data_file:
1508 | self._json = json.load(data_file)
1509 | self.print_verbose("Read in JSON settings from file")
1510 | else:
1511 | self._json = {
1512 | "last_check": "",
1513 | "backup_date": "",
1514 | "update_ready": False,
1515 | "ignore": False,
1516 | "just_restored": False,
1517 | "just_updated": False,
1518 | "version_text": dict()
1519 | }
1520 | self.save_updater_json()
1521 |
1522 | def save_updater_json(self):
1523 | """Trigger save of current json structure into file within addon"""
1524 | if self._update_ready:
1525 | if isinstance(self._update_version, tuple):
1526 | self._json["update_ready"] = True
1527 | self._json["version_text"]["link"] = self._update_link
1528 | self._json["version_text"]["version"] = self._update_version
1529 | else:
1530 | self._json["update_ready"] = False
1531 | self._json["version_text"] = dict()
1532 | else:
1533 | self._json["update_ready"] = False
1534 | self._json["version_text"] = dict()
1535 |
1536 | jpath = self.get_json_path()
1537 | if not os.path.isdir(os.path.dirname(jpath)):
1538 | print("State error: Directory does not exist, cannot save json: ",
1539 | os.path.basename(jpath))
1540 | return
1541 | try:
1542 | with open(jpath, 'w') as outf:
1543 | data_out = json.dumps(self._json, indent=4)
1544 | outf.write(data_out)
1545 | except:
1546 | print("Failed to open/save data to json: ", jpath)
1547 | self.print_trace()
1548 | self.print_verbose("Wrote out updater JSON settings with content:")
1549 | self.print_verbose(str(self._json))
1550 |
1551 | def json_reset_postupdate(self):
1552 | self._json["just_updated"] = False
1553 | self._json["update_ready"] = False
1554 | self._json["version_text"] = dict()
1555 | self.save_updater_json()
1556 |
1557 | def json_reset_restore(self):
1558 | self._json["just_restored"] = False
1559 | self._json["update_ready"] = False
1560 | self._json["version_text"] = dict()
1561 | self.save_updater_json()
1562 | self._update_ready = None # Reset so you could check update again.
1563 |
1564 | def ignore_update(self):
1565 | self._json["ignore"] = True
1566 | self.save_updater_json()
1567 |
1568 | # -------------------------------------------------------------------------
1569 | # ASYNC related methods
1570 | # -------------------------------------------------------------------------
1571 | def start_async_check_update(self, now=False, callback=None):
1572 | """Start a background thread which will check for updates"""
1573 | if self._async_checking:
1574 | return
1575 | self.print_verbose("Starting background checking thread")
1576 | check_thread = threading.Thread(target=self.async_check_update,
1577 | args=(now, callback,))
1578 | check_thread.daemon = True
1579 | self._check_thread = check_thread
1580 | check_thread.start()
1581 |
1582 | def async_check_update(self, now, callback=None):
1583 | """Perform update check, run as target of background thread"""
1584 | self._async_checking = True
1585 | self.print_verbose("Checking for update now in background")
1586 |
1587 | try:
1588 | self.check_for_update(now=now)
1589 | except Exception as exception:
1590 | print("Checking for update error:")
1591 | print(exception)
1592 | self.print_trace()
1593 | if not self._error:
1594 | self._update_ready = False
1595 | self._update_version = None
1596 | self._update_link = None
1597 | self._error = "Error occurred"
1598 | self._error_msg = "Encountered an error while checking for updates"
1599 |
1600 | self._async_checking = False
1601 | self._check_thread = None
1602 |
1603 | if callback:
1604 | self.print_verbose("Finished check update, doing callback")
1605 | callback(self._update_ready)
1606 | self.print_verbose("BG thread: Finished check update, no callback")
1607 |
1608 | def stop_async_check_update(self):
1609 | """Method to give impression of stopping check for update.
1610 |
1611 | Currently does nothing but allows user to retry/stop blocking UI from
1612 | hitting a refresh button. This does not actually stop the thread, as it
1613 | will complete after the connection timeout regardless. If the thread
1614 | does complete with a successful response, this will be still displayed
1615 | on next UI refresh (ie no update, or update available).
1616 | """
1617 | if self._check_thread is not None:
1618 | self.print_verbose("Thread will end in normal course.")
1619 | # however, "There is no direct kill method on a thread object."
1620 | # better to let it run its course
1621 | # self._check_thread.stop()
1622 | self._async_checking = False
1623 | self._error = None
1624 | self._error_msg = None
1625 |
1626 |
1627 | # -----------------------------------------------------------------------------
1628 | # Updater Engines
1629 | # -----------------------------------------------------------------------------
1630 |
1631 |
class BitbucketEngine:
    """Integration to Bitbucket API for git-formatted repositories"""

    def __init__(self):
        self.api_url = 'https://api.bitbucket.org'
        self.token = None
        self.name = "bitbucket"

    def form_repo_url(self, updater):
        """API root for the configured user/repo pair."""
        return "{}/2.0/repositories/{}/{}".format(
            self.api_url, updater.user, updater.repo)

    def form_tags_url(self, updater):
        """Tag listing endpoint, newest tag name first."""
        return self.form_repo_url(updater) + "/refs/tags?sort=-name"

    def form_branch_url(self, branch, updater):
        """Branch zips use the same download URL scheme as tags."""
        return self.get_zip_url(branch, updater)

    def get_zip_url(self, name, updater):
        """Direct (non-API) download URL for a ref's zip archive."""
        return "https://bitbucket.org/{user}/{repo}/get/{name}.zip".format(
            user=updater.user,
            repo=updater.repo,
            name=name)

    def parse_tags(self, response, updater):
        """Normalize the API payload into name/zipball_url dicts."""
        if response is None:
            return list()
        tags = []
        for tag in response["values"]:
            tags.append({
                "name": tag["name"],
                "zipball_url": self.get_zip_url(tag["name"], updater),
            })
        return tags
1664 |
1665 |
class GithubEngine:
    """Integration to Github API"""

    def __init__(self):
        self.api_url = 'https://api.github.com'
        self.token = None
        self.name = "github"

    def form_repo_url(self, updater):
        """API root for the configured user/repo pair."""
        return "{}/repos/{}/{}".format(
            self.api_url, updater.user, updater.repo)

    def form_tags_url(self, updater):
        """Releases or plain tags endpoint, per the updater's preference."""
        if updater.use_releases:
            return "{}/releases".format(self.form_repo_url(updater))
        return "{}/tags".format(self.form_repo_url(updater))

    def form_branch_list_url(self, updater):
        """Endpoint listing all branches of the repo."""
        return "{}/branches".format(self.form_repo_url(updater))

    def form_branch_url(self, branch, updater):
        """Zipball download URL for a given branch."""
        return "{}/zipball/{}".format(self.form_repo_url(updater), branch)

    def parse_tags(self, response, updater):
        """Github's payload is already in the expected shape."""
        return list() if response is None else response
1694 |
1695 |
class GitlabEngine:
    """Integration to GitLab API"""

    def __init__(self):
        self.api_url = 'https://gitlab.com'
        self.token = None
        self.name = "gitlab"

    def form_repo_url(self, updater):
        """Project API root; GitLab addresses projects by id (updater.repo)."""
        return "{}/api/v4/projects/{}".format(self.api_url, updater.repo)

    def form_tags_url(self, updater):
        """Endpoint listing the repository's tags."""
        return "{}/repository/tags".format(self.form_repo_url(updater))

    def form_branch_list_url(self, updater):
        # does not validate branch name.
        return "{}/repository/branches".format(
            self.form_repo_url(updater))

    def form_branch_url(self, branch, updater):
        # Could clash with tag names and if it does, it will download TAG zip
        # instead of branch zip to get direct path, would need.
        return "{}/repository/archive.zip?sha={}".format(
            self.form_repo_url(updater), branch)

    def get_zip_url(self, sha, updater):
        """Archive download URL for a specific commit sha."""
        return "{base}/repository/archive.zip?sha={sha}".format(
            base=self.form_repo_url(updater),
            sha=sha)

    # def get_commit_zip(self, id, updater):
    #     return self.form_repo_url(updater)+"/repository/archive.zip?sha:"+id

    def parse_tags(self, response, updater):
        """Normalize the GitLab tag payload into name/zipball_url dicts."""
        if response is None:
            return list()
        tags = []
        for tag in response:
            tags.append({
                "name": tag["name"],
                "zipball_url": self.get_zip_url(tag["commit"]["id"], updater),
            })
        return tags
1737 |
1738 |
1739 | # -----------------------------------------------------------------------------
1740 | # The module-shared class instance,
1741 | # should be what's imported to other files
1742 | # -----------------------------------------------------------------------------
1743 |
1744 | Updater = SingletonUpdater()
1745 |
--------------------------------------------------------------------------------