-import bpy, math
+#
+# =============================================================================
+#
+# Copyright . . . -----, ,----- ,---. .---.
+# 2021-2023 |\ /| | / | | | | /|
+# | \ / | +-- / +----- +---' | / |
+# | \ / | | / | | \ | / |
+# | \/ | | / | | \ | / |
+# ' ' '--' [] '----- '----- ' ' '---' SOFTWARE
+#
+# =============================================================================
+#
+# Python exporter for Blender, compiles .mdl format for Skate Rift.
+#
+# It's really slow, sorry, I don't know how to speed it up.
+# Also not sure why you need to put # before {} in code blocks; there are
+# errors otherwise.
+#
+
+import bpy, math, gpu, os
+import cProfile
from ctypes import *
+from mathutils import *
+from gpu_extras.batch import batch_for_shader
-class model(Structure):
- _pack_ = 1
- _fields_ = [("identifier",c_uint32),
- ("vertex_count",c_uint32),
- ("indice_count",c_uint32),
- ("layer_count",c_uint32)]
+# Add-on metadata read by Blender's add-on registry.
+bl_info = {
+  "name":"Skate Rift model compiler",
+  "author": "Harry Godden (hgn)",
+  "version": (0,2),
+  "blender":(3,1,0),
+  "location":"Export",
+  "description":"",
+  "warning":"",
+  "wiki_url":"",
+  "category":"Import/Export",
+}
-class sdf_primative(Structure):
- _pack_ = 1
- _fields_ = [("origin",c_float*4),
- ("info",c_float*4)]
+class mdl_vert(Structure): # 48 bytes. Quite large. Could compress
+#{ # the normals and uvs to i16s. Not an
+ _pack_ = 1 # real issue, yet.
+ _fields_ = [("co",c_float*3),
+ ("norm",c_float*3),
+ ("uv",c_float*2),
+ ("colour",c_uint8*4),
+ ("weights",c_uint16*4),
+ ("groups",c_uint8*4)]
+#}
-class submodel(Structure):
+class mdl_submesh(Structure):
+#{
_pack_ = 1
_fields_ = [("indice_start",c_uint32),
("indice_count",c_uint32),
("vertex_start",c_uint32),
("vertex_count",c_uint32),
("bbx",(c_float*3)*2),
- ("pivot",c_float*3),
- ("sdf",sdf_primative),
- ("sdf_type",c_int32),
- ("name",c_char*32)]
+ ("material_id",c_uint32)] # index into the material array
+#}
-class model_vert(Structure):
+class mdl_texture(Structure):
+#{
_pack_ = 1
- _fields_ = [("co",c_float*3),
- ("norm",c_float*3),
+ _fields_ = [("pstr_name",c_uint32),
+ ("pack_offset",c_uint32),
+ ("pack_length",c_uint32)]
+#}
+
+class mdl_material(Structure):
+#{
+ _pack_ = 1
+ _fields_ = [("pstr_name",c_uint32),
+ ("shader",c_uint32),
+ ("flags",c_uint32),
+ ("surface_prop",c_uint32),
("colour",c_float*4),
- ("uv",c_float*2)]
+ ("colour1",c_float*4),
+ ("tex_diffuse",c_uint32),
+ ("tex_decal",c_uint32),
+ ("tex_normal",c_uint32)]
+#}
-def fixed_string(dest,string):
- return
- for i in range(len(string)):
- dest[i] = string[i]
+class mdl_node(Structure):
+#{
+ _pack_ = 1
+ _fields_ = [("co",c_float*3),
+ ( "q",c_float*4),
+ ( "s",c_float*3),
+ ("sub_uid",c_uint32), # dont use
+ ("submesh_start",c_uint32),
+ ("submesh_count",c_uint32),
+ ("classtype",c_uint32),
+ ("offset",c_uint32),
+ ("parent",c_uint32),
+ ("pstr_name",c_uint32)]
+#}
-def write_model(name):
- fp = open(F"/home/harry/Documents/carve/models/{name}.mdl", "wb")
- collection = bpy.data.collections[name]
-
- header = model()
- header.identifier = 0xABCD0000
- header.vertex_count = 0
- header.indice_count = 0
- header.layer_count = 0
+class mdl_header(Structure):
+#{
+ _pack_ = 1
+ _fields_ = [("identifier",c_uint32),
+ ("version",c_uint32),
+ ("file_length",c_uint32),
+ ("pad0",c_uint32),
+
+ ("node_count",c_uint32),
+ ("node_offset",c_uint32),
+
+ ("submesh_count",c_uint32),
+ ("submesh_offset",c_uint32),
+
+ ("material_count",c_uint32),
+ ("material_offset",c_uint32),
+
+ ("texture_count",c_uint32),
+ ("texture_offset",c_uint32),
+
+ ("anim_count",c_uint32),
+ ("anim_offset",c_uint32),
+
+ ("entdata_size",c_uint32),
+ ("entdata_offset",c_uint32),
+
+ ("strings_size",c_uint32),
+ ("strings_offset",c_uint32),
+
+ ("keyframe_count",c_uint32),
+ ("keyframe_offset",c_uint32),
+
+ ("vertex_count",c_uint32),
+ ("vertex_offset",c_uint32),
+
+ ("indice_count",c_uint32),
+ ("indice_offset",c_uint32),
+
+ ("pack_size",c_uint32),
+ ("pack_offset",c_uint32)]
+#}
+
+class mdl_animation(Structure):
+#{
+ _pack_ = 1
+ _fields_ = [("pstr_name",c_uint32),
+ ("length",c_uint32),
+ ("rate",c_float),
+ ("offset",c_uint32)]
+#}
- layers = []
- vertex_buffer = []
- indice_buffer = []
+class mdl_keyframe(Structure):
+#{
+ _pack_ = 1
+ _fields_ = [("co",c_float*3),
+ ("q",c_float*4),
+ ("s",c_float*3)]
+#}
+
+# ---------------------------------------------------------------------------- #
+# #
+# Entity definitions #
+# #
+# ---------------------------------------------------------------------------- #
+#
+# ctypes _fields_ defines the data which is filled in by:
+# def encode_obj( _, node, node_def ):
+#
+# gizmos get drawn into the viewport via:
+# @staticmethod
+# def draw_scene_helpers( obj ):
+#
+# editor interface, similarly:
+# @staticmethod
+# def editor_interface( layout, obj ):
+#
+
+# Classtype 1
+#
+# Purpose: A rift. Must target another gate; the target gate cannot have more
+# than one targeting node of its own.
+#
+class classtype_gate(Structure):
+#{
+ _pack_ = 1
+ _fields_ = [("target",c_uint32),
+ ("dims",c_float*3)]
+
+ def encode_obj(_, node,node_def):
+ #{
+ node.classtype = 1
+
+ obj = node_def['obj']
+
+ if obj.cv_data.target != None:
+ _.target = obj.cv_data.target.cv_data.uid
- for obj in collection.objects:
if obj.type == 'MESH':
- dgraph = bpy.context.evaluated_depsgraph_get()
- data = obj.evaluated_get(dgraph).data
- data.calc_loop_triangles()
- data.calc_normals_split()
-
- sm = submodel()
- sm.indice_start = header.indice_count
- sm.vertex_start = header.vertex_count
- sm.vertex_count = len(data.vertices)
- sm.indice_count = len(data.loop_triangles)*3
- sm.sdf_type = 0
- sm.pivot[0] = obj.matrix_world.translation[0]
- sm.pivot[1] = obj.matrix_world.translation[2]
- sm.pivot[2] = -obj.matrix_world.translation[1]
+ #{
+ _.dims[0] = obj.data.cv_data.v0[0]
+ _.dims[1] = obj.data.cv_data.v0[1]
+ _.dims[2] = obj.data.cv_data.v0[2]
+ #}
+ else:
+ #{
+ _.dims[0] = obj.cv_data.v0[0]
+ _.dims[1] = obj.cv_data.v0[1]
+ _.dims[2] = obj.cv_data.v0[2]
+ #}
+ #}
- for i in range(3):
- sm.bbx[0][i] = 999999
- sm.bbx[1][i] = -999999
-
- if F"{obj.name}.sdf_cone" in bpy.data.objects:
- cone = bpy.data.objects[F"{obj.name}.sdf_cone"]
- sm.sdf.origin[0] = cone.location[0]
- sm.sdf.origin[1] = cone.location[2] + cone.scale[1]*2.0
- sm.sdf.origin[2] = -cone.location[1]
- sm.sdf.origin[3] = 0.0
-
- lo = cone.scale[0]
- la = cone.scale[1]*2.0
- lh = math.sqrt(lo*lo+la*la)
-
- sm.sdf.info[0] = lo
- sm.sdf.info[1] = la
- sm.sdf.info[2] = lo/lh
- sm.sdf.info[3] = la/lh
-
- sm.sdf_type = 1
+ @staticmethod
+ def draw_scene_helpers( obj ):
+ #{
+ global cv_view_verts, cv_view_colours
+
+ if obj.type == 'MESH':
+ dims = obj.data.cv_data.v0
+ else:
+ dims = obj.cv_data.v0
+
+ vs = [None]*9
+ c = Vector((0,0,dims[2]))
+
+ vs[0] = obj.matrix_world @ Vector((-dims[0],0.0,-dims[1]+dims[2]))
+ vs[1] = obj.matrix_world @ Vector((-dims[0],0.0, dims[1]+dims[2]))
+ vs[2] = obj.matrix_world @ Vector(( dims[0],0.0, dims[1]+dims[2]))
+ vs[3] = obj.matrix_world @ Vector(( dims[0],0.0,-dims[1]+dims[2]))
+ vs[4] = obj.matrix_world @ (c+Vector((-1,0,-2)))
+ vs[5] = obj.matrix_world @ (c+Vector((-1,0, 2)))
+ vs[6] = obj.matrix_world @ (c+Vector(( 1,0, 2)))
+ vs[7] = obj.matrix_world @ (c+Vector((-1,0, 0)))
+ vs[8] = obj.matrix_world @ (c+Vector(( 1,0, 0)))
+
+ indices = [(0,1),(1,2),(2,3),(3,0),(4,5),(5,6),(7,8)]
+
+ for l in indices:
+ #{
+ v0 = vs[l[0]]
+ v1 = vs[l[1]]
+ cv_view_verts += [(v0[0],v0[1],v0[2])]
+ cv_view_verts += [(v1[0],v1[1],v1[2])]
+ cv_view_colours += [(1,1,0,1),(1,1,0,1)]
+ #}
+
+ sw = (0.4,0.4,0.4,0.2)
+ if obj.cv_data.target != None:
+ cv_draw_arrow( obj.location, obj.cv_data.target.location, sw )
+ #}
+
+ @staticmethod
+ def editor_interface( layout, obj ):
+ #{
+ layout.prop( obj.cv_data, "target" )
+
+ mesh = obj.data
+ layout.label( text=F"(i) Data is stored in {mesh.name}" )
+ layout.prop( mesh.cv_data, "v0", text="Gate dimensions" )
+ #}
+#}
+
+class classtype_nonlocal_gate(classtype_gate):
+#{
+ def encode_obj(_,node,node_def):
+ #{
+ node.classtype = 300
+
+ obj = node_def['obj']
+ _.target = encoder_process_pstr( node_def['obj'].cv_data.strp )
+
+ if obj.type == 'MESH':
+ #{
+ _.dims[0] = obj.data.cv_data.v0[0]
+ _.dims[1] = obj.data.cv_data.v0[1]
+ _.dims[2] = obj.data.cv_data.v0[2]
+ #}
+ else:
+ #{
+ _.dims[0] = obj.cv_data.v0[0]
+ _.dims[1] = obj.cv_data.v0[1]
+ _.dims[2] = obj.cv_data.v0[2]
+ #}
+ #}
+
+ @staticmethod
+ def editor_interface( layout, obj ):
+ #{
+ layout.prop( obj.cv_data, "strp", text="Nonlocal ID" )
+
+ mesh = obj.data
+ layout.label( text=F"(i) Data is stored in {mesh.name}" )
+ layout.prop( mesh.cv_data, "v0", text="Gate dimensions" )
+ #}
+#}
+
+# Classtype 3
+#
+# Purpose: player can reset here, its a safe place
+# spawns can share the same name, the closest one will be picked
+#
+# when the world loads it will pick the one named 'start' first.
+#
+class classtype_spawn(Structure):
+#{
+ _pack_ = 1
+ _fields_ = [("pstr_alias",c_uint32)]
+
+ def encode_obj(_, node,node_def):
+ #{
+ node.classtype = 3
+ _.pstr_alias = encoder_process_pstr( node_def['obj'].cv_data.strp )
+ #}
+
+ @staticmethod
+ def draw_scene_helpers( obj ):
+ #{
+ global cv_view_verts, cv_view_colours
+
+ vs = [None]*4
+ vs[0] = obj.matrix_world @ Vector((0,0,0))
+ vs[1] = obj.matrix_world @ Vector((0,2,0))
+ vs[2] = obj.matrix_world @ Vector((0.5,1,0))
+ vs[3] = obj.matrix_world @ Vector((-0.5,1,0))
+ indices = [(0,1),(1,2),(1,3)]
+
+ for l in indices:
+ #{
+ v0 = vs[l[0]]
+ v1 = vs[l[1]]
+
+ cv_view_verts += [(v0[0],v0[1],v0[2])]
+ cv_view_verts += [(v1[0],v1[1],v1[2])]
+ cv_view_colours += [(0,1,1,1),(0,1,1,1)]
+ #}
+
+ cv_draw_sphere( obj.location, 20.0, [0.1,0,0.9,0.4] )
+ #}
+
+ @staticmethod
+ def editor_interface( layout, obj ):
+ #{
+ layout.prop( obj.cv_data, "strp", text="Alias" )
+ #}
+#}
+
+# Classtype 4
+#
+# Purpose: Tells the game to draw water HERE, at this entity.
+#
+class classtype_water(Structure):
+#{
+ _pack_ = 1
+ _fields_ = [("temp",c_uint32)]
+
+ def encode_obj(_, node,node_def):
+ #{
+ node.classtype = 4
+ # no data, spooky
+ #}
+#}
+
+# Classtype 8
+#
+# Purpose: Defines a route node and links to up to two more nodes
+#
+class classtype_route_node(Structure):
+#{
+ _pack_ = 1
+ _fields_ = [("target",c_uint32),
+ ("target1",c_uint32)]
+
+ def encode_obj(_, node,node_def):
+ #{
+ node.classtype = 8
+ obj = node_def['obj']
+
+ if obj.cv_data.target != None:
+ _.target = obj.cv_data.target.cv_data.uid
+ if obj.cv_data.target1 != None:
+ _.target1 = obj.cv_data.target1.cv_data.uid
+ #}
+
+ @staticmethod
+ def draw_scene_helpers( obj ):
+ #{
+ global cv_view_verts, cv_view_colours
+
+ sw = Vector((0.4,0.4,0.4,0.2))
+ sw2 = Vector((1.5,0.2,0.2,0.0))
+ if obj.cv_data.target != None:
+ cv_draw_bpath( obj, obj.cv_data.target, sw, sw )
+ if obj.cv_data.target1 != None:
+ cv_draw_bpath( obj, obj.cv_data.target1, sw, sw )
+
+ cv_draw_bhandle( obj, 1.0, (0.8,0.8,0.8,1.0) )
+ cv_draw_bhandle( obj, -1.0, (0.4,0.4,0.4,1.0) )
+
+ p1 = obj.location+ \
+ obj.matrix_world.to_quaternion() @ Vector((0,0,-6+1.5))
+ cv_draw_arrow( obj.location, p1, sw )
+ #}
+
+ @staticmethod
+ def editor_interface( layout, obj ):
+ #{
+ layout.prop( obj.cv_data, "target", text="Left" )
+ layout.prop( obj.cv_data, "target1", text="Right" )
+ #}
+#}
+
+# Classtype 9
+#
+# Purpose: Defines a route, its 'starting' point, and the colour to use for it
+#
+class classtype_route(Structure):
+#{
+ _pack_ = 1
+ _fields_ = [("id_start",c_uint32),
+ ("pstr_name",c_uint32),
+ ("colour",c_float*3)]
+
+ def encode_obj(_, node,node_def):
+ #{
+ node.classtype = 9
+ obj = node_def['obj']
+
+ _.colour[0] = obj.cv_data.colour[0]
+ _.colour[1] = obj.cv_data.colour[1]
+ _.colour[2] = obj.cv_data.colour[2]
+ _.pstr_name = encoder_process_pstr( obj.cv_data.strp )
+
+ if obj.cv_data.target != None:
+ _.id_start = obj.cv_data.target.cv_data.uid
+ #}
+
+ @staticmethod
+ def draw_scene_helpers( obj ):
+ #{
+ global cv_view_verts, cv_view_colours, cv_view_course_i
+
+ if obj.cv_data.target:
+ cv_draw_arrow( obj.location, obj.cv_data.target.location, [1,1,1,1] )
+
+ # Tries to simulate how we do it in the game
+ #
+ stack = [None]*64
+ stack_i = [0]*64
+ stack[0] = obj.cv_data.target
+ si = 1
+ loop_complete = False
+
+ while si > 0:
+ #{
+ if stack_i[si-1] == 2:
+ #{
+ si -= 1
+ continue
+
+ if si == 0: # Loop failed to complete
+ break
+ #}
+
+ node = stack[si-1]
+
+ targets = [None,None]
+ targets[0] = node.cv_data.target
+
+ if node.cv_data.classtype == 'classtype_route_node':
+ #{
+ targets[1] = node.cv_data.target1
+ #}
- sm.name = obj.name.encode('utf-8')
-
- for vert in data.vertices:
- v = model_vert()
- v.co[0] = vert.co[0]
- v.co[1] = vert.co[2]
- v.co[2] = -vert.co[1]
- v.colour[0] = 1.0
- v.colour[1] = 1.0
- v.colour[2] = 1.0
- v.colour[3] = 1.0
- vertex_buffer += [v]
+ nextnode = targets[stack_i[si-1]]
+ stack_i[si-1] += 1
+
+ if nextnode != None: # branch
+ #{
+ if nextnode == stack[0]: # Loop completed
+ #{
+ loop_complete = True
+ break
+ #}
+
+ valid=True
+ for sj in range(si):
+ #{
+ if stack[sj] == nextnode: # invalidated path
+ #{
+ valid=False
+ break
+ #}
+ #}
+
+ if valid:
+ #{
+ stack_i[si] = 0
+ stack[si] = nextnode
+ si += 1
+ continue
+ #}
+ #}
+ #}
+
+ if loop_complete:
+ #{
+ cc = Vector((obj.cv_data.colour[0],\
+ obj.cv_data.colour[1],\
+ obj.cv_data.colour[2],\
+ 1.0))
+
+ for sj in range(si):
+ #{
+ sk = (sj+1)%si
+
+ if stack[sj].cv_data.classtype == 'classtype_gate' and \
+ stack[sk].cv_data.classtype == 'classtype_gate':
+ #{
+ dist = (stack[sj].location-stack[sk].location).magnitude
+ cv_draw_sbpath( stack[sj], stack[sk], cc*0.4, cc, dist, dist )
+ #}
+ else:
+ cv_draw_bpath( stack[sj], stack[sk], cc, cc )
+ #}
+
+ cv_view_course_i += 1
+ #}
+ #}
+
+ @staticmethod
+ def editor_interface( layout, obj ):
+ #{
+ layout.prop( obj.cv_data, "target", text="'Start' from" )
+ layout.prop( obj.cv_data, "colour" )
+ layout.prop( obj.cv_data, "strp", text="Name" )
+ #}
+#}
+
+# Classtype 12
+#
+# Purpose: links a mesh node to a type 11 (skeleton)
+#
+class classtype_skin(Structure):
+#{
+ _pack_ = 1
+ _fields_ = [("skeleton",c_uint32)]
+
+ def encode_obj(_, node,node_def):
+ #{
+ node.classtype = 12
+
+ armature_def = node_def['linked_armature']
+ _.skeleton = armature_def['obj'].cv_data.uid
+ #}
+#}
+
+# Classtype 11
+#
+# Purpose: defines the allocation requirements for a skeleton
+#
+class classtype_skeleton(Structure):
+#{
+ _pack_ = 1
+ _fields_ = [("channels",c_uint32),
+ ("ik_count",c_uint32),
+ ("collider_count",c_uint32),
+ ("anim_start",c_uint32),
+ ("anim_count",c_uint32)]
+
+ def encode_obj(_, node,node_def):
+ #{
+ node.classtype = 11
+
+ _.channels = len( node_def['bones'] )
+ _.ik_count = node_def['ik_count']
+ _.collider_count = node_def['collider_count']
+ _.anim_start = node_def['anim_start']
+ _.anim_count = node_def['anim_count']
+ #}
+#}
+
+
+# Classtype 10
+#
+# Purpose: intrinsic bone type, stores collision information and limits too
+#
+class classtype_bone(Structure):
+#{
+ _pack_ = 1
+ _fields_ = [("flags",c_uint32),
+ ("ik_target",c_uint32),
+ ("ik_pole",c_uint32),
+ ("hitbox",(c_float*3)*2),
+ ("conevx",c_float*3),
+ ("conevy",c_float*3),
+ ("coneva",c_float*3),
+ ("conet",c_float)]
+
+ def encode_obj(_, node,node_def):
+ #{
+ node.classtype = 10
+
+ armature_def = node_def['linked_armature']
+ obj = node_def['bone']
+
+ _.flags = node_def['deform']
+
+ if 'ik_target' in node_def:
+ #{
+ _.flags |= 0x2
+ _.ik_target = armature_def['bones'].index( node_def['ik_target'] )
+ _.ik_pole = armature_def['bones'].index( node_def['ik_pole'] )
+ #}
+
+ # For ragdolls
+ #
+ if obj.cv_data.collider != 'collider_none':
+ #{
+ if obj.cv_data.collider == 'collider_box':
+ _.flags |= 0x4
+ else:
+ _.flags |= 0x8
+
+ _.hitbox[0][0] = obj.cv_data.v0[0]
+ _.hitbox[0][1] = obj.cv_data.v0[2]
+ _.hitbox[0][2] = -obj.cv_data.v1[1]
+ _.hitbox[1][0] = obj.cv_data.v1[0]
+ _.hitbox[1][1] = obj.cv_data.v1[2]
+ _.hitbox[1][2] = -obj.cv_data.v0[1]
+ #}
+
+ if obj.cv_data.con0:
+ #{
+ _.flags |= 0x100
+ _.conevx[0] = obj.cv_data.conevx[0]
+ _.conevx[1] = obj.cv_data.conevx[2]
+ _.conevx[2] = -obj.cv_data.conevx[1]
+ _.conevy[0] = obj.cv_data.conevy[0]
+ _.conevy[1] = obj.cv_data.conevy[2]
+ _.conevy[2] = -obj.cv_data.conevy[1]
+ _.coneva[0] = obj.cv_data.coneva[0]
+ _.coneva[1] = obj.cv_data.coneva[2]
+ _.coneva[2] = -obj.cv_data.coneva[1]
+ _.conet = obj.cv_data.conet
+ #}
+ #}
+#}
+
+# Classtype 100
+#
+# Purpose: sends a signal to another entity
+#
+class classtype_trigger(Structure):
+#{
+ _pack_ = 1
+ _fields_ = [("target",c_uint32)]
+
+ def encode_obj(_, node,node_def ):
+ #{
+ node.classtype = 100
+ if node_def['obj'].cv_data.target:
+ _.target = node_def['obj'].cv_data.target.cv_data.uid
+ #}
+
+ @staticmethod
+ def draw_scene_helpers( obj ):
+ #{
+ global cv_view_verts, cv_view_colours
+ cv_draw_ucube( obj.matrix_world, [0,1,0,1] )
+
+ if obj.cv_data.target:
+ cv_draw_arrow( obj.location, obj.cv_data.target.location, [1,1,1,1] )
+ #}
+
+ @staticmethod
+ def editor_interface( layout, obj ):
+ #{
+ layout.prop( obj.cv_data, "target", text="Triggers" )
+ #}
+#}
+
+# Classtype 101
+#
+# Purpose: Gives the player an achievement.
+# No cheating! You shouldn't use this entity anyway, since only ME can
+# add achievements to the steam ;)
+#
+class classtype_logic_achievement(Structure):
+#{
+ _pack_ = 1
+ _fields_ = [("pstr_name",c_uint32)]
+
+ def encode_obj(_, node,node_def ):
+ #{
+ node.classtype = 101
+ _.pstr_name = encoder_process_pstr( node_def['obj'].cv_data.strp )
+ #}
+
+ @staticmethod
+ def editor_interface( layout, obj ):
+ #{
+ layout.prop( obj.cv_data, "strp", text="Achievement ID" )
+ #}
+#}
+
+# Classtype 102
+#
+# Purpose: sends a signal to another entity
+#
+class classtype_logic_relay(Structure):
+#{
+ _pack_ = 1
+ _fields_ = [("targets",c_uint32*4)]
+
+ def encode_obj(_, node,node_def ):
+ #{
+ node.classtype = 102
+ obj = node_def['obj']
+ if obj.cv_data.target:
+ _.targets[0] = obj.cv_data.target.cv_data.uid
+ if obj.cv_data.target1:
+ _.targets[1] = obj.cv_data.target1.cv_data.uid
+ if obj.cv_data.target2:
+ _.targets[2] = obj.cv_data.target2.cv_data.uid
+ if obj.cv_data.target3:
+ _.targets[3] = obj.cv_data.target3.cv_data.uid
+ #}
+
+ @staticmethod
+ def draw_scene_helpers( obj ):
+ #{
+ global cv_view_verts, cv_view_colours
+
+ if obj.cv_data.target:
+ cv_draw_arrow( obj.location, obj.cv_data.target.location, [1,1,1,1] )
+ if obj.cv_data.target1:
+ cv_draw_arrow( obj.location, obj.cv_data.target1.location, [1,1,1,1] )
+ if obj.cv_data.target2:
+ cv_draw_arrow( obj.location, obj.cv_data.target2.location, [1,1,1,1] )
+ if obj.cv_data.target3:
+ cv_draw_arrow( obj.location, obj.cv_data.target3.location, [1,1,1,1] )
+ #}
+
+ @staticmethod
+ def editor_interface( layout, obj ):
+ #{
+ layout.prop( obj.cv_data, "target", text="Triggers" )
+ layout.prop( obj.cv_data, "target1", text="Triggers" )
+ layout.prop( obj.cv_data, "target2", text="Triggers" )
+ layout.prop( obj.cv_data, "target3", text="Triggers" )
+ #}
+#}
+
+# Classtype 14
+#
+# Purpose: Plays some audio (44100hz .ogg vorbis only)
+# NOTE: There is a 32mb limit on the audio buffer, world audio is
+# decompressed and stored in signed 16 bit integers (2 bytes)
+# per sample.
+#
+# volume: not used if has 3D flag
+# flags:
+# AUDIO_FLAG_LOOP 0x1
+# AUDIO_FLAG_ONESHOT 0x2 (DONT USE THIS, it breaks semaphores)
+# AUDIO_FLAG_SPACIAL_3D 0x4 (Probably what you want)
+# AUDIO_FLAG_AUTO_START 0x8 (Play when the world starts)
+# ......
+# the rest are just internal flags, only use the above 3.
+#
+class classtype_audio(Structure):
+#{
+ _pack_ = 1
+ _fields_ = [("pstr_file",c_uint32),
+ ("flags",c_uint32),
+ ("volume",c_float)]
+
+ def encode_obj(_, node,node_def ):
+ #{
+ node.classtype = 14
+
+ obj = node_def['obj']
+
+ _.pstr_file = encoder_process_pstr( obj.cv_data.strp )
+
+ flags = 0x00
+ if obj.cv_data.bp0: flags |= 0x1
+ if obj.cv_data.bp1: flags |= 0x4
+ if obj.cv_data.bp2: flags |= 0x8
+
+ if obj.cv_data.audio_format == 'stereo':
+ flags |= 0x200
+ if obj.cv_data.audio_format == 'remain compressed':
+ flags |= 0x400
+
+ _.flags = flags
+ _.volume = obj.cv_data.fltp
+ #}
+
+ @staticmethod
+ def editor_interface( layout, obj ):
+ #{
+ layout.prop( obj.cv_data, "strp", text = "File (.ogg)" )
+
+ layout.prop( obj.cv_data, "bp0", text = "Looping" )
+ layout.prop( obj.cv_data, "bp1", text = "3D Audio" )
+ layout.prop( obj.cv_data, "bp2", text = "Auto Start" )
+ layout.prop( obj.cv_data, "audio_format" )
+
+ layout.prop( obj.cv_data, "fltp", text = "Volume (0-1)" )
+ #}
+
+ @staticmethod
+ def draw_scene_helpers( obj ):
+ #{
+ global cv_view_verts, cv_view_colours
+
+ cv_draw_sphere( obj.location, obj.scale[0], [1,1,0,1] )
+ #}
+#}
+
+# Classtype 200
+#
+# Purpose: world light
+#
+class classtype_world_light( Structure ):
+#{
+ _pack_ = 1
+ _fields_ = [("type",c_uint32),
+ ("colour",c_float*4),
+ ("angle",c_float),
+ ("range",c_float)]
+
+ def encode_obj(_, node, node_def):
+ #{
+ node.classtype = 200
+
+ obj = node_def['obj']
+ data = obj.data
+ _.colour[0] = data.color[0]
+ _.colour[1] = data.color[1]
+ _.colour[2] = data.color[2]
+ _.colour[3] = data.energy
+ _.range = data.cutoff_distance # this has to be manually set
+ # TODO: At some point, automate a min
+ # threshold value
+
+ if obj.data.type == 'POINT':
+ #{
+ _.type = 0
+ _.angle = 0.0
+ #}
+ elif obj.data.type == 'SPOT':
+ #{
+ _.type = 1
+ _.angle = data.spot_size*0.5
+ #}
+
+ if data.cv_data.bp0:
+ _.type += 2
+ #}
+
+ @staticmethod
+ def editor_interface( layout, obj ):
+ #{
+ pass
+ #}
+#}
+
+# Classtype 201
+#
+# Purpose: lighting settings for world
+#
+class classtype_lighting_info(Structure):
+#{
+ _pack_ = 1
+ _fields_ = [("colours",(c_float*3)*3),
+ ("directions",(c_float*2)*3),
+ ("states",c_uint32*3),
+ ("shadow_spread",c_float),
+ ("shadow_length",c_float),
+ ("ambient",c_float*3)]
+
+ def encode_obj(_, node, node_def):
+ #{
+ node.classtype = 201
+
+ # TODO
+ #}
+
+ @staticmethod
+ def editor_interface( layout, obj ):
+ #{
+ pass
+ #}
+#}
+
+class classtype_spawn_link(Structure):
+#{
+ _pack_ = 1
+ _fields_ = [("connections",c_uint32*4)]
+
+ def encode_obj(_, node,node_def ):
+ #{
+ node.classtype = 0
+ #}
+
+ @staticmethod
+ def editor_interface( layout, obj ):
+ #{
+ pass
+ #}
+
+ @staticmethod
+ def draw_scene_helpers( obj ):
+ #{
+ global cv_view_verts, cv_view_colours
+
+ count = 0
+
+ for obj1 in bpy.context.collection.objects:
+ #{
+ if (obj1.cv_data.classtype != 'classtype_spawn_link') and \
+ (obj1.cv_data.classtype != 'classtype_spawn') :
+ continue
+
+ if (obj1.location - obj.location).length < 40.0:
+ #{
+ cv_draw_line( obj.location, obj1.location, [1,1,1,1] )
+ count +=1
+ #}
+
+ if count == 4:
+ break
+ #}
+
+ cv_draw_sphere( obj.location, 20.0, [0.5,0,0.2,0.4] )
+ #}
+#}
+
+# ---------------------------------------------------------------------------- #
+# #
+# Compiler section #
+# #
+# ---------------------------------------------------------------------------- #
+
+# Current encoder state
+#
+g_encoder = None
+
+# Reset encoder
+#
+def encoder_init( collection ):
+#{
+ global g_encoder
+
+ g_encoder = \
+ {
+ # The actual file header
+ #
+ 'header': mdl_header(),
+
+ # Options
+ #
+ 'pack_textures': collection.cv_data.pack_textures,
+
+ # Compiled data chunks (each can be read optionally by the client)
+ #
+ 'data':
+ {
+ #1---------------------------------
+ 'node': [], # Metadata 'chunk'
+ 'submesh': [],
+ 'material': [],
+ 'texture': [],
+ 'anim': [],
+ 'entdata': bytearray(), # variable width
+ 'strings': bytearray(), # .
+ #2---------------------------------
+ 'keyframe': [], # Animations
+ #3---------------------------------
+ 'vertex': [], # Mesh data
+ 'indice': [],
+ #4---------------------------------
+ 'pack': bytearray() # Other generic packed data
+ },
+
+      # All objects of the model in their final hierarchy
+ #
+ "uid_count": 1,
+ "scene_graph":{},
+ "graph_lookup":{},
+
+ # Allows us to reuse definitions
+ #
+ 'string_cache':{},
+ 'mesh_cache': {},
+ 'material_cache': {},
+ 'texture_cache': {}
+ }
+
+ g_encoder['header'].identifier = 0xABCD0000
+ g_encoder['header'].version = 1
+
+ # Add fake NoneID material and texture
+ #
+ none_material = mdl_material()
+ none_material.pstr_name = encoder_process_pstr( "" )
+ none_material.texture_id = 0
+
+ none_texture = mdl_texture()
+ none_texture.pstr_name = encoder_process_pstr( "" )
+ none_texture.pack_offset = 0
+ none_texture.pack_length = 0
+
+ g_encoder['data']['material'] += [none_material]
+ g_encoder['data']['texture'] += [none_texture]
+
+ g_encoder['data']['pack'].extend( b'datapack\0\0\0\0\0\0\0\0' )
+
+ # Add root node
+ #
+ root = mdl_node()
+ root.co[0] = 0
+ root.co[1] = 0
+ root.co[2] = 0
+ root.q[0] = 0
+ root.q[1] = 0
+ root.q[2] = 0
+ root.q[3] = 1
+ root.s[0] = 1
+ root.s[1] = 1
+ root.s[2] = 1
+ root.pstr_name = encoder_process_pstr('')
+ root.submesh_start = 0
+ root.submesh_count = 0
+ root.offset = 0
+ root.classtype = 0
+ root.parent = 0xffffffff
+
+ g_encoder['data']['node'] += [root]
+#}
+
+
+# fill with 0x00 until a multiple of align. Returns how many bytes it added
+#
+def bytearray_align_to( buffer, align, offset=0 ):
+#{
+ count = 0
+
+ while ((len(buffer)+offset) % align) != 0:
+ #{
+ buffer.extend( b'\0' )
+ count += 1
+ #}
+
+ return count
+#}
+
+# Add a string to the string buffer except if it already exists there then we
+# just return its ID.
+#
+def encoder_process_pstr( s ):
+#{
+ global g_encoder
+
+ cache = g_encoder['string_cache']
+
+ if s in cache:
+ return cache[s]
+
+ cache[s] = len( g_encoder['data']['strings'] )
+
+ buffer = g_encoder['data']['strings']
+ buffer.extend( s.encode('utf-8') )
+ buffer.extend( b'\0' )
+
+ bytearray_align_to( buffer, 4 )
+ return cache[s]
+#}
+
+def get_texture_resource_name( img ):
+#{
+ return os.path.splitext( img.name )[0]
+#}
+
+# Pack a texture
+#
+def encoder_process_texture( img ):
+#{
+ global g_encoder
+
+ if img == None:
+ return 0
+
+ cache = g_encoder['texture_cache']
+ buffer = g_encoder['data']['texture']
+ pack = g_encoder['data']['pack']
+
+ name = get_texture_resource_name( img )
+
+ if name in cache:
+ return cache[name]
+
+ cache[name] = len( buffer )
+
+ tex = mdl_texture()
+ tex.pstr_name = encoder_process_pstr( name )
+
+ if g_encoder['pack_textures']:
+ #{
+ tex.pack_offset = len( pack )
+ pack.extend( qoi_encode( img ) )
+ tex.pack_length = len( pack ) - tex.pack_offset
+ #}
+ else:
+ tex.pack_offset = 0
+
+ buffer += [ tex ]
+ return cache[name]
+#}
+
+def material_tex_image(v):
+#{
+ return {
+ "Image Texture":
+ {
+ "image": F"{v}"
+ }
+ }
+#}
+
+cxr_graph_mapping = \
+{
+ # Default shader setup
+ "Principled BSDF":
+ {
+ "Base Color":
+ {
+ "Image Texture":
+ {
+ "image": "tex_diffuse"
+ },
+ "Mix":
+ {
+ "A": material_tex_image("tex_diffuse"),
+ "B": material_tex_image("tex_decal")
+ },
+ },
+ "Normal":
+ {
+ "Normal Map":
+ {
+ "Color": material_tex_image("tex_normal")
+ }
+ }
+ }
+}
+
+# https://harrygodden.com/git/?p=convexer.git;a=blob;f=__init__.py;#l1164
+#
+def material_info(mat):
+#{
+ info = {}
+
+ # Using the cv_graph_mapping as a reference, go through the shader
+ # graph and gather all $props from it.
+ #
+ def _graph_read( node_def, node=None, depth=0 ):
+ #{
+ nonlocal mat
+ nonlocal info
+
+ # Find rootnodes
+ #
+ if node == None:
+ #{
+ _graph_read.extracted = []
+
+ for node_idname in node_def:
+ #{
+ for n in mat.node_tree.nodes:
+ #{
+ if n.name == node_idname:
+ #{
+ node_def = node_def[node_idname]
+ node = n
+ break
+ #}
+ #}
+ #}
+ #}
+
+ for link in node_def:
+ #{
+ link_def = node_def[link]
+
+ if isinstance( link_def, dict ):
+ #{
+ node_link = None
+ for x in node.inputs:
+ #{
+ if isinstance( x, bpy.types.NodeSocketColor ):
+ #{
+ if link == x.name:
+ #{
+ node_link = x
+ break
+ #}
+ #}
+ #}
+
+ if node_link and node_link.is_linked:
+ #{
+ # look for definitions for the connected node type
+ #
+ from_node = node_link.links[0].from_node
+
+ node_name = from_node.name.split('.')[0]
+ if node_name in link_def:
+ #{
+ from_node_def = link_def[ node_name ]
+
+ _graph_read( from_node_def, from_node, depth+1 )
+ #}
+
+ # No definition! :(
+ # TODO: Make a warning for this?
+ #}
+ else:
+ #{
+ if "default" in link_def:
+ #{
+ prop = link_def['default']
+ info[prop] = node_link.default_value
+ #}
+ #}
+ #}
+ else:
+ #{
+ prop = link_def
+ info[prop] = getattr( node, link )
+ #}
+ #}
+ #}
+
+ _graph_read( cxr_graph_mapping )
+ return info
+#}
+
+# Add a material to the material buffer. Returns 0 (None ID) if invalid
+#
+def encoder_process_material( mat ):
+#{
+ global g_encoder
+
+ if mat == None:
+ return 0
+
+ cache = g_encoder['material_cache']
+ buffer = g_encoder['data']['material']
+
+ if mat.name in cache:
+ return cache[mat.name]
+
+ cache[mat.name] = len( buffer )
+
+ dest = mdl_material()
+ dest.pstr_name = encoder_process_pstr( mat.name )
+
+ flags = 0x00
+ if mat.cv_data.collision:
+ flags |= 0x2
+ if mat.cv_data.skate_surface: flags |= 0x1
+ if mat.cv_data.grind_surface: flags |= (0x8|0x1)
+
+ if mat.cv_data.grow_grass: flags |= 0x4
+ dest.flags = flags
+
+ if mat.cv_data.surface_prop == 'concrete': dest.surface_prop = 0
+ if mat.cv_data.surface_prop == 'wood': dest.surface_prop = 1
+ if mat.cv_data.surface_prop == 'grass': dest.surface_prop = 2
+ if mat.cv_data.surface_prop == 'tiles': dest.surface_prop = 3
+
+ if mat.cv_data.shader == 'standard': dest.shader = 0
+ if mat.cv_data.shader == 'standard_cutout': dest.shader = 1
+ if mat.cv_data.shader == 'terrain_blend':
+ #{
+ dest.shader = 2
+
+ dest.colour[0] = pow( mat.cv_data.sand_colour[0], 1.0/2.2 )
+ dest.colour[1] = pow( mat.cv_data.sand_colour[1], 1.0/2.2 )
+ dest.colour[2] = pow( mat.cv_data.sand_colour[2], 1.0/2.2 )
+ dest.colour[3] = 1.0
+
+ dest.colour1[0] = mat.cv_data.blend_offset[0]
+ dest.colour1[1] = mat.cv_data.blend_offset[1]
+ #}
+
+ if mat.cv_data.shader == 'vertex_blend':
+ #{
+ dest.shader = 3
+
+ dest.colour1[0] = mat.cv_data.blend_offset[0]
+ dest.colour1[1] = mat.cv_data.blend_offset[1]
+ #}
+
+ if mat.cv_data.shader == 'water':
+ #{
+ dest.shader = 4
+
+ dest.colour[0] = pow( mat.cv_data.shore_colour[0], 1.0/2.2 )
+ dest.colour[1] = pow( mat.cv_data.shore_colour[1], 1.0/2.2 )
+ dest.colour[2] = pow( mat.cv_data.shore_colour[2], 1.0/2.2 )
+ dest.colour[3] = 1.0
+ dest.colour1[0] = pow( mat.cv_data.ocean_colour[0], 1.0/2.2 )
+ dest.colour1[1] = pow( mat.cv_data.ocean_colour[1], 1.0/2.2 )
+ dest.colour1[2] = pow( mat.cv_data.ocean_colour[2], 1.0/2.2 )
+ dest.colour1[3] = 1.0
+ #}
+
+ inf = material_info( mat )
+
+ if mat.cv_data.shader == 'standard' or \
+ mat.cv_data.shader == 'standard_cutout' or \
+ mat.cv_data.shader == 'terrain_blend' or \
+ mat.cv_data.shader == 'vertex_blend':
+ #{
+ if 'tex_diffuse' in inf:
+ dest.tex_diffuse = encoder_process_texture(inf['tex_diffuse'])
+ #}
+
+ buffer += [dest]
+ return cache[mat.name]
+#}
+
+# Create a tree structure containing all the objects in the collection
+#
+def encoder_build_scene_graph( collection ):
+#{
+ global g_encoder
+
+ print( " creating scene graph" )
+
+ # initialize root
+ #
+ graph = g_encoder['scene_graph']
+ graph_lookup = g_encoder['graph_lookup']
+ graph["obj"] = None
+ graph["depth"] = 0
+ graph["children"] = []
+ graph["uid"] = 0
+ graph["parent"] = None
+
+ def _new_uid():
+ #{
+ global g_encoder
+ uid = g_encoder['uid_count']
+ g_encoder['uid_count'] += 1
+ return uid
+ #}
+
+ for obj in collection.all_objects:
+ #{
+ #if obj.parent: continue
+
+ def _extend( p, n, d ):
+ #{
+ nonlocal collection
+
+ uid = _new_uid()
+ tree = {}
+ tree["uid"] = uid
+ tree["children"] = []
+ tree["depth"] = d
+ tree["obj"] = n
+ tree["parent"] = p
+ n.cv_data.uid = uid
+
+         # Descend into armature
+ #
+ if n.type == 'ARMATURE':
+ #{
+ tree["bones"] = [None] # None is the root transform
+ tree["ik_count"] = 0
+ tree["collider_count"] = 0
+ tree["compile_animation"] = collection.cv_data.animations
- for i in range(3):
- sm.bbx[0][i] = min( sm.bbx[0][i], v.co[i] )
- sm.bbx[1][i] = max( sm.bbx[1][i], v.co[i] )
-
- for l in data.loops:
- pvert = vertex_buffer[l.vertex_index + sm.vertex_start]
- norm = l.normal
- pvert.norm[0] = norm[0]
- pvert.norm[1] = norm[2]
- pvert.norm[2] = -norm[1]
-
- #if data.vertex_colors:
- # colour = data.vertex_colors.active.data[ l.index ].color
- # pvert.colour[0] = colour[0]
+ # Here also collects some information about constraints, ik and
+ # counts colliders for the armature.
+ #
+ def _extendb( p, n, d ):
+ #{
+ nonlocal tree
+
+ btree = {}
+ btree["bone"] = n
+ btree["linked_armature"] = tree
+ btree["uid"] = _new_uid()
+ btree["children"] = []
+ btree["depth"] = d
+ btree["parent"] = p
+ tree["bones"] += [n.name]
+
+ for c in n.children:
+ #{
+ _extendb( btree, c, d+1 )
+ #}
+
+ for c in tree['obj'].pose.bones[n.name].constraints:
+ #{
+ if c.type == 'IK':
+ #{
+ btree["ik_target"] = c.subtarget
+ btree["ik_pole"] = c.pole_subtarget
+ tree["ik_count"] += 1
+ #}
+ #}
+
+ if n.cv_data.collider != 'collider_none':
+ tree['collider_count'] += 1
+
+ btree['deform'] = n.use_deform
+ p['children'] += [btree]
+ #}
+
+ for b in n.data.bones:
+ if not b.parent:
+ _extendb( tree, b, d+1 )
+ #}
- if data.uv_layers:
- uv = data.uv_layers.active.data[ l.index ].uv
- pvert.uv[0] = uv[0]
- pvert.uv[1] = uv[1]
+ # Recurse into children of this object
+ #
+ for obj1 in n.children:
+ #{
+ for c1 in obj1.users_collection:
+ #{
+ if c1 == collection:
+ #{
+ _extend( tree, obj1, d+1 )
+ break
+ #}
+ #}
+ #}
- for tri in data.loop_triangles:
- indice_buffer += [c_uint32(tri.vertices[_]) for _ in range(3)]
+ p["children"] += [tree]
+ graph_lookup[n] = tree
- layers += [sm]
- header.layer_count += 1
- header.vertex_count += sm.vertex_count
- header.indice_count += sm.indice_count
+ #}
- fp.write( bytearray( header ) )
- for l in layers:
- fp.write( bytearray(l) )
- for v in vertex_buffer:
- fp.write( bytearray(v) )
- for i in indice_buffer:
- fp.write( bytearray(i) )
+ _extend( graph, obj, 1 )
- fp.close()
+ #}
+#}
+
+
# Preorder depth-first generator over every node definition in the graph
# (the root itself is not yielded).
#
def encoder_graph_iterator( root ):
#{
    for child in root['children']:
    #{
        yield child
        for descendant in encoder_graph_iterator( child ):
            yield descendant
    #}
#}
+
+
# Push a vertex into the model file, or return a cached index (c_uint32).
# Vertices are deduplicated by quantizing position/normal/uv to 4 decimal
# places and keying on that plus the already-quantized colour/weight/group
# attributes.
#
def encoder_vertex_push( vertex_reference, co,norm,uv,colour,groups,weights ):
#{
    global g_encoder
    buffer = g_encoder['data']['vertex']

    TOLERENCE = 4               # decimal places kept for the dedup key
    m = float(10**TOLERENCE)

    # Would be nice to know if this can be done faster than it currently runs,
    # its quite slow.
    #
    key = (int(co[0]*m+0.5),
           int(co[1]*m+0.5),
           int(co[2]*m+0.5),
           int(norm[0]*m+0.5),
           int(norm[1]*m+0.5),
           int(norm[2]*m+0.5),
           int(uv[0]*m+0.5),
           int(uv[1]*m+0.5),
           colour[0],           # these guys are already quantized
           colour[1],           # .
           colour[2],           # .
           colour[3],           # .
           weights[0],          # v
           weights[1],
           weights[2],
           weights[3],
           groups[0],
           groups[1],
           groups[2],
           groups[3])

    if key in vertex_reference:
        return vertex_reference[key]
    else:
    #{
        index = c_uint32( len(vertex_reference) )
        vertex_reference[key] = index

        # Convert Blender's Z-up coordinates into the engine's Y-up space
        # for both position and normal
        #
        v = mdl_vert()
        v.co[0] = co[0]
        v.co[1] = co[2]
        v.co[2] = -co[1]
        v.norm[0] = norm[0]
        v.norm[1] = norm[2]
        v.norm[2] = -norm[1]
        v.uv[0] = uv[0]
        v.uv[1] = uv[1]
        v.colour[0] = colour[0]
        v.colour[1] = colour[1]
        v.colour[2] = colour[2]
        v.colour[3] = colour[3]
        v.weights[0] = weights[0]
        v.weights[1] = weights[1]
        v.weights[2] = weights[2]
        v.weights[3] = weights[3]
        v.groups[0] = groups[0]
        v.groups[1] = groups[1]
        v.groups[2] = groups[2]
        v.groups[3] = groups[3]

        buffer += [v]
        return index
    #}
#}
+
+
# Compile a mesh (or use one from the cache) onto node, based on node_def.
# Sets node.submesh_start / node.submesh_count; appends to the encoder's
# submesh/vertex/indice arrays. No return value.
#
def encoder_compile_mesh( node, node_def ):
#{
    global g_encoder

    graph = g_encoder['scene_graph']
    graph_lookup = g_encoder['graph_lookup']
    mesh_cache = g_encoder['mesh_cache']
    obj = node_def['obj']
    armature_def = None
    can_use_cache = True

    # Check for modifiers that typically change the data per-instance
    # there is no well defined rule for the choices here, its just what i've
    # needed while producing the game.
    #
    # It may be possible to detect these cases automatically.
    #
    for mod in obj.modifiers:
    #{
        if mod.type == 'DATA_TRANSFER' or mod.type == 'SHRINKWRAP' or \
           mod.type == 'BOOLEAN' or mod.type == 'CURVE' or \
           mod.type == 'ARRAY':
        #{
            can_use_cache = False
        #}

        if mod.type == 'ARMATURE':
            armature_def = graph_lookup[mod.object]
    #}

    # Check the cache first
    #
    if can_use_cache and (obj.data.name in mesh_cache):
    #{
        ref = mesh_cache[obj.data.name]
        node.submesh_start = ref.submesh_start
        node.submesh_count = ref.submesh_count
        return
    #}

    # Compile a whole new mesh
    #
    node.submesh_start = len( g_encoder['data']['submesh'] )
    node.submesh_count = 0

    dgraph = bpy.context.evaluated_depsgraph_get()
    data = obj.evaluated_get(dgraph).data
    data.calc_loop_triangles()
    data.calc_normals_split()

    # Mesh is split into submeshes based on their material
    #
    mat_list = data.materials if len(data.materials) > 0 else [None]
    for material_id, mat in enumerate(mat_list):
    #{
        mref = {}   # (unused)

        sm = mdl_submesh()
        sm.indice_start = len( g_encoder['data']['indice'] )
        sm.vertex_start = len( g_encoder['data']['vertex'] )
        sm.vertex_count = 0
        sm.indice_count = 0
        sm.material_id = encoder_process_material( mat )

        # Seed the bounding box with inverted extremes so min/max converge
        #
        for i in range(3):
        #{
            sm.bbx[0][i] = 999999
            sm.bbx[1][i] = -999999
        #}

        # Keep a reference to very very very similar vertices
        #
        vertex_reference = {}

        # Write the vertex / indice data
        #
        for tri_index, tri in enumerate(data.loop_triangles):
        #{
            if tri.material_index != material_id:
                continue

            for j in range(3):
            #{
                vert = data.vertices[tri.vertices[j]]
                li = tri.loops[j]
                vi = data.loops[li].vertex_index

                # Gather vertex information
                #
                co = vert.co
                norm = data.loops[li].normal
                uv = (0,0)
                colour = (255,255,255,255)
                groups = [0,0,0,0]
                weights = [0,0,0,0]

                # Uvs
                #
                if data.uv_layers:
                    uv = data.uv_layers.active.data[li].uv

                # Vertex Colours
                #
                if data.vertex_colors:
                #{
                    colour = data.vertex_colors.active.data[li].color
                    colour = (int(colour[0]*255.0),\
                              int(colour[1]*255.0),\
                              int(colour[2]*255.0),\
                              int(colour[3]*255.0))
                #}

                # Weight groups: truncates to the 3 with the most influence.
                # The fourth bone ID is never used by the shader so it is
                # always 0
                #
                if armature_def:
                #{
                    src_groups = [_ for _ in data.vertices[vi].groups \
                                  if obj.vertex_groups[_.group].name in \
                                     armature_def['bones']]

                    weight_groups = sorted( src_groups, key = \
                                            lambda a: a.weight, reverse=True )
                    tot = 0.0
                    for ml in range(3):
                    #{
                        if len(weight_groups) > ml:
                        #{
                            g = weight_groups[ml]
                            name = obj.vertex_groups[g.group].name
                            weight = g.weight

                            weights[ml] = weight
                            groups[ml] = armature_def['bones'].index(name)
                            tot += weight
                        #}
                    #}

                    # Renormalize and quantize the kept weights to u16
                    #
                    if len(weight_groups) > 0:
                    #{
                        inv_norm = (1.0/tot) * 65535.0
                        for ml in range(3):
                        #{
                            weights[ml] = int( weights[ml] * inv_norm )
                            weights[ml] = min( weights[ml], 65535 )
                            weights[ml] = max( weights[ml], 0 )
                        #}
                    #}
                #}
                else:
                #{
                    # Without an armature, weights[0] flags freestyle-marked
                    # edges instead
                    #
                    li1 = tri.loops[(j+1)%3]
                    vi1 = data.loops[li1].vertex_index
                    e0 = data.edges[ data.loops[li].edge_index ]

                    if e0.use_freestyle_mark and \
                       ((e0.vertices[0] == vi and e0.vertices[1] == vi1) or \
                        (e0.vertices[0] == vi1 and e0.vertices[1] == vi)):
                    #{
                        weights[0] = 1
                    #}
                #}

                # Add vertex and expand bound box
                #
                index = encoder_vertex_push( vertex_reference, co, \
                                             norm, \
                                             uv, \
                                             colour, \
                                             groups, \
                                             weights )
                g_encoder['data']['indice'] += [index]
            #}
        #}

        # How many unique verts did we add in total
        #
        sm.vertex_count = len(g_encoder['data']['vertex']) - sm.vertex_start
        sm.indice_count = len(g_encoder['data']['indice']) - sm.indice_start

        # Make sure bounding box isn't -inf -> inf if no vertices
        #
        if sm.vertex_count == 0:
            for j in range(2):
                for i in range(3):
                    sm.bbx[j][i] = 0
        else:
        #{
            for j in range(sm.vertex_count):
            #{
                vert = g_encoder['data']['vertex'][ sm.vertex_start + j ]

                for i in range(3):
                #{
                    sm.bbx[0][i] = min( sm.bbx[0][i], vert.co[i] )
                    sm.bbx[1][i] = max( sm.bbx[1][i], vert.co[i] )
                #}
            #}
        #}

        # Add submesh to encoder
        #
        g_encoder['data']['submesh'] += [sm]
        node.submesh_count += 1
    #}

    # Save a reference to this node since we want to reuse the submesh indices
    # later.
    g_encoder['mesh_cache'][obj.data.name] = node
#}
+
+
# Encode entity data for node using the classtype's encode_obj(), appending
# the bytes to the entdata array and recording the offset on the node.
# 'name' is looked up as a class in the module globals; unknown names are
# reported and skipped.
#
def encoder_compile_ent_as( name, node, node_def ):
#{
    global g_encoder

    if name == 'classtype_none':
    #{
        node.offset = 0
        node.classtype = 0
        return
    #}
    elif name not in globals():
    #{
        print( "Classtype '" +name + "' is unknown!" )
        return
    #}

    buffer = g_encoder['data']['entdata']
    node.offset = len(buffer)

    cl = globals()[ name ]
    inst = cl()
    inst.encode_obj( node, node_def )

    buffer.extend( bytearray(inst) )
    bytearray_align_to( buffer, 4 )     # keep entdata 4-byte aligned
#}
+
# Compiles animation data into model and gives us some extra node_def entries
# ('anim_start', 'anim_count'). Iterates every NLA strip, samples each frame
# at 30fps, and writes per-bone keyframes in the engine's coordinate space.
#
def encoder_compile_armature( node, node_def ):
#{
    global g_encoder

    entdata = g_encoder['data']['entdata']
    animdata = g_encoder['data']['anim']
    keyframedata = g_encoder['data']['keyframe']
    mesh_cache = g_encoder['mesh_cache']
    obj = node_def['obj']
    bones = node_def['bones']

    # extra info
    node_def['anim_start'] = len(animdata)
    node_def['anim_count'] = 0

    if not node_def['compile_animation']:
    #{
        return
    #}

    # Compile anims
    #
    if obj.animation_data:
    #{
        # So we can restore later
        #
        previous_frame = bpy.context.scene.frame_current
        previous_action = obj.animation_data.action
        POSE_OR_REST_CACHE = obj.data.pose_position
        obj.data.pose_position = 'POSE'

        for NLALayer in obj.animation_data.nla_tracks:
        #{
            for NLAStrip in NLALayer.strips:
            #{
                # set active
                #
                for a in bpy.data.actions:
                #{
                    if a.name == NLAStrip.name:
                    #{
                        obj.animation_data.action = a
                        break
                    #}
                #}

                # Clip to NLA settings
                #
                anim_start = int(NLAStrip.action_frame_start)
                anim_end = int(NLAStrip.action_frame_end)

                # Export strips
                #
                anim = mdl_animation()
                anim.pstr_name = encoder_process_pstr( NLAStrip.action.name )
                anim.rate = 30.0
                anim.offset = len(keyframedata)
                anim.length = anim_end-anim_start

                # Export the keyframes
                for frame in range(anim_start,anim_end):
                #{
                    bpy.context.scene.frame_set(frame)

                    for bone_name in bones:
                    #{
                        for pb in obj.pose.bones:
                        #{
                            if pb.name != bone_name: continue

                            rb = obj.data.bones[ bone_name ]

                            # relative bone matrix
                            if rb.parent is not None:
                            #{
                                offset_mtx = rb.parent.matrix_local
                                offset_mtx = offset_mtx.inverted_safe() @ \
                                             rb.matrix_local

                                inv_parent = pb.parent.matrix @ offset_mtx
                                inv_parent.invert_safe()
                                fpm = inv_parent @ pb.matrix
                            #}
                            else:
                            #{
                                bone_mtx = rb.matrix.to_4x4()
                                local_inv = rb.matrix_local.inverted_safe()
                                fpm = bone_mtx @ local_inv @ pb.matrix
                            #}

                            loc, rot, sca = fpm.decompose()

                            # local position (engine Y-up space)
                            final_pos = Vector(( loc[0], loc[2], -loc[1] ))

                            # rotation, relative to parent's channel matrix
                            lc_m = pb.matrix_channel.to_3x3()
                            if pb.parent is not None:
                            #{
                                smtx = pb.parent.matrix_channel.to_3x3()
                                lc_m = smtx.inverted() @ lc_m
                            #}
                            rq = lc_m.to_quaternion()

                            kf = mdl_keyframe()
                            kf.co[0] = final_pos[0]
                            kf.co[1] = final_pos[1]
                            kf.co[2] = final_pos[2]

                            # quaternion components remapped into engine space
                            kf.q[0] = rq[1]
                            kf.q[1] = rq[3]
                            kf.q[2] = -rq[2]
                            kf.q[3] = rq[0]

                            # scale
                            kf.s[0] = sca[0]
                            kf.s[1] = sca[2]
                            kf.s[2] = sca[1]

                            keyframedata += [kf]
                            break
                        #}
                    #}
                #}

                # Add to animation buffer
                #
                animdata += [anim]
                node_def['anim_count'] += 1

                # Report progress
                #
                status_name = F"      " + " |"*(node_def['depth']-1)
                print( F"{status_name} | *anim: {NLAStrip.action.name}" )
            #}
        #}

        # Restore context to how it was before
        #
        bpy.context.scene.frame_set( previous_frame )
        obj.animation_data.action = previous_action
        obj.data.pose_position = POSE_OR_REST_CACHE
    #}
#}
+
# We are trying to compile this node_def: build an mdl_node (transform,
# name, parent link), compile its mesh / armature / entity data as
# appropriate, and append it to the encoder's node array.
#
def encoder_process_definition( node_def ):
#{
    global g_encoder

    # data sources for object/bone are taken differently
    #
    if 'obj' in node_def:
    #{
        obj = node_def['obj']
        obj_type = obj.type
        obj_co = obj.matrix_world @ Vector((0,0,0))

        if obj_type == 'ARMATURE':
            obj_classtype = 'classtype_skeleton'
        elif obj_type == 'LIGHT':
        #{
            obj_classtype = 'classtype_world_light'
        #}
        else:
        #{
            obj_classtype = obj.cv_data.classtype

            # Check for armature deform
            #
            for mod in obj.modifiers:
            #{
                if mod.type == 'ARMATURE':
                #{
                    obj_classtype = 'classtype_skin'

                    # Make sure to freeze armature in rest while we collect
                    # vertex information
                    #
                    armature_def = g_encoder['graph_lookup'][mod.object]
                    POSE_OR_REST_CACHE = armature_def['obj'].data.pose_position
                    armature_def['obj'].data.pose_position = 'REST'
                    node_def['linked_armature'] = armature_def
                    break
                #}
            #}
        #}
    #}

    elif 'bone' in node_def:
    #{
        obj = node_def['bone']
        obj_type = 'BONE'
        obj_co = obj.head_local
        obj_classtype = 'classtype_bone'
    #}

    # Create node
    #
    node = mdl_node()
    node.pstr_name = encoder_process_pstr( obj.name )

    if node_def["parent"]:
        node.parent = node_def["parent"]["uid"]

    # Setup transform (engine Y-up space)
    #
    node.co[0] = obj_co[0]
    node.co[1] = obj_co[2]
    node.co[2] = -obj_co[1]

    # Convert rotation quat to our space type
    #
    quat = obj.matrix_local.to_quaternion()
    node.q[0] = quat[1]
    node.q[1] = quat[3]
    node.q[2] = -quat[2]
    node.q[3] = quat[0]

    # Bone scale is just a vector to the tail
    #
    if obj_type == 'BONE':
    #{
        node.s[0] = obj.tail_local[0] - node.co[0]
        node.s[1] = obj.tail_local[2] - node.co[1]
        node.s[2] = -obj.tail_local[1] - node.co[2]
    #}
    else:
    #{
        node.s[0] = obj.scale[0]
        node.s[1] = obj.scale[2]
        node.s[2] = obj.scale[1]
    #}

    # Report status
    #
    tot_uid = g_encoder['uid_count']-1
    obj_uid = node_def['uid']
    obj_depth = node_def['depth']-1

    status_id = F" [{obj_uid: 3}/{tot_uid}]" + " |"*obj_depth
    status_name = status_id + F" L {obj.name}"

    if obj_classtype != 'classtype_none': status_type = obj_classtype
    else: status_type = obj_type

    status_parent = F"{node.parent: 3}"
    status_armref = ""

    if obj_classtype == 'classtype_skin':
        status_armref = F" [armature -> {armature_def['obj'].cv_data.uid}]"

    print(F"{status_name:<32} {status_type:<22} {status_parent} {status_armref}")

    # Process mesh if needed
    #
    if obj_type == 'MESH':
    #{
        encoder_compile_mesh( node, node_def )
    #}
    elif obj_type == 'ARMATURE':
    #{
        encoder_compile_armature( node, node_def )
    #}

    encoder_compile_ent_as( obj_classtype, node, node_def )

    # Make sure to reset the armature we just mucked about with
    # (armature_def / POSE_OR_REST_CACHE were set in the classtype_skin branch)
    #
    if obj_classtype == 'classtype_skin':
        armature_def['obj'].data.pose_position = POSE_OR_REST_CACHE

    g_encoder['data']['node'] += [node]
#}
+
# The post processing step or the pre processing to the writing step:
# concatenates every data array (16-byte aligned), records each array's
# offset/size/count into the header, then writes header + data to 'path'.
#
def encoder_write_to_file( path ):
#{
    global g_encoder

    # Compile down to a byte array
    #
    header = g_encoder['header']
    file_pos = sizeof(header)
    file_data = bytearray()
    print( " Compositing data arrays" )

    for array_name in g_encoder['data']:
    #{
        file_pos += bytearray_align_to( file_data, 16, sizeof(header) )
        arr = g_encoder['data'][array_name]

        setattr( header, array_name + "_offset", file_pos )

        print( F"  {array_name:<16} @{file_pos:> 8X}[{len(arr)}]" )

        if isinstance( arr, bytearray ):
        #{
            # raw byte arrays record a _size
            setattr( header, array_name + "_size", len(arr) )

            file_data.extend( arr )
            file_pos += len(arr)
        #}
        else:
        #{
            # structure arrays record a _count
            setattr( header, array_name + "_count", len(arr) )

            for item in arr:
            #{
                bbytes = bytearray(item)
                file_data.extend( bbytes )
                file_pos += sizeof(item)
            #}
        #}
    #}

    # It is imperative for this field to be sanitized in the future!
    #
    header.file_length = file_pos

    print( " Writing file" )
    # Write header and data chunk to file
    #
    fp = open( path, "wb" )
    fp.write( bytearray( header ) )
    fp.write( file_data )
    fp.close()
#}
+
# Main compiler, uses string as the identifier for the collection.
# Exports the named collection to '<export_dir>/<collection_name>.mdl'.
#
def write_model(collection_name):
#{
    global g_encoder
    print( F"Model graph | Create mode '{collection_name}'" )
    folder = bpy.path.abspath(bpy.context.scene.cv_data.export_dir)
    path = F"{folder}{collection_name}.mdl"
    print( path )

    collection = bpy.data.collections[collection_name]

    encoder_init( collection )
    encoder_build_scene_graph( collection )

    # Compile
    #
    print( " Comping objects" )
    it = encoder_graph_iterator( g_encoder['scene_graph'] )
    for node_def in it:
        encoder_process_definition( node_def )

    # Write
    #
    encoder_write_to_file( path )

    print( F"Completed {collection_name}.mdl" )
#}
+
+# ---------------------------------------------------------------------------- #
+# #
+# GUI section #
+# #
+# ---------------------------------------------------------------------------- #
+
cv_view_draw_handler = None  # viewport draw handler handle (managed elsewhere)
cv_view_shader = gpu.shader.from_builtin('3D_SMOOTH_COLOR') # line-draw shader
cv_view_verts = []           # pending line-segment vertex positions
cv_view_colours = []         # pending per-vertex colours (parallel to verts)
cv_view_course_i = 0         # course index, offsets overlapping route paths
+
# Draw axis aligned sphere at position with radius
# (three wireframe circles, one per axis plane, 16 segments each)
#
def cv_draw_sphere( pos, radius, colour ):
#{
    global cv_view_verts, cv_view_colours

    # previous point on each circle, seeded with the t=0 positions
    ly = pos + Vector((0,0,radius))
    lx = pos + Vector((0,radius,0))
    lz = pos + Vector((0,0,radius))

    pi = 3.14159265358979323846264

    for i in range(16):
    #{
        t = ((i+1.0) * 1.0/16.0) * pi * 2.0
        s = math.sin(t)
        c = math.cos(t)

        py = pos + Vector((s*radius,0.0,c*radius))
        px = pos + Vector((s*radius,c*radius,0.0))
        pz = pos + Vector((0.0,s*radius,c*radius))

        cv_view_verts += [ px, lx ]
        cv_view_verts += [ py, ly ]
        cv_view_verts += [ pz, lz ]

        cv_view_colours += [ colour, colour, colour, colour, colour, colour ]

        ly = py
        lx = px
        lz = pz
    #}
    cv_draw_lines()
#}
+
# Draw a half sphere at position with radius, oriented by the tangent basis
# (tx,ty,tz): two half-circle arcs plus a full equator circle
#
def cv_draw_halfsphere( pos, tx, ty, tz, radius, colour ):
#{
    global cv_view_verts, cv_view_colours

    # previous point on each arc, seeded with the t=0 positions
    ly = pos + tz*radius
    lx = pos + ty*radius
    lz = pos + tz*radius

    pi = 3.14159265358979323846264

    for i in range(16):
    #{
        t = ((i+1.0) * 1.0/16.0) * pi
        s = math.sin(t)
        c = math.cos(t)

        # double angle for the full equator circle
        s1 = math.sin(t*2.0)
        c1 = math.cos(t*2.0)

        py = pos + s*tx*radius + c *tz*radius
        px = pos + s*tx*radius + c *ty*radius
        pz = pos + s1*ty*radius + c1*tz*radius

        cv_view_verts += [ px, lx ]
        cv_view_verts += [ py, ly ]
        cv_view_verts += [ pz, lz ]

        cv_view_colours += [ colour, colour, colour, colour, colour, colour ]

        ly = py
        lx = px
        lz = pz
    #}
    cv_draw_lines()
#}
+
# Draw transformed -1 -> 1 cube as a wireframe in the given colour.
#
# transform: 4x4 matrix applied to the unit cube corners
# colour:    RGBA tuple used for every edge
#
def cv_draw_ucube( transform, colour ):
#{
    global cv_view_verts, cv_view_colours

    a = Vector((-1,-1,-1))
    b = Vector((1,1,1))

    vs = [None]*8
    vs[0] = transform @ Vector((a[0], a[1], a[2]))
    vs[1] = transform @ Vector((a[0], b[1], a[2]))
    vs[2] = transform @ Vector((b[0], b[1], a[2]))
    vs[3] = transform @ Vector((b[0], a[1], a[2]))
    vs[4] = transform @ Vector((a[0], a[1], b[2]))
    vs[5] = transform @ Vector((a[0], b[1], b[2]))
    vs[6] = transform @ Vector((b[0], b[1], b[2]))
    vs[7] = transform @ Vector((b[0], a[1], b[2]))

    indices = [(0,1),(1,2),(2,3),(3,0),(4,5),(5,6),(6,7),(7,4),\
               (0,4),(1,5),(2,6),(3,7)]

    for l in indices:
    #{
        v0 = vs[l[0]]
        v1 = vs[l[1]]
        cv_view_verts += [(v0[0],v0[1],v0[2])]
        cv_view_verts += [(v1[0],v1[1],v1[2])]

        # bug fix: colours were hard-coded green (0,1,0,1), ignoring the
        # 'colour' parameter entirely
        cv_view_colours += [colour,colour]
    #}
    cv_draw_lines()
#}
+
# Draw line with colour (single colour for both endpoints)
#
def cv_draw_line( p0, p1, colour ):
#{
    global cv_view_verts, cv_view_colours

    cv_view_verts += [p0,p1]
    cv_view_colours += [colour, colour]
    cv_draw_lines()
#}
+
# Draw line with colour(s): c0 at p0, c1 at p1
#
def cv_draw_line2( p0, p1, c0, c1 ):
#{
    global cv_view_verts, cv_view_colours

    cv_view_verts += [p0,p1]
    cv_view_colours += [c0,c1]
    cv_draw_lines()
#}
+
# Build an orthonormal tangent basis around normal n, writing the two
# tangent vectors into tx and ty in place (mathutils Vectors).
#
def cv_tangent_basis( n, tx, ty ):
#{
    # pick the axis pairing least parallel to n (1/sqrt(3) threshold)
    if abs( n[0] ) >= 0.57735027:
    #{
        tx[0] = n[1]
        tx[1] = -n[0]
        tx[2] = 0.0
    #}
    else:
    #{
        tx[0] = 0.0
        tx[1] = n[2]
        tx[2] = -n[1]
    #}

    tx.normalize()
    _ty = n.cross( tx )

    ty[0] = _ty[0]
    ty[1] = _ty[1]
    ty[2] = _ty[2]
#}
+
# Draw coloured arrow from p0 to p1, with a two-line arrowhead at the midpoint
#
def cv_draw_arrow( p0, p1, c0 ):
#{
    global cv_view_verts, cv_view_colours

    n = p1-p0
    midpt = p0 + n*0.5
    n.normalize()

    tx = Vector((1,0,0))
    ty = Vector((1,0,0))
    cv_tangent_basis( n, tx, ty )

    cv_view_verts += [p0,p1, midpt+(tx-n)*0.15,midpt, midpt+(-tx-n)*0.15,midpt ]
    cv_view_colours += [c0,c0,c0,c0,c0,c0]
    cv_draw_lines()
#}
+
# Draw handles of a bezier control point: a line from the object's location
# along its local Y by 'direction'
#
def cv_draw_bhandle( obj, direction, colour ):
#{
    global cv_view_verts, cv_view_colours

    p0 = obj.location
    h0 = obj.matrix_world @ Vector((0,direction,0))

    cv_view_verts += [p0]
    cv_view_verts += [h0]
    cv_view_colours += [colour,colour]
    cv_draw_lines()
#}
+
# Draw a cubic bezier curve (at fixed resolution 10) from p0 to p1 with
# handles h0/h1, blending colour from c0 to c1 along the curve
#
def cv_draw_bezier( p0,h0,p1,h1,c0,c1 ):
#{
    global cv_view_verts, cv_view_colours

    last = p0
    for i in range(10):
    #{
        t = (i+1)/10
        a0 = 1-t

        # cubic Bernstein basis, expanded
        tt = t*t
        ttt = tt*t
        p=ttt*p1+(3*tt-3*ttt)*h1+(3*ttt-6*tt+3*t)*h0+(3*tt-ttt-3*t+1)*p0

        cv_view_verts += [(last[0],last[1],last[2])]
        cv_view_verts += [(p[0],p[1],p[2])]
        cv_view_colours += [c0*a0+c1*(1-a0),c0*a0+c1*(1-a0)]

        last = p
    #}
    cv_draw_lines()
#}
+
# I think this one extends the handles of the bezier outwards......
# Draws a bezier between objects o0 and o1 with handle scales s0/s1,
# sideways-offset by the current course index so paths don't overlap.
#
def cv_draw_sbpath( o0,o1,c0,c1,s0,s1 ):
#{
    global cv_view_course_i

    # alternating left/right offset, growing with the course index
    offs = ((cv_view_course_i % 2)*2-1) * cv_view_course_i * 0.02

    p0 = o0.matrix_world @ Vector((offs,  0,0))
    h0 = o0.matrix_world @ Vector((offs, s0,0))
    p1 = o1.matrix_world @ Vector((offs,  0,0))
    h1 = o1.matrix_world @ Vector((offs,-s1,0))

    cv_draw_bezier( p0,h0,p1,h1,c0,c1 )
    cv_draw_lines()
#}
+
# Flush the lines buffers. This is called often because god help you if you
# want to do fixed, fast buffers in this catastrophic programming language.
# Submits the accumulated verts/colours as a LINES batch and clears them.
#
def cv_draw_lines():
#{
    global cv_view_shader, cv_view_verts, cv_view_colours

    # nothing to draw (a line needs at least two vertices)
    if len(cv_view_verts) < 2:
        return

    lines = batch_for_shader(\
            cv_view_shader, 'LINES', \
            { "pos":cv_view_verts, "color":cv_view_colours })

    lines.draw( cv_view_shader )

    cv_view_verts = []
    cv_view_colours = []
#}
+
# Draw a bezier path between objects o0 and o1 with default handle scale 1.0
# (convenience wrapper over cv_draw_sbpath)
#
def cv_draw_bpath( o0,o1,c0,c1 ):
#{
    cv_draw_sbpath( o0,o1,c0,c1,1.0,1.0 )
#}
+
# Semi circle to show the limit. and some lines
# Draws an arc (16 segments) in the plane of 'major'/'minor' around 'center',
# from angle amin to amax, plus spokes at the ends and a tick on the axis.
#
def draw_limit( obj, center, major, minor, amin, amax, colour ):
#{
    global cv_view_verts, cv_view_colours
    f = 0.05        # display scale of the arc
    ay = major*f
    ax = minor*f

    for x in range(16):
    #{
        t0 = x/16
        t1 = (x+1)/16
        a0 = amin*(1.0-t0)+amax*t0
        a1 = amin*(1.0-t1)+amax*t1

        p0 = center + major*f*math.cos(a0) + minor*f*math.sin(a0)
        p1 = center + major*f*math.cos(a1) + minor*f*math.sin(a1)

        p0=obj.matrix_world @ p0
        p1=obj.matrix_world @ p1
        cv_view_verts += [p0,p1]
        cv_view_colours += [colour,colour]

        # spokes from the arc ends back to the center
        if x == 0:
        #{
            cv_view_verts += [p0,center]
            cv_view_colours += [colour,colour]
        #}
        if x == 15:
        #{
            cv_view_verts += [p1,center]
            cv_view_colours += [colour,colour]
        #}
    #}

    # tick mark along the major axis
    cv_view_verts += [center+major*1.2*f,center+major*f*0.8]
    cv_view_colours += [colour,colour]

    cv_draw_lines()
#}
+
# Cone and twist limit: draws a 32-segment cone fan around the axis
# (vy cross vx) at 'center', plus the twist axis line 'va'
#
def draw_cone_twist( center, vx, vy, va ):
#{
    global cv_view_verts, cv_view_colours
    axis = vy.cross( vx )
    axis.normalize()

    size = 0.12     # display size of the cone

    # twist axis indicator
    cv_view_verts += [center, center+va*size]
    cv_view_colours += [ (1,1,1,1), (1,1,1,1) ]

    for x in range(32):
    #{
        t0 = (x/32) * math.tau
        t1 = ((x+1)/32) * math.tau

        c0 = math.cos(t0)
        s0 = math.sin(t0)
        c1 = math.cos(t1)
        s1 = math.sin(t1)

        p0 = center + (axis + vx*c0 + vy*s0).normalized() * size
        p1 = center + (axis + vx*c1 + vy*s1).normalized() * size

        # rim colour encodes the angular position
        col0 = ( abs(c0), abs(s0), 0.0, 1.0 )
        col1 = ( abs(c1), abs(s1), 0.0, 1.0 )

        cv_view_verts += [center, p0, p0, p1]
        cv_view_colours += [ (0,0,0,0), col0, col0, col1 ]
    #}

    cv_draw_lines()
#}
+
# Draws constraints and stuff for the skeleton. This isnt documented and wont
# be. Per bone: box or capsule collider volumes (from cv_data.v0/v1) and,
# when enabled, the cone-twist constraint gizmo. Only drawn in rest pose.
#
def draw_skeleton_helpers( obj ):
#{
    global cv_view_verts, cv_view_colours

    if obj.data.pose_position != 'REST':
    #{
        return
    #}

    for bone in obj.data.bones:
    #{
        c = bone.head_local
        a = Vector((bone.cv_data.v0[0], bone.cv_data.v0[1], bone.cv_data.v0[2]))
        b = Vector((bone.cv_data.v1[0], bone.cv_data.v1[1], bone.cv_data.v1[2]))

        if bone.cv_data.collider == 'collider_box':
        #{
            # wireframe box spanning a..b, offset from the bone head
            vs = [None]*8
            vs[0]=obj.matrix_world@Vector((c[0]+a[0],c[1]+a[1],c[2]+a[2]))
            vs[1]=obj.matrix_world@Vector((c[0]+a[0],c[1]+b[1],c[2]+a[2]))
            vs[2]=obj.matrix_world@Vector((c[0]+b[0],c[1]+b[1],c[2]+a[2]))
            vs[3]=obj.matrix_world@Vector((c[0]+b[0],c[1]+a[1],c[2]+a[2]))
            vs[4]=obj.matrix_world@Vector((c[0]+a[0],c[1]+a[1],c[2]+b[2]))
            vs[5]=obj.matrix_world@Vector((c[0]+a[0],c[1]+b[1],c[2]+b[2]))
            vs[6]=obj.matrix_world@Vector((c[0]+b[0],c[1]+b[1],c[2]+b[2]))
            vs[7]=obj.matrix_world@Vector((c[0]+b[0],c[1]+a[1],c[2]+b[2]))

            indices = [(0,1),(1,2),(2,3),(3,0),(4,5),(5,6),(6,7),(7,4),\
                       (0,4),(1,5),(2,6),(3,7)]

            for l in indices:
            #{
                v0 = vs[l[0]]
                v1 = vs[l[1]]

                cv_view_verts += [(v0[0],v0[1],v0[2])]
                cv_view_verts += [(v1[0],v1[1],v1[2])]
                cv_view_colours += [(0.5,0.5,0.5,0.5),(0.5,0.5,0.5,0.5)]
            #}
        #}
        elif bone.cv_data.collider == 'collider_capsule':
        #{
            # find the dominant axis of the collider extents
            v0 = b-a
            major_axis = 0
            largest = -1.0

            for i in range(3):
            #{
                if abs(v0[i]) > largest:
                #{
                    largest = abs(v0[i])
                    major_axis = i
                #}
            #}

            v1 = Vector((0,0,0))
            v1[major_axis] = 1.0

            tx = Vector((0,0,0))
            ty = Vector((0,0,0))

            # radius from the cross-axis extents, length along the major axis
            cv_tangent_basis( v1, tx, ty )
            r = (abs(tx.dot( v0 )) + abs(ty.dot( v0 ))) * 0.25
            l = v0[ major_axis ] - r*2

            p0 = obj.matrix_world@Vector( c + (a+b)*0.5 + v1*l*-0.5 )
            p1 = obj.matrix_world@Vector( c + (a+b)*0.5 + v1*l* 0.5 )

            colour = [0.2,0.2,0.2,1.0]
            colour[major_axis] = 0.5

            # capsule = two half spheres + four side lines
            cv_draw_halfsphere( p0, -v1, ty, tx, r, colour )
            cv_draw_halfsphere( p1,  v1, ty, tx, r, colour )
            cv_draw_line( p0+tx* r, p1+tx* r, colour )
            cv_draw_line( p0+tx*-r, p1+tx*-r, colour )
            cv_draw_line( p0+ty* r, p1+ty* r, colour )
            cv_draw_line( p0+ty*-r, p1+ty*-r, colour )
        #}
        else:
        #{
            continue
        #}

        center = obj.matrix_world @ c
        if bone.cv_data.con0:
        #{
            vx = Vector([bone.cv_data.conevx[_] for _ in range(3)])
            vy = Vector([bone.cv_data.conevy[_] for _ in range(3)])
            va = Vector([bone.cv_data.coneva[_] for _ in range(3)])
            draw_cone_twist( center, vx, vy, va )

            #draw_limit( obj, c, Vector((0,0,1)),Vector((0,-1,0)), \
            #                    bone.cv_data.mins[0], bone.cv_data.maxs[0], \
            #                    (1,0,0,1))
            #draw_limit( obj, c, Vector((0,-1,0)),Vector((1,0,0)), \
            #                    bone.cv_data.mins[1], bone.cv_data.maxs[1], \
            #                    (0,1,0,1))
            #draw_limit( obj, c, Vector((1,0,0)),Vector((0,0,1)), \
            #                    bone.cv_data.mins[2], bone.cv_data.maxs[2], \
            #                    (0,0,1,1))
        #}
    #}
#}
+
# Viewport draw callback: resets the line buffers and GPU state, then draws
# helpers for every object in the active collection (skeleton colliders for
# rest-pose armatures, classtype-specific gizmos for everything else).
#
def cv_draw():
#{
    global cv_view_shader
    global cv_view_verts
    global cv_view_colours
    global cv_view_course_i

    cv_view_course_i = 0
    cv_view_verts = []
    cv_view_colours = []

    cv_view_shader.bind()
    gpu.state.depth_mask_set(False)
    gpu.state.line_width_set(2.0)
    gpu.state.face_culling_set('BACK')
    gpu.state.depth_test_set('LESS')
    gpu.state.blend_set('NONE')

    for obj in bpy.context.collection.objects:
    #{
        if obj.type == 'ARMATURE':
        #{
            if obj.data.pose_position == 'REST':
                draw_skeleton_helpers( obj )
        #}
        else:
        #{
            # delegate to the classtype's own draw_scene_helpers, if any
            classtype = obj.cv_data.classtype
            if (classtype != 'classtype_none') and (classtype in globals()):
            #{
                cl = globals()[ classtype ]

                if getattr( cl, "draw_scene_helpers", None ):
                #{
                    cl.draw_scene_helpers( obj )
                #}
            #}
        #}
    #}

    cv_draw_lines()
    return
#}
+
+
+# ---------------------------------------------------------------------------- #
+# #
+# Blender #
+# #
+# ---------------------------------------------------------------------------- #
+
# Checks whether this object has a classtype assigned. we can only target
# other classes (and never the active object itself). Used as a poll
# callback for the PointerProperty targets below.
#
def cv_poll_target(scene, obj):
#{
    if obj == bpy.context.active_object:
        return False
    if obj.cv_data.classtype == 'classtype_none':
        return False

    return True
#}
+
# Generic per-mesh vector parameters; meaning depends on the classtype
#
class CV_MESH_SETTINGS(bpy.types.PropertyGroup):
#{
    v0: bpy.props.FloatVectorProperty(name="v0",size=3)
    v1: bpy.props.FloatVectorProperty(name="v1",size=3)
    v2: bpy.props.FloatVectorProperty(name="v2",size=3)
    v3: bpy.props.FloatVectorProperty(name="v3",size=3)
#}
+
# Per-light settings; bp0 is shown in the panel as "Only on during night".
# (removed a stray trailing semicolon)
#
class CV_LIGHT_SETTINGS(bpy.types.PropertyGroup):
#{
    bp0: bpy.props.BoolProperty( name="bp0" )
#}
+
# Properties panel for LIGHT objects, exposing CV_LIGHT_SETTINGS
#
class CV_LIGHT_PANEL(bpy.types.Panel):
#{
    bl_label="[Skate Rift]"
    bl_idname="SCENE_PT_cv_light"
    bl_space_type='PROPERTIES'
    bl_region_type='WINDOW'
    bl_context='data'

    def draw(_,context):
    #{
        active_object = context.active_object
        if active_object == None: return

        if active_object.type != 'LIGHT': return

        data = active_object.data.cv_data
        _.layout.prop( data, "bp0", text="Only on during night" )
    #}
#}
+
# Per-object settings: the classtype selector plus a pool of generic
# properties (strings, ints, floats, bools, object targets, colour) whose
# meaning depends on the chosen classtype
#
class CV_OBJ_SETTINGS(bpy.types.PropertyGroup):
#{
    uid: bpy.props.IntProperty( name="" )   # assigned by the scene graph build

    strp: bpy.props.StringProperty( name="strp" )
    intp: bpy.props.IntProperty( name="intp" )
    fltp: bpy.props.FloatProperty( name="fltp" )
    bp0: bpy.props.BoolProperty( name="bp0" )
    bp1: bpy.props.BoolProperty( name="bp1" )
    bp2: bpy.props.BoolProperty( name="bp2" )
    bp3: bpy.props.BoolProperty( name="bp3" )

    target: bpy.props.PointerProperty( type=bpy.types.Object, name="target", \
                                       poll=cv_poll_target )
    target1: bpy.props.PointerProperty( type=bpy.types.Object, name="target1", \
                                        poll=cv_poll_target )
    target2: bpy.props.PointerProperty( type=bpy.types.Object, name="target2", \
                                        poll=cv_poll_target )
    target3: bpy.props.PointerProperty( type=bpy.types.Object, name="target3", \
                                        poll=cv_poll_target )

    colour: bpy.props.FloatVectorProperty( name="colour",subtype='COLOR',\
                                           min=0.0,max=1.0)

    # item numbers match the engine-side classtype ids
    classtype: bpy.props.EnumProperty(
        name="Format",
        items = [
            ('classtype_none',   "classtype_none",   "", 0),
            ('classtype_gate',   "classtype_gate",   "", 1),
            ('classtype_spawn',  "classtype_spawn",  "", 3),
            ('classtype_water',  "classtype_water",  "", 4),
            ('classtype_route_node', "classtype_route_node", "", 8 ),
            ('classtype_route',  "classtype_route",  "", 9 ),
            ('classtype_audio',"classtype_audio","",14),
            ('classtype_trigger',"classtype_trigger","",100),
            ('classtype_logic_achievement',"classtype_logic_achievement","",101),
            ('classtype_logic_relay',"classtype_logic_relay","",102),
            ('classtype_spawn_link',"classtype_spawn_link","",150),
            ('classtype_nonlocal_gate', "classtype_nonlocal_gate", "", 300)
        ])

    audio_format: bpy.props.EnumProperty(
        name="Loaded format",
        items = [
            ('mono',   "mono",   "", 0),
            ('stereo', "stereo", "", 1),
            ('remain compressed', "remain compressed", "", 2)
        ])
#}
+
# Per-bone settings: collider shape/extents and optional cone-twist
# constraint parameters (used by draw_skeleton_helpers and the exporter)
#
class CV_BONE_SETTINGS(bpy.types.PropertyGroup):
#{
    collider: bpy.props.EnumProperty(
        name="Collider Type",
        items = [
            ('collider_none',    "collider_none",    "", 0),
            ('collider_box',     "collider_box",     "", 1),
            ('collider_capsule', "collider_capsule", "", 2),
        ])

    # collider extents, relative to the bone head
    v0: bpy.props.FloatVectorProperty(name="v0",size=3)
    v1: bpy.props.FloatVectorProperty(name="v1",size=3)

    # fixed UI label typo: was "Constriant 0"
    con0: bpy.props.BoolProperty(name="Constraint 0",default=False)
    mins: bpy.props.FloatVectorProperty(name="mins",size=3)
    maxs: bpy.props.FloatVectorProperty(name="maxs",size=3)

    # cone-twist constraint basis and angle parameters
    conevx: bpy.props.FloatVectorProperty(name="conevx",size=3)
    conevy: bpy.props.FloatVectorProperty(name="conevy",size=3)
    coneva: bpy.props.FloatVectorProperty(name="coneva",size=3)
    conet: bpy.props.FloatProperty(name="conet")
#}
+
# Properties panel for the active bone, exposing CV_BONE_SETTINGS
#
class CV_BONE_PANEL(bpy.types.Panel):
#{
    bl_label="[Skate Rift]"
    bl_idname="SCENE_PT_cv_bone"
    bl_space_type='PROPERTIES'
    bl_region_type='WINDOW'
    bl_context='bone'

    def draw(_,context):
    #{
        active_object = context.active_object
        if active_object == None: return

        bone = active_object.data.bones.active
        if bone == None: return

        _.layout.prop( bone.cv_data, "collider" )
        _.layout.prop( bone.cv_data, "v0" )
        _.layout.prop( bone.cv_data, "v1" )

        _.layout.label( text="Angle Limits" )
        _.layout.prop( bone.cv_data, "con0" )

        _.layout.prop( bone.cv_data, "conevx" )
        _.layout.prop( bone.cv_data, "conevy" )
        _.layout.prop( bone.cv_data, "coneva" )
        _.layout.prop( bone.cv_data, "conet" )
    #}
#}
+
class CV_SCENE_SETTINGS(bpy.types.PropertyGroup):
#{
    """Scene-level exporter options (attached as Scene.cv_data in register)."""
    # use_hidden: when set, compilation also includes collections hidden in
    # the viewport (checked by CV_COMPILE and CV_INTERFACE)
    use_hidden: bpy.props.BoolProperty( name="use hidden", default=False )
    # export_dir: destination directory for compiled .mdl files
    export_dir: bpy.props.StringProperty( name="Export Dir", subtype='DIR_PATH' )
#}
+
class CV_COLLECTION_SETTINGS(bpy.types.PropertyGroup):
#{
    """Per-collection exporter options (attached as Collection.cv_data)."""
    # pack_textures: embed texture data in the .mdl rather than reference it
    # -- NOTE(review): inferred from the name; confirm against write_model
    pack_textures: bpy.props.BoolProperty( name="Pack Textures", default=False )
    # animations: include armature animation data in the export
    animations: bpy.props.BoolProperty( name="Export animation", default=True)
#}
+
class CV_MATERIAL_SETTINGS(bpy.types.PropertyGroup):
#{
    """Material export settings: target shader, physics flags, and the
    per-shader tuning colours/offsets shown by CV_MATERIAL_PANEL."""

    # Which engine shader this material compiles to
    shader: bpy.props.EnumProperty(
        name="Format",
        items = [
            ('standard',"standard","",0),
            ('standard_cutout', "standard_cutout", "", 1),
            ('terrain_blend', "terrain_blend", "", 2),
            ('vertex_blend', "vertex_blend", "", 3),
            ('water',"water","",4),
        ])

    # Physics surface response category
    surface_prop: bpy.props.EnumProperty(
        name="Surface Property",
        items = [
            ('concrete','concrete','',0),
            ('wood','wood','',1),
            ('grass','grass','',2),
            ('tiles','tiles','',3)
        ])

    # Collision / gameplay flags
    collision: bpy.props.BoolProperty(
        name="Collisions Enabled",
        default=True,
        description = "Can the player collide with this material" )
    skate_surface: bpy.props.BoolProperty(
        name="Skate Surface",
        default=True,
        description = "Should the game try to target this surface?" )
    grind_surface: bpy.props.BoolProperty(
        name="Grind Surface",
        default=False,
        description = "Grind face?" )
    grow_grass: bpy.props.BoolProperty(
        name="Grow Grass",
        default=False,
        description = "Spawn grass sprites on this surface?" )

    # terrain_blend / vertex_blend: UV shift applied on steep faces
    blend_offset: bpy.props.FloatVectorProperty(
        name="Blend Offset",
        size=2,
        default=Vector((0.5,0.0)),
        description="When surface is more than 45 degrees, add this vector " +
                    "to the UVs" )

    # terrain_blend: colour blended in near the 0 coordinate on UP axis
    sand_colour: bpy.props.FloatVectorProperty(
        name="Sand Colour",
        subtype='COLOR',
        min=0.0,max=1.0,
        default=Vector((0.79,0.63,0.48)),
        description="Blend to this colour near the 0 coordinate on UP axis" )

    # water shader colours
    shore_colour: bpy.props.FloatVectorProperty(
        name="Shore Colour",
        subtype='COLOR',
        min=0.0,max=1.0,
        default=Vector((0.03,0.32,0.61)),
        description="Water colour at the shoreline" )
    ocean_colour: bpy.props.FloatVectorProperty(
        name="Ocean Colour",
        subtype='COLOR',
        min=0.0,max=1.0,
        default=Vector((0.0,0.006,0.03)),
        description="Water colour in the deep bits" )
#}
+
class CV_MATERIAL_PANEL(bpy.types.Panel):
#{
    """Material properties tab for Skate Rift shader/physics settings."""

    bl_label="Skate Rift material"
    bl_idname="MATERIAL_PT_cv_material"
    bl_space_type='PROPERTIES'
    bl_region_type='WINDOW'
    bl_context="material"

    def draw(_,context):
    #{
        # Require an active object with an active material
        obj = bpy.context.active_object
        if obj is None: return
        mat = obj.active_material
        if mat is None: return

        layout = _.layout
        cv = mat.cv_data

        # Show which image will be compiled alongside this material
        info = material_info( mat )
        if 'tex_diffuse' in info:
        #{
            layout.label( icon='INFO',
                text=F"{info['tex_diffuse'].name} will be compiled" )
        #}

        layout.prop( cv, "shader" )
        layout.prop( cv, "surface_prop" )
        layout.prop( cv, "collision" )

        # skate targeting only applies to collidable materials
        if cv.collision:
            layout.prop( cv, "skate_surface" )
        layout.prop( cv, "grind_surface" )
        layout.prop( cv, "grow_grass" )

        # Per-shader tuning sections
        shader = cv.shader
        if shader == "terrain_blend":
        #{
            box = layout.box()
            box.prop( cv, "blend_offset" )
            box.prop( cv, "sand_colour" )
        #}
        elif shader == "vertex_blend":
        #{
            box = layout.box()
            box.label( icon='INFO', text="Uses vertex colours, the R channel" )
            box.prop( cv, "blend_offset" )
        #}
        elif shader == "water":
        #{
            box = layout.box()
            box.label( icon='INFO', text="Depth scale of 16 meters" )
            box.prop( cv, "shore_colour" )
            box.prop( cv, "ocean_colour" )
        #}
    #}
#}
+
class CV_OBJ_PANEL(bpy.types.Panel):
#{
    """Object properties tab: entity classtype selection and, when the
    selected class provides one, its custom editor section."""

    bl_label="Entity Config"
    bl_idname="SCENE_PT_cv_entity"
    bl_space_type='PROPERTIES'
    bl_region_type='WINDOW'
    bl_context="object"

    def draw(_,context):
    #{
        obj = bpy.context.active_object
        if obj is None: return

        # Armatures are skeletons by definition; no classtype to pick
        if obj.type == 'ARMATURE':
        #{
            row = _.layout.row()
            row.enabled = False
            row.label( text="This object has the intrinsic classtype of skeleton" )
            return
        #}

        _.layout.prop( obj.cv_data, "classtype" )

        # Entity classes are plain module-level classes named after the
        # classtype; delegate to their editor_interface when present
        ct = obj.cv_data.classtype
        if (ct != 'classtype_none') and (ct in globals()):
        #{
            cls = globals()[ ct ]
            if getattr( cls, "editor_interface", None ):
            #{
                cls.editor_interface( _.layout, obj )
            #}
        #}
    #}
#}
+
class CV_COMPILE(bpy.types.Operator):
#{
    """Operator: compile every (visible) collection under 'export'."""

    bl_idname="carve.compile_all"
    bl_label="Compile All"

    def execute(_,context):
    #{
        layer = bpy.context.view_layer
        include_hidden = bpy.context.scene.cv_data.use_hidden

        # Hidden collections are skipped unless the scene opts in
        for col in layer.layer_collection.children["export"].children:
            if include_hidden or not col.hide_viewport:
                write_model( col.name )

        return {'FINISHED'}
    #}
#}
+
class CV_COMPILE_THIS(bpy.types.Operator):
#{
    """Operator: compile only the active collection."""

    bl_idname="carve.compile_this"
    bl_label="Compile This collection"

    def execute(_,context):
    #{
        # Unlike carve.compile_all, no visibility/membership filtering here
        write_model( bpy.context.collection.name )
        return {'FINISHED'}
    #}
#}
+
class CV_INTERFACE(bpy.types.Panel):
#{
    """3D-viewport sidebar: export directory, active-collection status and
    the compile buttons."""

    bl_idname = "VIEW3D_PT_carve"
    bl_label = "Skate Rift"
    bl_space_type = 'VIEW_3D'
    bl_region_type = 'UI'
    bl_category = "Skate Rift"

    def draw(_, context):
    #{
        layout = _.layout
        layout.prop( context.scene.cv_data, "export_dir" )

        col = bpy.context.collection
        view_layer = bpy.context.view_layer
        include_hidden = bpy.context.scene.cv_data.use_hidden

        # Count exportable collections; note whether the active collection
        # is one of the 'export' group's children at all (hidden or not)
        in_export = False
        export_count = 0
        for child in view_layer.layer_collection.children["export"].children:
        #{
            if include_hidden or not child.hide_viewport:
                export_count += 1

            if child.name == col.name:
            #{
                in_export = True
            #}
        #}

        # Active collection status box
        status = layout.box()
        if in_export:
        #{
            status.label( text=col.name + ".mdl" )
            status.prop( col.cv_data, "pack_textures" )
            status.prop( col.cv_data, "animations" )
            status.operator( "carve.compile_this" )
        #}
        else:
        #{
            dim = status.row()
            dim.enabled=False
            dim.label( text=col.name )
            status.label( text="This collection is not in the export group" )
        #}

        # Compile-all row, greyed out when nothing would be compiled
        controls = layout.box().row()
        split = controls.split( factor = 0.3, align=True )
        split.prop( context.scene.cv_data, "use_hidden", text="hidden" )

        compile_row = split.row()
        if export_count == 0:
            compile_row.enabled=False
        compile_row.operator( "carve.compile_all",
            text=F"Compile all ({export_count} collections)" )
    #}
#}
+
+
# All bpy classes (un)registered by register()/unregister(). Several entries
# (CV_OBJ_SETTINGS, CV_MESH_SETTINGS, CV_LIGHT_SETTINGS, CV_LIGHT_PANEL)
# are defined elsewhere in this file.
classes = [CV_OBJ_SETTINGS,CV_OBJ_PANEL,CV_COMPILE,CV_INTERFACE,\
           CV_MESH_SETTINGS, CV_SCENE_SETTINGS, CV_BONE_SETTINGS,\
           CV_BONE_PANEL, CV_COLLECTION_SETTINGS, CV_COMPILE_THIS,\
           CV_MATERIAL_SETTINGS, CV_MATERIAL_PANEL, CV_LIGHT_SETTINGS,\
           CV_LIGHT_PANEL]
+
def register():
#{
    """Register all add-on classes, attach cv_data settings groups to the
    relevant ID types, and install the viewport draw callback."""
    global cv_view_draw_handler

    for cls in classes:
        bpy.utils.register_class(cls)

    # One PropertyGroup pointer per datablock kind, all exposed as .cv_data
    pointer = bpy.props.PointerProperty
    bpy.types.Object.cv_data     = pointer(type=CV_OBJ_SETTINGS)
    bpy.types.Mesh.cv_data       = pointer(type=CV_MESH_SETTINGS)
    bpy.types.Scene.cv_data      = pointer(type=CV_SCENE_SETTINGS)
    bpy.types.Bone.cv_data       = pointer(type=CV_BONE_SETTINGS)
    bpy.types.Collection.cv_data = pointer(type=CV_COLLECTION_SETTINGS)
    bpy.types.Material.cv_data   = pointer(type=CV_MATERIAL_SETTINGS)
    bpy.types.Light.cv_data      = pointer(type=CV_LIGHT_SETTINGS)

    # Overlay rendering (cv_draw, defined elsewhere in this file)
    cv_view_draw_handler = bpy.types.SpaceView3D.draw_handler_add(
        cv_draw,(),'WINDOW','POST_VIEW')
#}
+
def unregister():
#{
    """Unregister all add-on classes and remove the viewport draw callback."""
    global cv_view_draw_handler

    for cls in classes:
        bpy.utils.unregister_class(cls)

    bpy.types.SpaceView3D.draw_handler_remove(cv_view_draw_handler,'WINDOW')
#}
+
+# ---------------------------------------------------------------------------- #
+# #
+# QOI encoder #
+# #
+# ---------------------------------------------------------------------------- #
+# #
+# Transliteration of: #
+# https://github.com/phoboslab/qoi/blob/master/qoi.h #
+# #
+# Copyright (c) 2021, Dominic Szablewski - https://phoboslab.org #
+# SPDX-License-Identifier: MIT #
+# QOI - The "Quite OK Image" format for fast, lossless image compression #
+# #
+# ---------------------------------------------------------------------------- #
+
class qoi_rgba_t(Structure):
#{
    # One RGBA pixel: four tightly packed unsigned bytes, mirroring the
    # qoi_rgba_t union in the reference qoi.h implementation.
    _pack_ = 1
    _fields_ = [("r",c_uint8),
                ("g",c_uint8),
                ("b",c_uint8),
                ("a",c_uint8)]
#}
+
# QOI chunk tags: two-bit opcodes live in the top bits of the first byte;
# RGB/RGBA use the full byte. QOI_MASK_2 extracts the two-bit opcode.
QOI_OP_INDEX = 0x00 # 00xxxxxx
QOI_OP_DIFF = 0x40 # 01xxxxxx
QOI_OP_LUMA = 0x80 # 10xxxxxx
QOI_OP_RUN = 0xc0 # 11xxxxxx
QOI_OP_RGB = 0xfe # 11111110
QOI_OP_RGBA = 0xff # 11111111

QOI_MASK_2 = 0xc0 # 11000000
+
def qoi_colour_hash( c ):
#{
    """Colour-index hash of pixel c (QOI_COLOR_HASH from qoi.h)."""
    weights = (3, 5, 7, 11)
    channels = (c.r, c.g, c.b, c.a)
    return sum( ch * w for ch, w in zip(channels, weights) )
#}
+
def qoi_eq( a, b ):
#{
    """True when pixels a and b match on every RGBA channel."""
    for channel in ('r', 'g', 'b', 'a'):
        if getattr(a, channel) != getattr(b, channel):
            return False
    return True
#}
+
def qoi_32bit( v ):
#{
    """Big-endian bytearray of the low 32 bits of v (QOI header fields)."""
    return bytearray( (v >> shift) & 0xff for shift in (24, 16, 8, 0) )
#}
+
def qoi_encode( img ):
#{
    """Encode an image into the QOI ("Quite OK Image") format.

    img: assumed to be a bpy.types.Image-like object providing .name,
         .size[0]/.size[1], .channels and a flat float .pixels array
         (NOTE(review): inferred from usage — confirm with callers).

    Returns a bytearray holding the QOI stream, zero-padded to a 16-byte
    boundary via bytearray_align_to (defined elsewhere in this file).
    """
    data = bytearray()

    print(F" . Encoding {img.name}.qoi[{img.size[0]},{img.size[1]}]")

    # 64-entry running colour cache, targeted by QOI_OP_INDEX chunks
    index = [ qoi_rgba_t() for _ in range(64) ]

    # Header: magic 'qoif' (bytes of a little-endian u32 0x66696f71),
    # big-endian width and height, channels=4 (output is always RGBA),
    # colourspace=0
    data.extend( bytearray(c_uint32(0x66696f71)) )
    data.extend( qoi_32bit( img.size[0] ) )
    data.extend( qoi_32bit( img.size[1] ) )
    data.extend( bytearray(c_uint8(4)) )
    data.extend( bytearray(c_uint8(0)) )

    # Previous pixel starts as opaque black, per the QOI spec
    run = 0
    px_prev = qoi_rgba_t()
    px_prev.r = c_uint8(0)
    px_prev.g = c_uint8(0)
    px_prev.b = c_uint8(0)
    px_prev.a = c_uint8(255)

    px = qoi_rgba_t()
    px.r = c_uint8(0)
    px.g = c_uint8(0)
    px.b = c_uint8(0)
    px.a = c_uint8(255)

    px_len = img.size[0] * img.size[1]

    # Quantize every float channel to 0..255 once, up front
    paxels = [ int(min(max(_,0),1)*255) for _ in img.pixels ]

    for px_pos in range( px_len ):
    #{
        idx = px_pos * img.channels
        nc = img.channels-1

        # Clamp the channel offset so images with fewer than 4 channels
        # repeat their last channel for the missing ones
        px.r = paxels[idx+min(0,nc)]
        px.g = paxels[idx+min(1,nc)]
        px.b = paxels[idx+min(2,nc)]
        px.a = paxels[idx+min(3,nc)]

        if qoi_eq( px, px_prev ):
        #{
            # Same as previous pixel: extend the run, flushing at the
            # 62-pixel limit or at the end of the image
            run += 1

            if (run == 62) or (px_pos == px_len-1):
            #{
                data.extend( bytearray( c_uint8(QOI_OP_RUN | (run-1))) )
                run = 0
            #}
        #}
        else:
        #{
            # Pixel changed: flush any pending run first
            if run > 0:
            #{
                data.extend( bytearray( c_uint8(QOI_OP_RUN | (run-1))) )
                run = 0
            #}

            index_pos = qoi_colour_hash(px) % 64

            if qoi_eq( index[index_pos], px ):
            #{
                # Exact hit in the colour cache: one-byte INDEX chunk
                data.extend( bytearray( c_uint8(QOI_OP_INDEX | index_pos)) )
            #}
            else:
            #{
                # Cache miss: remember this colour, then pick an encoding
                index[ index_pos ].r = px.r
                index[ index_pos ].g = px.g
                index[ index_pos ].b = px.b
                index[ index_pos ].a = px.a

                if px.a == px_prev.a:
                #{
                    # Alpha unchanged: try the small-delta encodings
                    vr = int(px.r) - int(px_prev.r)
                    vg = int(px.g) - int(px_prev.g)
                    vb = int(px.b) - int(px_prev.b)

                    vg_r = vr - vg
                    vg_b = vb - vg

                    if (vr > -3) and (vr < 2) and\
                       (vg > -3) and (vg < 2) and\
                       (vb > -3) and (vb < 2):
                    #{
                        # All deltas within [-2,1]: one-byte DIFF chunk
                        op = QOI_OP_DIFF | (vr+2) << 4 | (vg+2) << 2 | (vb+2)
                        data.extend( bytearray( c_uint8(op) ))
                    #}
                    elif (vg_r > -9) and (vg_r < 8) and\
                         (vg > -33) and (vg < 32 ) and\
                         (vg_b > -9) and (vg_b < 8):
                    #{
                        # Green delta plus red/blue relative to it:
                        # two-byte LUMA chunk
                        op = QOI_OP_LUMA | (vg+32)
                        delta = (vg_r+8) << 4 | (vg_b + 8)
                        data.extend( bytearray( c_uint8(op) ) )
                        data.extend( bytearray( c_uint8(delta) ))
                    #}
                    else:
                    #{
                        # Fall back to a full RGB literal (alpha carried over)
                        data.extend( bytearray( c_uint8(QOI_OP_RGB) ) )
                        data.extend( bytearray( c_uint8(px.r) ))
                        data.extend( bytearray( c_uint8(px.g) ))
                        data.extend( bytearray( c_uint8(px.b) ))
                    #}
                #}
                else:
                #{
                    # Alpha changed: full RGBA literal
                    data.extend( bytearray( c_uint8(QOI_OP_RGBA) ) )
                    data.extend( bytearray( c_uint8(px.r) ))
                    data.extend( bytearray( c_uint8(px.g) ))
                    data.extend( bytearray( c_uint8(px.b) ))
                    data.extend( bytearray( c_uint8(px.a) ))
                #}
            #}
        #}

        px_prev.r = px.r
        px_prev.g = px.g
        px_prev.b = px.b
        px_prev.a = px.a
    #}

    # Stream terminator (seven 0x00 bytes then 0x01), then pad the buffer
    # to 16 bytes for the .mdl container
    for i in range(7):
        data.extend( bytearray( c_uint8(0) ))
    data.extend( bytearray( c_uint8(1) ))
    bytearray_align_to( data, 16, 0 )
    return data
#}