+ for j in range(3):#{
+ vert = data.vertices[tri.vertices[j]]
+ li = tri.loops[j]
+ vi = data.loops[li].vertex_index
+
+ # Gather vertex information
+ #
+ co = vert.co
+ norm = data.loops[li].normal
+ uv = (0,0)
+ colour = (255,255,255,255)
+ groups = [0,0,0,0]
+ weights = [0,0,0,0]
+
+ # Uvs
+ #
+ if data.uv_layers:
+ uv = data.uv_layers.active.data[li].uv
+
+ # Vertex Colours
+ #
+ if data.vertex_colors:#{
+ colour = data.vertex_colors.active.data[li].color
+ colour = (int(colour[0]*255.0),\
+ int(colour[1]*255.0),\
+ int(colour[2]*255.0),\
+ int(colour[3]*255.0))
+ #}
+
+ # Weight groups: truncates to the 3 with the most influence. The
+ # fourth bone ID is never used by the shader so it
+ # is always 0
+ #
+ if armature:#{
+ src_groups = [_ for _ in data.vertices[vi].groups \
+ if obj.vertex_groups[_.group].name in \
+ rig_weight_groups ]
+
+ weight_groups = sorted( src_groups, key = \
+ lambda a: a.weight, reverse=True )
+ tot = 0.0
+ for ml in range(3):#{
+ if len(weight_groups) > ml:#{
+ g = weight_groups[ml]
+ name = obj.vertex_groups[g.group].name
+ weight = g.weight
+ weights[ml] = weight
+ groups[ml] = rig_weight_groups.index(name)
+ tot += weight
+ #}
+ #}
+
+ if len(weight_groups) > 0:#{
+ inv_norm = (1.0/tot) * 65535.0
+ for ml in range(3):#{
+ weights[ml] = int( weights[ml] * inv_norm )
+ weights[ml] = min( weights[ml], 65535 )
+ weights[ml] = max( weights[ml], 0 )
+ #}
+ #}
+ #}
+ else:#{
+ li1 = tri.loops[(j+1)%3]
+ vi1 = data.loops[li1].vertex_index
+ e0 = data.edges[ data.loops[li].edge_index ]
+
+ if e0.use_freestyle_mark and \
+ ((e0.vertices[0] == vi and e0.vertices[1] == vi1) or \
+ (e0.vertices[0] == vi1 and e0.vertices[1] == vi)):
+ #{
+ weights[0] = 1
+ #}
+ #}
+
+ TOLERENCE = float(10**4)
+ key = (int(co[0]*TOLERENCE+0.5),
+ int(co[1]*TOLERENCE+0.5),
+ int(co[2]*TOLERENCE+0.5),
+ int(norm[0]*TOLERENCE+0.5),
+ int(norm[1]*TOLERENCE+0.5),
+ int(norm[2]*TOLERENCE+0.5),
+ int(uv[0]*TOLERENCE+0.5),
+ int(uv[1]*TOLERENCE+0.5),
+ colour[0], # these guys are already quantized
+ colour[1], # .
+ colour[2], # .
+ colour[3], # .
+ weights[0], # v
+ weights[1],
+ weights[2],
+ weights[3],
+ groups[0],
+ groups[1],
+ groups[2],
+ groups[3])
+
+ if key in vertex_reference:
+ index = vertex_reference[key]
+ else:#{
+ index = bytearray(c_uint32(sm.vertex_count))
+ sm.vertex_count+=1
+
+ vertex_reference[key] = index
+ v = mdl_vert()
+ v.co[0] = co[0]
+ v.co[1] = co[2]
+ v.co[2] = -co[1]
+ v.norm[0] = norm[0]
+ v.norm[1] = norm[2]
+ v.norm[2] = -norm[1]
+ v.uv[0] = uv[0]
+ v.uv[1] = uv[1]
+ v.colour[0] = colour[0]
+ v.colour[1] = colour[1]
+ v.colour[2] = colour[2]
+ v.colour[3] = colour[3]
+ v.weights[0] = weights[0]
+ v.weights[1] = weights[1]
+ v.weights[2] = weights[2]
+ v.weights[3] = weights[3]
+ v.groups[0] = groups[0]
+ v.groups[1] = groups[1]
+ v.groups[2] = groups[2]
+ v.groups[3] = groups[3]
+
+ for i in range(3):#{
+ sm.bbx[0][i] = min( sm.bbx[0][i], v.co[i] )
+ sm.bbx[1][i] = max( sm.bbx[1][i], v.co[i] )
+ #}
+
+ sr_compile.vertex_data.extend(bytearray(v))
+ #}
+
+ sm.indice_count += 1
+ sr_compile.indice_data.extend( index )
+ #}
+ #}
+
+ # Make sure bounding box isn't -inf -> inf if no vertices
+ #
+ if sm.vertex_count == 0:
+ for j in range(2):
+ for i in range(3):
+ sm.bbx[j][i] = 0
+
+ # Add submesh to encoder
+ #
+ sr_compile.submesh_data.extend( bytearray(sm) )
+ submesh_count += 1
+ #}
+
+ if armature:#{
+ armature.data.pose_position = POSE_OR_REST_CACHE
+ #}
+
+ # Save a reference to this mesh since we want to reuse the submesh indices
+ # later.
+ sr_compile.mesh_cache[obj.data.name]=(submesh_start,submesh_count)
+ return (submesh_start,submesh_count,armature_id)
+#}
+
def sr_compile_mesh( obj ):
#{
    """Compile one mesh object into an mdl_mesh node and append it to the
       compiler's mesh data buffer.

       The node records the object's transform, interned name, a packed
       entity id (0 when the object is not an entity), and the submesh /
       armature references produced by sr_compile_mesh_internal().
    """
    mesh_node = mdl_mesh()
    compile_obj_transform( obj, mesh_node.transform )
    mesh_node.pstr_name = sr_compile_string( obj.name )

    # Entity id packs the entity enum in the high 16 bits and the
    # per-type instance index in the low 16 bits; 'none' objects get 0.
    kind = obj_ent_type( obj )
    if kind == 'none':#{
        mesh_node.entity_id = 0
    #}
    else:#{
        low = sr_compile.entity_ids[ obj.name ]
        high = get_entity_enum_id( kind )
        mesh_node.entity_id = (high << 16) | low
    #}

    mesh_node.submesh_start, mesh_node.submesh_count, \
        mesh_node.armature_id = sr_compile_mesh_internal( obj )

    sr_compile.mesh_data.extend( bytearray(mesh_node) )
#}
+
def sr_compile_fonts( collection ):
#{
    """Compile every ent_font object in the collection into font, variant
       and glyph entity records, plus raw vertex/indice data for each glyph.

       Glyphs are carved out of each variant mesh by grouping its triangles
       into connected islands and assigning each island to the glyph whose
       2D bounds (in the XZ plane) contain the island's centroid.
    """
    print( F"[SR] Compiling fonts" )

    # Running totals across all fonts, used as start offsets in the records.
    glyph_count = 0
    variant_count = 0

    for obj in collection.all_objects:#{
        if obj_ent_type(obj) != 'ent_font': continue

        data = obj.SR_data.ent_font[0]

        font=ent_font()
        font.alias = sr_compile_string( data.alias )
        font.variant_start = variant_count
        font.variant_count = 0
        font.glyph_start = glyph_count

        # Glyphs cover a contiguous utf32 range; data.glyphs is assumed to
        # be sorted by utf32 (first/last entries define the range).
        glyph_base = data.glyphs[0].utf32
        glyph_range = data.glyphs[-1].utf32+1 - glyph_base

        font.glyph_utf32_base = glyph_base
        font.glyph_count = glyph_range

        for i in range(len(data.variants)):#{
            data_var = data.variants[i]
            if not data_var.mesh: continue

            mesh = data_var.mesh.data

            variant = ent_font_variant()
            variant.name = sr_compile_string( data_var.tipo )

            # fonts (variants) only support one material each
            mat = None
            if len(mesh.materials) != 0:
                mat = mesh.materials[0]
            variant.material_id = sr_compile_material( mat )

            font.variant_count += 1

            # Partition the mesh into connected triangle islands and compute
            # each island's centroid in the XZ plane (glyph layout plane).
            islands = mesh_utils.mesh_linked_triangles(mesh)
            centroids = [Vector((0,0)) for _ in range(len(islands))]

            for j in range(len(islands)):#{
                for tri in islands[j]:#{
                    centroids[j].x += tri.center[0]
                    centroids[j].y += tri.center[2]
                #}

                centroids[j] /= len(islands[j])
            #}

            for j in range(glyph_range):#{
                data_glyph = data.glyphs[j]
                glyph = ent_glyph()
                glyph.indice_start = len(sr_compile.indice_data)//sizeof(c_uint32)
                glyph.indice_count = 0
                # bounds is (x, y, width, height) in the glyph layout plane.
                glyph.size[0] = data_glyph.bounds[2]
                glyph.size[1] = data_glyph.bounds[3]

                # Vertex de-duplication map, keyed on quantized attributes;
                # scoped per-glyph so each glyph's indices are self-contained.
                vertex_reference = {}

                for k in range(len(islands)):#{
                    # Skip islands whose centroid lies outside this glyph's box.
                    if centroids[k].x < data_glyph.bounds[0] or \
                        centroids[k].x > data_glyph.bounds[0]+data_glyph.bounds[2] or\
                        centroids[k].y < data_glyph.bounds[1] or \
                        centroids[k].y > data_glyph.bounds[1]+data_glyph.bounds[3]:
                    #{
                        continue
                    #}

                    for l in range(len(islands[k])):#{
                        tri = islands[k][l]
                        for m in range(3):#{
                            vert = mesh.vertices[tri.vertices[m]]
                            li = tri.loops[m]
                            vi = mesh.loops[li].vertex_index

                            # Gather vertex information
                            #
                            # Positions are re-based so the glyph's bounds
                            # origin becomes (0,0).
                            co = [vert.co[_] for _ in range(3)]
                            co[0] -= data_glyph.bounds[0]
                            co[2] -= data_glyph.bounds[1]
                            norm = mesh.loops[li].normal
                            uv = (0,0)
                            if mesh.uv_layers: uv = mesh.uv_layers.active.data[li].uv

                            # Quantize attributes to ~4 decimal places so nearly
                            # identical vertices share one index.
                            TOLERENCE = float(10**4)
                            key = (int(co[0]*TOLERENCE+0.5),
                                   int(co[1]*TOLERENCE+0.5),
                                   int(co[2]*TOLERENCE+0.5),
                                   int(norm[0]*TOLERENCE+0.5),
                                   int(norm[1]*TOLERENCE+0.5),
                                   int(norm[2]*TOLERENCE+0.5),
                                   int(uv[0]*TOLERENCE+0.5),
                                   int(uv[1]*TOLERENCE+0.5))

                            if key in vertex_reference:
                                index = vertex_reference[key]
                            else:#{
                                vindex = len(sr_compile.vertex_data)//sizeof(mdl_vert)
                                index = bytearray(c_uint32(vindex))
                                vertex_reference[key] = index
                                v = mdl_vert()
                                # Axis conversion: (x,y,z) -> (x,z,-y).
                                v.co[0] = co[0]
                                v.co[1] = co[2]
                                v.co[2] = -co[1]
                                v.norm[0] = norm[0]
                                v.norm[1] = norm[2]
                                v.norm[2] = -norm[1]
                                v.uv[0] = uv[0]
                                v.uv[1] = uv[1]

                                sr_compile.vertex_data.extend(bytearray(v))
                            #}

                            glyph.indice_count += 1
                            sr_compile.indice_data.extend( index )
                        #}
                    #}
                #}
                sr_ent_push( glyph )
            #}
            sr_ent_push( variant )
        #}
        sr_ent_push( font )
    #}
#}
+
def sr_compile_menus( collection ):
#{
    """Compile every ent_menuitem object in the collection into menu item
       entity records.

       Each item's 'groups' field is a bitmask of the collections the object
       belongs to; bit positions are assigned in discovery order via the
       local 'groups' name list.
    """
    print( "[SR1] Compiling menus" )
    groups = []

    for obj in collection.all_objects:#{
        if obj_ent_type(obj) != 'ent_menuitem': continue
        obj_data = obj.SR_data.ent_menuitem[0]

        bitmask = 0x00000000

        # One bit per user collection; first occurrence of a collection name
        # claims the next free bit.
        for col in obj.users_collection:#{
            name = col.name
            if name not in groups: groups.append( name )
            bitmask |= (0x1 << groups.index(name))
        #}

        item = ent_menuitem()
        item.type = int( obj_data.tipo )
        item.groups = bitmask

        compile_obj_transform( obj, item.transform )
        if obj.type == 'MESH':#{
            item.submesh_start, item.submesh_count, _ = \
                sr_compile_mesh_internal( obj )
        #}

        # Type-specific payload lives in an anonymous union:
        # 1/2 = button, 3 = checkmark, 4 = slider, 5 = page.
        if item.type == 1 or item.type == 2:#{
            item_button = item._anonymous_union.button
            item_button.pstr = sr_compile_string( obj_data.string )
        #}
        elif item.type == 3:#{
            item_checkmark = item._anonymous_union.checkmark
            item_checkmark.pstr_data = sr_compile_string( obj_data.string )
            item_checkmark.id_check = sr_entity_id( obj_data.checkmark )
            # Offset to the checkmark object, axis-converted (x,z,-y).
            delta = obj_data.checkmark.location - obj.location
            item_checkmark.offset[0] = delta[0]
            item_checkmark.offset[1] = delta[2]
            item_checkmark.offset[2] = -delta[1]
        #}
        elif item.type == 4:#{
            item_slider = item._anonymous_union.slider
            item_slider.id_min = sr_entity_id( obj_data.slider_minloc )
            item_slider.id_max = sr_entity_id( obj_data.slider_maxloc )
            item_slider.id_handle = sr_entity_id( obj_data.slider_handle )
            item_slider.pstr_data = sr_compile_string( obj_data.string )
        #}
        elif item.type == 5:#{
            item_page = item._anonymous_union.page
            item_page.pstr_name = sr_compile_string( obj_data.string )
            item_page.id_entrypoint = sr_entity_id( obj_data.newloc )
            item_page.id_viewpoint = sr_entity_id( obj_data.camera )
        #}

        if obj_data.link0:
            item.id_links[0] = sr_entity_id( obj_data.link0 )
        if obj_data.link1:
            item.id_links[1] = sr_entity_id( obj_data.link1 )
        # Sliders (type 4) do not take links 2 and 3.
        if item.type != 4:#{
            if obj_data.link2:
                item.id_links[2] = sr_entity_id( obj_data.link2 )
            if obj_data.link3:
                item.id_links[3] = sr_entity_id( obj_data.link3 )
        #}

        sr_ent_push( item )
    #}
#}
+
def sr_compile_armature( obj ):
#{
    """Compile an armature object: its bones into the bone buffer and,
       when animation packing is enabled, every NLA strip into keyframed
       animations, then append the armature record itself.

       Bone flag bits written here: 0x1 deform, 0x2 has IK, 0x4 cone
       constraint. All positions/axes are converted (x,y,z) -> (x,z,-y).
    """
    node = mdl_armature()
    node.bone_start = len(sr_compile.bone_data)//sizeof(mdl_bone)
    node.bone_count = 0
    node.anim_start = len(sr_compile.anim_data)//sizeof(mdl_animation)
    node.anim_count = 0

    bones = [_ for _ in sr_armature_bones(obj)]
    # Index 0 is reserved (None) so bone id 0 can mean "no parent/target".
    bones_names = [None]+[_.name for _ in bones]

    for b in bones:#{
        bone = mdl_bone()
        if b.use_deform: bone.flags = 0x1
        if b.parent: bone.parent = bones_names.index(b.parent.name)

        bone.collider = int(b.SR_data.collider)

        if bone.collider>0:#{
            # Hitbox min/max with the Y axis flipped and swapped into Z;
            # note min/max Y are exchanged to keep the box well-ordered.
            bone.hitbox[0][0] = b.SR_data.collider_min[0]
            bone.hitbox[0][1] = b.SR_data.collider_min[2]
            bone.hitbox[0][2] = -b.SR_data.collider_max[1]
            bone.hitbox[1][0] = b.SR_data.collider_max[0]
            bone.hitbox[1][1] = b.SR_data.collider_max[2]
            bone.hitbox[1][2] = -b.SR_data.collider_min[1]
        #}

        if b.SR_data.cone_constraint:#{
            bone.flags |= 0x4
            bone.conevx[0] = b.SR_data.conevx[0]
            bone.conevx[1] = b.SR_data.conevx[2]
            bone.conevx[2] = -b.SR_data.conevx[1]
            bone.conevy[0] = b.SR_data.conevy[0]
            bone.conevy[1] = b.SR_data.conevy[2]
            bone.conevy[2] = -b.SR_data.conevy[1]
            bone.coneva[0] = b.SR_data.coneva[0]
            bone.coneva[1] = b.SR_data.coneva[2]
            bone.coneva[2] = -b.SR_data.coneva[1]
            bone.conet = b.SR_data.conet
        #}

        # Head position, and tail stored relative to the head.
        bone.co[0] = b.head_local[0]
        bone.co[1] = b.head_local[2]
        bone.co[2] = -b.head_local[1]
        bone.end[0] = b.tail_local[0] - bone.co[0]
        bone.end[1] = b.tail_local[2] - bone.co[1]
        bone.end[2] = -b.tail_local[1] - bone.co[2]
        bone.pstr_name = sr_compile_string( b.name )

        for c in obj.pose.bones[b.name].constraints:#{
            if c.type == 'IK':#{
                bone.flags |= 0x2
                bone.ik_target = bones_names.index(c.subtarget)
                bone.ik_pole = bones_names.index(c.pole_subtarget)
            #}
        #}

        node.bone_count += 1
        sr_compile.bone_data.extend(bytearray(bone))
    #}

    # Compile anims
    #
    if obj.animation_data and sr_compile.pack_animations: #{
        # So we can restore later
        #
        previous_frame = bpy.context.scene.frame_current
        previous_action = obj.animation_data.action
        POSE_OR_REST_CACHE = obj.data.pose_position
        obj.data.pose_position = 'POSE'

        for NLALayer in obj.animation_data.nla_tracks:#{
            for NLAStrip in NLALayer.strips:#{
                # set active
                #
                # Strips are matched to actions by name.
                for a in bpy.data.actions:#{
                    if a.name == NLAStrip.name:#{
                        obj.animation_data.action = a
                        break
                    #}
                #}

                # Clip to NLA settings
                #
                anim_start = int(NLAStrip.action_frame_start)
                anim_end = int(NLAStrip.action_frame_end)

                # Export strips
                #
                anim = mdl_animation()
                anim.pstr_name = sr_compile_string( NLAStrip.action.name )
                anim.rate = 30.0
                anim.keyframe_start = len(sr_compile.keyframe_data)//\
                                        sizeof(mdl_transform)
                anim.length = anim_end-anim_start

                # NOTE(review): 'i' counts exported keyframes but is not
                # otherwise read in this block.
                i = 0
                # Export the keyframes
                for frame in range(anim_start,anim_end):#{
                    bpy.context.scene.frame_set(frame)

                    for rb in bones:#{
                        pb = obj.pose.bones[rb.name]

                        # relative bone matrix
                        if rb.parent is not None:#{
                            offset_mtx = rb.parent.matrix_local
                            offset_mtx = offset_mtx.inverted_safe() @ \
                                            rb.matrix_local

                            inv_parent = pb.parent.matrix @ offset_mtx
                            inv_parent.invert_safe()
                            fpm = inv_parent @ pb.matrix
                        #}
                        else:#{
                            bone_mtx = rb.matrix.to_4x4()
                            local_inv = rb.matrix_local.inverted_safe()
                            fpm = bone_mtx @ local_inv @ pb.matrix
                        #}

                        loc, rot, sca = fpm.decompose()

                        # rotation
                        lc_m = pb.matrix_channel.to_3x3()
                        if pb.parent is not None:#{
                            smtx = pb.parent.matrix_channel.to_3x3()
                            lc_m = smtx.inverted() @ lc_m
                        #}
                        rq = lc_m.to_quaternion()
                        q_normalize( rq )

                        # Keyframe transform; quaternion components are
                        # reordered (w,x,y,z) -> (x,z,-y,w) for the engine.
                        kf = mdl_transform()
                        kf.co[0] = loc[0]
                        kf.co[1] = loc[2]
                        kf.co[2] = -loc[1]
                        kf.q[0] = rq[1]
                        kf.q[1] = rq[3]
                        kf.q[2] = -rq[2]
                        kf.q[3] = rq[0]
                        kf.s[0] = sca[0]
                        kf.s[1] = sca[1]
                        kf.s[2] = sca[2]
                        sr_compile.keyframe_data.extend(bytearray(kf))

                        i+=1
                    #}
                #}

                # Add to animation buffer
                #
                sr_compile.anim_data.extend(bytearray(anim))
                node.anim_count += 1

                # Report progress
                #
                print( F"[SR] | anim( {NLAStrip.action.name} )" )
            #}
        #}

        # Restore context to how it was before
        #
        bpy.context.scene.frame_set( previous_frame )
        obj.animation_data.action = previous_action
        obj.data.pose_position = POSE_OR_REST_CACHE
    #}

    sr_compile.armature_data.extend(bytearray(node))
#}
+
def sr_ent_push( struct ):
#{
    """Append an entity structure to the byte buffer for its type,
       creating the buffer (and its size record) on first use, and
       return the entity's index within that buffer."""
    key = type(struct).__name__

    if key not in sr_compile.entity_data:#{
        sr_compile.entity_data[ key ] = bytearray()
        sr_compile.entity_info[ key ] = { 'size': sizeof(struct) }
    #}

    buf = sr_compile.entity_data[ key ]
    index = len(buf)//sizeof(struct)
    buf.extend( bytearray(struct) )
    return index
#}
+
def sr_array_title( arr, name, count, size, offset ):
#{
    """Fill in an mdl_array header: the ASCII-encoded name plus the
       item count, item size, and file offset of the array's data."""
    for idx, ch in enumerate( name ):#{
        arr.name[idx] = ord( ch )
    #}
    arr.file_offset = offset
    arr.item_count = count
    arr.item_size = size
#}
+
def hash_djb2(s):
#{
    """Classic djb2 string hash of s, truncated to 32 bits."""
    h = 5381
    for ch in s:#{
        # h*33 is the canonical (h << 5) + h step of djb2.
        h = (h * 33 + ord(ch)) & 0xFFFFFFFF
    #}
    return h
#}
+
def sr_compile( collection ):
#{
    """Compile a Blender collection into a single <collection>.mdl file.

       Pipeline: reset all compiler state (stored as attributes on this
       function), assign entity ids, compile geometry, compile each entity
       type, compile menus/fonts, build routes/traffic per subcollection,
       then serialize header + array index + data arrays to disk.
    """
    print( F"[SR] compiler begin ({collection.name}.mdl)" )

    #settings
    sr_compile.pack_textures = collection.SR_data.pack_textures
    sr_compile.pack_animations = collection.SR_data.animations

    # caches
    sr_compile.string_cache = {}
    sr_compile.mesh_cache = {}
    sr_compile.material_cache = {}
    sr_compile.texture_cache = {}

    # compiled data
    sr_compile.mesh_data = bytearray()
    sr_compile.submesh_data = bytearray()
    sr_compile.vertex_data = bytearray()
    sr_compile.indice_data = bytearray()
    sr_compile.bone_data = bytearray()
    sr_compile.material_data = bytearray()
    sr_compile.armature_data = bytearray()
    sr_compile.anim_data = bytearray()
    sr_compile.keyframe_data = bytearray()
    sr_compile.texture_data = bytearray()

    # just bytes not structures
    sr_compile.string_data = bytearray()
    sr_compile.pack_data = bytearray()

    # variable
    sr_compile.entity_data = {}
    sr_compile.entity_info = {}

    print( F"[SR] assign entity ID's" )
    sr_compile.entities = {}
    sr_compile.entity_ids = {}

    # begin
    # -------------------------------------------------------

    # Intern "null" first so string offset 0 is the null string.
    sr_compile_string( "null" )

    # Pass 1: count meshes and give every entity a per-type index.
    mesh_count = 0
    for obj in collection.all_objects: #{
        if obj.type == 'MESH':#{
            mesh_count += 1
        #}

        ent_type = obj_ent_type( obj )
        if ent_type == 'none': continue

        if ent_type not in sr_compile.entities: sr_compile.entities[ent_type] = []
        sr_compile.entity_ids[obj.name] = len( sr_compile.entities[ent_type] )
        sr_compile.entities[ent_type] += [obj]
    #}

    print( F"[SR] Compiling geometry" )
    i=0
    for obj in collection.all_objects:#{
        if obj.type == 'MESH':#{
            i+=1

            ent_type = obj_ent_type( obj )

            # entity ignore mesh list
            #
            # These entity types compile their own meshes elsewhere.
            if ent_type == 'ent_traffic': continue
            if ent_type == 'ent_font': continue
            if ent_type == 'ent_font_variant': continue
            if ent_type == 'ent_menuitem': continue
            if ent_type == 'ent_objective': continue

            #TODO: This is messy.
            if ent_type == 'ent_gate':#{
                obj_data = obj.SR_data.ent_gate[0]
                if obj_data.custom: continue
            #}
            #--------------------------

            print( F'[SR] {i: 3}/{mesh_count} {obj.name:<40}', end='\r' )
            sr_compile_mesh( obj )
        #}
    #}

    audio_clip_count = 0

    # Pass 2: compile each entity type into its record buffer.
    for ent_type, arr in sr_compile.entities.items():#{
        print(F"[SR] Compiling {len(arr)} {ent_type}{'s' if len(arr)>1 else ''}")

        for i in range(len(arr)):#{
            obj = arr[i]

            print( F"[SR] {i+1: 3}/{len(arr)} {obj.name:<40} ",end='\r' )

            if ent_type == 'mdl_armature': sr_compile_armature(obj)
            elif ent_type == 'ent_light': #{
                light = ent_light()
                compile_obj_transform( obj, light.transform )
                light.daytime = obj.data.SR_data.daytime
                # Light type: 0 = point, 1 = spot.
                if obj.data.type == 'POINT':#{
                    light.type = 0
                #}
                elif obj.data.type == 'SPOT':#{
                    light.type = 1
                    light.angle = obj.data.spot_size*0.5
                #}
                light.range = obj.data.cutoff_distance
                light.colour[0] = obj.data.color[0]
                light.colour[1] = obj.data.color[1]
                light.colour[2] = obj.data.color[2]
                # Alpha channel carries the light energy.
                light.colour[3] = obj.data.energy
                sr_ent_push( light )
            #}
            elif ent_type == 'ent_camera': #{
                cam = ent_camera()
                compile_obj_transform( obj, cam.transform )
                cam.fov = obj.data.angle * 45.0
                sr_ent_push(cam)
            #}
            elif ent_type == 'ent_gate': #{
                gate = ent_gate()
                obj_data = obj.SR_data.ent_gate[0]
                mesh_data = obj.data.SR_data.ent_gate[0]

                # Gate flag bits: 0x1 linked target, 0x2 nonlocal,
                # 0x4 flip, 0x8 custom mesh, 0x10 locked.
                flags = 0x0000

                if obj_data.tipo == 'default':#{
                    if obj_data.target:#{
                        gate.target = sr_compile.entity_ids[obj_data.target.name]
                        flags |= 0x0001
                    #}
                #}
                elif obj_data.tipo == 'nonlocal':#{
                    gate.target = 0
                    gate.key = sr_compile_string(obj_data.key)
                    flags |= 0x0002
                #}

                if obj_data.flip: flags |= 0x0004
                if obj_data.custom:#{
                    flags |= 0x0008
                    gate.submesh_start, gate.submesh_count, _ = \
                        sr_compile_mesh_internal( obj )
                #}
                if obj_data.locked: flags |= 0x0010
                gate.flags = flags

                gate.dimensions[0] = mesh_data.dimensions[0]
                gate.dimensions[1] = mesh_data.dimensions[1]
                gate.dimensions[2] = mesh_data.dimensions[2]

                # Endpoint transforms: index 0 is this gate, index 1 its
                # target (identity-ish defaults when no target).
                q = [obj.matrix_local.to_quaternion(), (0,0,0,1)]
                co = [obj.matrix_world @ Vector((0,0,0)), (0,0,0)]

                if obj_data.target:#{
                    q[1] = obj_data.target.matrix_local.to_quaternion()
                    co[1]= obj_data.target.matrix_world @ Vector((0,0,0))
                #}

                # Setup transform
                #
                for x in range(2):#{
                    gate.co[x][0] = co[x][0]
                    gate.co[x][1] = co[x][2]
                    gate.co[x][2] = -co[x][1]
                    gate.q[x][0] = q[x][1]
                    gate.q[x][1] = q[x][3]
                    gate.q[x][2] = -q[x][2]
                    gate.q[x][3] = q[x][0]
                #}

                sr_ent_push( gate )
            #}
            elif ent_type == 'ent_spawn': #{
                spawn = ent_spawn()
                compile_obj_transform( obj, spawn.transform )
                obj_data = obj.SR_data.ent_spawn[0]
                spawn.pstr_name = sr_compile_string( obj_data.alias )
                sr_ent_push( spawn )
            #}
            elif ent_type == 'ent_water':#{
                water = ent_water()
                compile_obj_transform( obj, water.transform )
                water.max_dist = 0.0
                sr_ent_push( water )
            #}
            elif ent_type == 'ent_audio':#{
                obj_data = obj.SR_data.ent_audio[0]
                audio = ent_audio()
                compile_obj_transform( obj, audio.transform )
                audio.clip_start = audio_clip_count
                audio.clip_count = len(obj_data.files)
                audio_clip_count += audio.clip_count
                audio.max_channels = obj_data.max_channels
                audio.volume = obj_data.volume

                # TODO flags:
                #  - allow/disable doppler
                #  - channel group tags with random colours
                #  - transition properties

                if obj_data.flag_loop: audio.flags |= 0x1
                if obj_data.flag_nodoppler: audio.flags |= 0x2
                if obj_data.flag_3d: audio.flags |= 0x4
                if obj_data.flag_auto: audio.flags |= 0x8
                # Format bits; '2' means file data is packed into the mdl.
                if obj_data.formato == '0': audio.flags |= 0x000
                elif obj_data.formato == '1': audio.flags |= 0x400
                elif obj_data.formato == '2': audio.flags |= 0x1000

                audio.channel_behaviour = int(obj_data.channel_behaviour)
                if audio.channel_behaviour >= 1:#{
                    audio.group = obj_data.group
                #}
                if audio.channel_behaviour == 2:#{
                    audio.crossfade = obj_data.transition_duration
                #}
                audio.probability_curve = int(obj_data.probability_curve)

                for ci in range(audio.clip_count):#{
                    entry = obj_data.files[ci]
                    clip = ent_audio_clip()
                    clip.probability = entry.probability
                    if obj_data.formato == '2':#{
                        sr_pack_file( clip._anon.file, '', vg_str_bin(entry.path) )
                    #}
                    else:#{
                        clip._anon.file.path = sr_compile_string( entry.path )
                        clip._anon.file.pack_offset = 0
                        clip._anon.file.pack_size = 0
                    #}
                    sr_ent_push( clip )
                #}
                sr_ent_push( audio )
            #}
            elif ent_type == 'ent_volume':#{
                obj_data = obj.SR_data.ent_volume[0]
                volume = ent_volume()
                volume.type = int(obj_data.subtype)
                compile_obj_transform( obj, volume.transform )

                if obj_data.target:#{
                    volume.target = sr_entity_id( obj_data.target )
                    volume._anon.trigger.event = obj_data.target_event
                #}

                sr_ent_push(volume)
            #}
            elif ent_type == 'ent_marker':#{
                marker = ent_marker()
                marker.name = sr_compile_string( obj.SR_data.ent_marker[0].alias )
                compile_obj_transform( obj, marker.transform )
                sr_ent_push(marker)
            #}
            elif ent_type == 'ent_skateshop':#{
                skateshop = ent_skateshop()
                obj_data = obj.SR_data.ent_skateshop[0]
                # Shop type: 0 = boards, 1 = character, 2 = worlds.
                skateshop.type = int(obj_data.tipo)
                if skateshop.type == 0:#{
                    boardshop = skateshop._anonymous_union.boards
                    boardshop.id_display = sr_entity_id( obj_data.mark_display )
                    boardshop.id_info = sr_entity_id( obj_data.mark_info )
                    boardshop.id_rack = sr_entity_id( obj_data.mark_rack )
                #}
                elif skateshop.type == 1:#{
                    charshop = skateshop._anonymous_union.character
                    charshop.id_display = sr_entity_id( obj_data.mark_display )
                    charshop.id_info = sr_entity_id( obj_data.mark_info )
                #}
                elif skateshop.type == 2:#{
                    worldshop = skateshop._anonymous_union.worlds
                    worldshop.id_display = sr_entity_id( obj_data.mark_display )
                    worldshop.id_info = sr_entity_id( obj_data.mark_info )
                #}
                skateshop.id_camera = sr_entity_id( obj_data.cam )
                compile_obj_transform( obj, skateshop.transform )
                sr_ent_push(skateshop)
            #}
            elif ent_type == 'ent_swspreview':#{
                workshop_preview = ent_swspreview()
                obj_data = obj.SR_data.ent_swspreview[0]
                workshop_preview.id_display = sr_entity_id( obj_data.mark_display )
                workshop_preview.id_display1 = sr_entity_id( obj_data.mark_display1)
                workshop_preview.id_camera = sr_entity_id( obj_data.cam )
                sr_ent_push( workshop_preview )
            #}
            elif ent_type == 'ent_worldinfo':#{
                worldinfo = ent_worldinfo()
                obj_data = obj.SR_data.ent_worldinfo[0]
                worldinfo.pstr_name = sr_compile_string( obj_data.name )
                worldinfo.pstr_author = sr_compile_string( obj_data.author )
                worldinfo.pstr_desc = sr_compile_string( obj_data.desc )
                worldinfo.timezone = obj_data.timezone
                sr_ent_push( worldinfo )
            #}
            elif ent_type == 'ent_ccmd':#{
                ccmd = ent_ccmd()
                obj_data = obj.SR_data.ent_ccmd[0]
                ccmd.pstr_command = sr_compile_string( obj_data.command )
                sr_ent_push( ccmd )
            #}
            elif ent_type == 'ent_objective':#{
                objective = ent_objective()
                obj_data = obj.SR_data.ent_objective[0]
                objective.id_next = sr_entity_id( obj_data.proxima )
                objective.id_win = sr_entity_id( obj_data.target )
                objective.win_event = obj_data.target_event
                objective.filter = int(obj_data.filtrar)
                objective.filter2 = 0
                objective.time_limit = obj_data.time_limit

                compile_obj_transform( obj, objective.transform )
                # Objectives own their mesh (skipped in the geometry pass).
                objective.submesh_start, objective.submesh_count, _ = \
                    sr_compile_mesh_internal( obj )

                sr_ent_push( objective )
            #}
            elif ent_type == 'ent_challenge':#{
                challenge = ent_challenge()
                obj_data = obj.SR_data.ent_challenge[0]
                compile_obj_transform( obj, challenge.transform )
                challenge.pstr_alias = sr_compile_string( obj_data.alias )
                challenge.target = sr_entity_id( obj_data.target )
                challenge.target_event = obj_data.target_event
                challenge.reset = sr_entity_id( obj_data.reset )
                challenge.reset_event = obj_data.reset_event
                challenge.first = sr_entity_id( obj_data.first )
                challenge.flags = 0x00
                challenge.camera = sr_entity_id( obj_data.camera )
                if obj_data.time_limit: challenge.flags |= 0x01
                challenge.status = 0
                sr_ent_push( challenge )
            #}
            elif ent_type == 'ent_relay':#{
                relay = ent_relay()
                obj_data = obj.SR_data.ent_relay[0]
                # targets[n] = (entity id, event id).
                relay.targets[0][0] = sr_entity_id( obj_data.target0 )
                relay.targets[1][0] = sr_entity_id( obj_data.target1 )
                relay.targets[2][0] = sr_entity_id( obj_data.target2 )
                relay.targets[3][0] = sr_entity_id( obj_data.target3 )
                relay.targets[0][1] = obj_data.target0_event
                relay.targets[1][1] = obj_data.target1_event
                relay.targets[2][1] = obj_data.target2_event
                relay.targets[3][1] = obj_data.target3_event
                sr_ent_push( relay )
            #}
            elif ent_type == 'ent_cubemap':#{
                cubemap = ent_cubemap()
                co = obj.matrix_world @ Vector((0,0,0))
                cubemap.co[0] = co[0]
                cubemap.co[1] = co[2]
                cubemap.co[2] = -co[1]
                cubemap.resolution = 0
                cubemap.live = 60
                sr_ent_push( cubemap )
            #}
        #}
    #}

    sr_compile_menus( collection )
    sr_compile_fonts( collection )

    # Depth-first generator over this collection and all nested children.
    def _children( col ):#{
        yield col
        for c in col.children:#{
            yield from _children(c)
        #}
    #}

    checkpoint_count = 0
    pathindice_count = 0
    routenode_count = 0

    # Routes/traffic are solved per-subcollection so each keeps its own
    # node graph; counters above carry global offsets across them.
    for col in _children(collection):#{
        print( F"Adding routes for subcollection: {col.name}" )
        route_gates = []
        route_curves = []
        routes = []
        traffics = []

        for obj in col.objects:#{
            if obj.type == 'ARMATURE': pass
            else:#{
                ent_type = obj_ent_type( obj )

                if ent_type == 'ent_gate':
                    route_gates += [obj]
                elif ent_type == 'ent_route_node':#{
                    if obj.type == 'CURVE':#{
                        route_curves += [obj]
                    #}
                #}
                elif ent_type == 'ent_route':
                    routes += [obj]
                elif ent_type == 'ent_traffic':
                    traffics += [obj]
            #}
        #}

        dij = create_node_graph( route_curves, route_gates )

        for obj in routes:#{
            obj_data = obj.SR_data.ent_route[0]
            route = ent_route()
            route.pstr_name = sr_compile_string( obj_data.alias )
            route.checkpoints_start = checkpoint_count
            route.checkpoints_count = 0

            for ci in range(3):
                route.colour[ci] = obj_data.colour[ci]
            route.colour[3] = 1.0

            compile_obj_transform( obj, route.transform )
            checkpoints = obj_data.gates

            for i in range(len(checkpoints)):#{
                gi = checkpoints[i].target
                gj = checkpoints[(i+1)%len(checkpoints)].target
                gate = gi

                # Follow the gate's link so pathfinding starts from the
                # destination side of the gate.
                if gi:#{
                    dest = gi.SR_data.ent_gate[0].target
                    gi = dest
                #}

                if gi==gj: continue # error?
                if not gi or not gj: continue

                checkpoint = ent_checkpoint()
                checkpoint.gate_index = sr_compile.entity_ids[gate.name]
                checkpoint.path_start = pathindice_count
                checkpoint.path_count = 0

                path = solve_graph( dij, gi.name, gj.name )

                if path:#{
                    for pi in range(len(path)):#{
                        pathindice = ent_path_index()
                        pathindice.index = routenode_count + path[pi]
                        sr_ent_push( pathindice )

                        checkpoint.path_count += 1
                        pathindice_count += 1
                    #}
                #}

                sr_ent_push( checkpoint )
                route.checkpoints_count += 1
                checkpoint_count += 1
            #}

            sr_ent_push( route )
        #}

        for obj in traffics:#{
            traffic = ent_traffic()
            compile_obj_transform( obj, traffic.transform )
            traffic.submesh_start, traffic.submesh_count, _ = \
                sr_compile_mesh_internal( obj )

            # find best subsection

            # NOTE(review): graph_keys is unused below.
            graph_keys = list(dij.graph)
            min_dist = 100.0
            best_point = 0

            # Nearest graph point to the traffic object's location.
            for j in range(len(dij.points)):#{
                point = dij.points[j]
                dist = (point-obj.location).magnitude

                if dist < min_dist:#{
                    min_dist = dist
                    best_point = j
                #}
            #}

            # scan to each edge
            # Walk the subsection links (-1 terminates) to find the run's
            # first and last node around the chosen point.
            best_begin = best_point
            best_end = best_point

            while True:#{
                map0 = dij.subsections[best_begin]
                if map0[1] == -1: break
                best_begin = map0[1]
            #}
            while True:#{
                map1 = dij.subsections[best_end]
                if map1[2] == -1: break
                best_end = map1[2]
            #}

            traffic.start_node = routenode_count + best_begin
            traffic.node_count = best_end - best_begin
            traffic.index = best_point - best_begin
            traffic.speed = obj.SR_data.ent_traffic[0].speed
            traffic.t = 0.0

            sr_ent_push(traffic)
        #}

        for point in dij.points:#{
            rn = ent_route_node()
            rn.co[0] = point[0]
            rn.co[1] = point[2]
            rn.co[2] = -point[1]
            sr_ent_push( rn )
        #}

        routenode_count += len(dij.points)
    #}

    print( F"[SR] Writing file" )

    file_array_instructions = {}
    file_offset = 0

    # Queue a named array for serialization; offsets are 8-byte aligned.
    def _write_array( name, item_size, data ):#{
        nonlocal file_array_instructions, file_offset

        count = len(data)//item_size
        file_array_instructions[name] = {'count':count, 'size':item_size,\
                                            'data':data, 'offset': file_offset}
        file_offset += len(data)
        file_offset = int_align_to( file_offset, 8 )
    #}

    _write_array( 'strings', 1, sr_compile.string_data )
    _write_array( 'mdl_mesh', sizeof(mdl_mesh), sr_compile.mesh_data )
    _write_array( 'mdl_submesh', sizeof(mdl_submesh), sr_compile.submesh_data )
    _write_array( 'mdl_material', sizeof(mdl_material), sr_compile.material_data)
    _write_array( 'mdl_texture', sizeof(mdl_texture), sr_compile.texture_data)
    _write_array( 'mdl_armature', sizeof(mdl_armature), sr_compile.armature_data)
    _write_array( 'mdl_bone', sizeof(mdl_bone), sr_compile.bone_data )

    for name, buffer in sr_compile.entity_data.items():#{
        _write_array( name, sr_compile.entity_info[name]['size'], buffer )
    #}

    _write_array( 'mdl_animation', sizeof(mdl_animation), sr_compile.anim_data)
    _write_array( 'mdl_keyframe', sizeof(mdl_transform),sr_compile.keyframe_data)
    _write_array( 'mdl_vert', sizeof(mdl_vert), sr_compile.vertex_data )
    _write_array( 'mdl_indice', sizeof(c_uint32), sr_compile.indice_data )
    _write_array( 'pack', 1, sr_compile.pack_data )

    header_size = int_align_to( sizeof(mdl_header), 8 )
    index_size = int_align_to( sizeof(mdl_array)*len(file_array_instructions),8 )

    folder = bpy.path.abspath(bpy.context.scene.SR_data.export_dir)
    path = F"{folder}{collection.name}.mdl"
    print( path )

    os.makedirs(os.path.dirname(path),exist_ok=True)
    fp = open( path, "wb" )
    header = mdl_header()
    header.version = MDL_VERSION_NR
    sr_array_title( header.arrays, \
                    'index', len(file_array_instructions), \
                    sizeof(mdl_array), header_size )

    fp.write( bytearray_align_to( bytearray(header), 8 ) )

    print( F'[SR] {"name":>16}| count | offset' )
    # Array index: one mdl_array per queued buffer, offsets rebased past
    # the header and the index itself.
    index = bytearray()
    for name,info in file_array_instructions.items():#{
        arr = mdl_array()
        offset = info['offset'] + header_size + index_size
        sr_array_title( arr, name, info['count'], info['size'], offset )
        index.extend( bytearray(arr) )

        print( F'[SR] {name:>16}| {info["count"]: 8} '+\
               F' 0x{info["offset"]:02x}' )
    #}
    fp.write( bytearray_align_to( index, 8 ) )
    #bytearray_print_hex( index )

    for name,info in file_array_instructions.items():#{
        fp.write( bytearray_align_to( info['data'], 8 ) )
    #}

    fp.close()

    print( '[SR] done' )
#}
+
class SR_SCENE_SETTINGS(bpy.types.PropertyGroup):
#{
    """Per-scene add-on settings (attached as bpy.context.scene.SR_data)."""

    # Whether hidden objects participate (consumed elsewhere in the add-on).
    use_hidden: bpy.props.BoolProperty( name="use hidden", default=False )
    # Output directory for compiled .mdl files (see sr_compile()).
    export_dir: bpy.props.StringProperty( name="Export Dir", subtype='DIR_PATH' )
    # Toggle for viewport gizmo drawing.
    gizmos: bpy.props.BoolProperty( name="Draw Gizmos", default=False )

    # Which UI panel tab is active.
    panel: bpy.props.EnumProperty(
            name='Panel',
            description='',
            items=[
                ('EXPORT', 'Export', '', 'MOD_BUILD',0),
                ('ENTITY', 'Entity', '', 'MONKEY',1),
                ('SETTINGS', 'Settings', 'Settings', 'PREFERENCES',2),
            ],
            )
#}
+
class SR_COLLECTION_SETTINGS(bpy.types.PropertyGroup):
#{
    """Per-collection export settings (read by sr_compile())."""

    # Embed textures into the .mdl pack instead of referencing them.
    pack_textures: bpy.props.BoolProperty( name="Pack Textures", default=False )
    # Export NLA animations (see sr_compile_armature()).
    animations: bpy.props.BoolProperty( name="Export animation", default=True)
#}
+
def sr_get_mirror_bone( bones ):
#{
    """Locate the bone whose name mirrors the active bone's trailing
       side suffix ('L' <-> 'R'). Returns None when the active bone has
       no side suffix or no counterpart exists."""
    active_name = bones.active.name
    suffix = active_name[-1:]

    if suffix == 'L': target = active_name[:-1] + 'R'
    elif suffix == 'R': target = active_name[:-1] + 'L'
    else: return None

    for bone in bones:#{
        if bone.name == target:
            return bone
    #}

    return None
#}
+
class SR_MIRROR_BONE_X(bpy.types.Operator):
#{
    """Operator: copy the active bone's SR collider/cone settings onto its
       L/R mirror counterpart, flipping values across the X mirror plane."""

    bl_idname="skaterift.mirror_bone"
    bl_label="Mirror bone attributes - SkateRift"

    def execute(_,context):
    #{
        active_object = context.active_object
        bones = active_object.data.bones
        a = bones.active
        b = sr_get_mirror_bone( bones )

        # No-op when the active bone has no mirror counterpart.
        if not b: return {'FINISHED'}

        b.SR_data.collider = a.SR_data.collider

        # Copy a 3-vector with the Y component negated (mirror across X-Y?
        # the flipped axis here is index 1).
        def _v3copyflipy( a, b ):#{
            b[0] = a[0]
            b[1] = -a[1]
            b[2] = a[2]
        #}

        _v3copyflipy( a.SR_data.collider_min, b.SR_data.collider_min )
        _v3copyflipy( a.SR_data.collider_max, b.SR_data.collider_max )
        # Re-order min/max on the flipped axis so the box stays well-formed.
        b.SR_data.collider_min[1] = -a.SR_data.collider_max[1]
        b.SR_data.collider_max[1] = -a.SR_data.collider_min[1]

        b.SR_data.cone_constraint = a.SR_data.cone_constraint

        # NOTE(review): conevx and conevy are deliberately cross-assigned
        # (x->y, y->x) — presumably to keep the mirrored cone frame
        # right-handed; confirm against the engine's cone convention.
        _v3copyflipy( a.SR_data.conevx, b.SR_data.conevy )
        _v3copyflipy( a.SR_data.conevy, b.SR_data.conevx )
        _v3copyflipy( a.SR_data.coneva, b.SR_data.coneva )

        b.SR_data.conet = a.SR_data.conet

        # redraw
        # Touching a property forces the viewport to refresh.
        ob = bpy.context.scene.objects[0]
        ob.hide_render = ob.hide_render
        return {'FINISHED'}
    #}
#}
+
+class SR_COMPILE(bpy.types.Operator):
+#{
+ bl_idname="skaterift.compile_all"
+ bl_label="Compile All"
+
+ def execute(_,context):