+ checkpoint = ent_checkpoint()
+ checkpoint.gate_index = sr_compile.entity_ids[gate.name]
+ checkpoint.path_start = pathindice_count
+ checkpoint.path_count = 0
+
+ path = solve_graph( dij, gi.name, gj.name )
+
+ if path:#{
+ for pi in range(len(path)):#{
+ pathindice = ent_path_index()
+ pathindice.index = routenode_count + path[pi]
+ sr_ent_push( pathindice )
+
+ checkpoint.path_count += 1
+ pathindice_count += 1
+ #}
+ #}
+
+ sr_ent_push( checkpoint )
+ route.checkpoints_count += 1
+ checkpoint_count += 1
+ #}
+
+ sr_ent_push( route )
+ #}
+
+ for obj in traffics:#{
+ traffic = ent_traffic()
+ compile_obj_transform( obj, traffic.transform )
+ traffic.submesh_start, traffic.submesh_count, _ = \
+ sr_compile_mesh_internal( obj )
+
+ # find best subsection
+
+ graph_keys = list(dij.graph)
+ min_dist = 100.0
+ best_point = 0
+
+ for j in range(len(dij.points)):#{
+ point = dij.points[j]
+ dist = (point-obj.location).magnitude
+
+ if dist < min_dist:#{
+ min_dist = dist
+ best_point = j
+ #}
+ #}
+
+ # scan to each edge
+ best_begin = best_point
+ best_end = best_point
+
+ while True:#{
+ map0 = dij.subsections[best_begin]
+ if map0[1] == -1: break
+ best_begin = map0[1]
+ #}
+ while True:#{
+ map1 = dij.subsections[best_end]
+ if map1[2] == -1: break
+ best_end = map1[2]
+ #}
+
+ traffic.start_node = routenode_count + best_begin
+ traffic.node_count = best_end - best_begin
+ traffic.index = best_point - best_begin
+ traffic.speed = obj.SR_data.ent_traffic[0].speed
+ traffic.t = 0.0
+
+ sr_ent_push(traffic)
+ #}
+
+ for point in dij.points:#{
+ rn = ent_route_node()
+ rn.co[0] = point[0]
+ rn.co[1] = point[2]
+ rn.co[2] = -point[1]
+ sr_ent_push( rn )
+ #}
+
+ routenode_count += len(dij.points)
+ #}
+
+ print( F"[SR] Writing file" )
+
+ file_array_instructions = {}
+ file_offset = 0
+
  def _write_array( name, item_size, data ):#{
    """Register a named array in the output layout and advance the running
    file offset (8-byte aligned). Nothing is written here; the actual
    serialisation happens later from file_array_instructions."""
    nonlocal file_array_instructions, file_offset

    # element count derives from raw byte length; assumes len(data) is an
    # exact multiple of item_size — TODO confirm for all callers
    count = len(data)//item_size
    file_array_instructions[name] = {'count':count, 'size':item_size,\
                                     'data':data, 'offset': file_offset}
    file_offset += len(data)
    file_offset = int_align_to( file_offset, 8 )
  #}
+
+ _write_array( 'strings', 1, sr_compile.string_data )
+ _write_array( 'mdl_mesh', sizeof(mdl_mesh), sr_compile.mesh_data )
+ _write_array( 'mdl_submesh', sizeof(mdl_submesh), sr_compile.submesh_data )
+ _write_array( 'mdl_material', sizeof(mdl_material), sr_compile.material_data)
+ _write_array( 'mdl_texture', sizeof(mdl_texture), sr_compile.texture_data)
+ _write_array( 'mdl_armature', sizeof(mdl_armature), sr_compile.armature_data)
+ _write_array( 'mdl_bone', sizeof(mdl_bone), sr_compile.bone_data )
+
+ for name, buffer in sr_compile.entity_data.items():#{
+ _write_array( name, sr_compile.entity_info[name]['size'], buffer )
+ #}
+
+ _write_array( 'mdl_animation', sizeof(mdl_animation), sr_compile.anim_data)
+ _write_array( 'mdl_keyframe', sizeof(mdl_transform),sr_compile.keyframe_data)
+ _write_array( 'mdl_vert', sizeof(mdl_vert), sr_compile.vertex_data )
+ _write_array( 'mdl_indice', sizeof(c_uint32), sr_compile.indice_data )
+ _write_array( 'pack', 1, sr_compile.pack_data )
+
+ header_size = int_align_to( sizeof(mdl_header), 8 )
+ index_size = int_align_to( sizeof(mdl_array)*len(file_array_instructions),8 )
+
+ folder = bpy.path.abspath(bpy.context.scene.SR_data.export_dir)
+ path = F"{folder}{collection.name}.mdl"
+ print( path )
+
+ os.makedirs(os.path.dirname(path),exist_ok=True)
+ fp = open( path, "wb" )
+ header = mdl_header()
+ header.version = 101
+ sr_array_title( header.arrays, \
+ 'index', len(file_array_instructions), \
+ sizeof(mdl_array), header_size )
+
+ fp.write( bytearray_align_to( bytearray(header), 8 ) )
+
+ print( F'[SR] {"name":>16}| count | offset' )
+ index = bytearray()
+ for name,info in file_array_instructions.items():#{
+ arr = mdl_array()
+ offset = info['offset'] + header_size + index_size
+ sr_array_title( arr, name, info['count'], info['size'], offset )
+ index.extend( bytearray(arr) )
+
+ print( F'[SR] {name:>16}| {info["count"]: 8} '+\
+ F' 0x{info["offset"]:02x}' )
+ #}
+ fp.write( bytearray_align_to( index, 8 ) )
+ #bytearray_print_hex( index )
+
+ for name,info in file_array_instructions.items():#{
+ fp.write( bytearray_align_to( info['data'], 8 ) )
+ #}
+
+ fp.close()
+
+ print( '[SR] done' )
+#}
+
class SR_SCENE_SETTINGS(bpy.types.PropertyGroup):
#{
  # Per-scene exporter settings; read elsewhere in this file as
  # context.scene.SR_data.

  # also compile collections hidden in the viewport
  use_hidden: bpy.props.BoolProperty( name="use hidden", default=False )
  # destination directory for compiled .mdl files
  export_dir: bpy.props.StringProperty( name="Export Dir", subtype='DIR_PATH' )
  # toggle for gizmo drawing (see the SETTINGS tab of SR_INTERFACE)
  gizmos: bpy.props.BoolProperty( name="Draw Gizmos", default=True )

  # which tab of the sidebar panel is active
  panel: bpy.props.EnumProperty(
    name='Panel',
    description='',
    items=[
      ('EXPORT', 'Export', '', 'MOD_BUILD',0),
      ('ENTITY', 'Entity', '', 'MONKEY',1),
      ('SETTINGS', 'Settings', 'Settings', 'PREFERENCES',2),
    ],
  )
#}
+
class SR_COLLECTION_SETTINGS(bpy.types.PropertyGroup):
#{
  # Per-collection export options; read as collection.SR_data.

  # presumably embeds texture data into the .mdl — confirm in compiler
  pack_textures: bpy.props.BoolProperty( name="Pack Textures", default=False )
  # include armature animation data in the export
  animations: bpy.props.BoolProperty( name="Export animation", default=True)
#}
+
def sr_get_mirror_bone( bones ):
#{
  """Find the left/right counterpart of the active bone.

  The counterpart is the bone whose name matches the active bone's name
  with its trailing 'L'/'R' swapped. Returns None when the active bone
  has no such suffix or no matching bone exists."""
  name = bones.active.name
  suffix = name[-1:]

  if suffix not in ('L','R'): return None
  partner = name[:-1] + ('R' if suffix == 'L' else 'L')

  return next( (b for b in bones if b.name == partner), None )
#}
+
class SR_MIRROR_BONE_X(bpy.types.Operator):
#{
  # Copies collider and cone-constraint data from the active bone onto its
  # L/R name-mirrored counterpart, flipping values across the Y axis.
  bl_idname="skaterift.mirror_bone"
  bl_label="Mirror bone attributes - SkateRift"

  def execute(_,context):
  #{
    active_object = context.active_object
    bones = active_object.data.bones
    a = bones.active
    b = sr_get_mirror_bone( bones )

    # nothing to do when the active bone has no 'L'/'R' suffixed partner
    if not b: return {'FINISHED'}

    b.SR_data.collider = a.SR_data.collider

    # copy a vec3, negating the Y component
    def _v3copyflipy( a, b ):#{
      b[0] = a[0]
      b[1] = -a[1]
      b[2] = a[2]
    #}

    _v3copyflipy( a.SR_data.collider_min, b.SR_data.collider_min )
    _v3copyflipy( a.SR_data.collider_max, b.SR_data.collider_max )
    # mirroring exchanges min/max on the flipped axis
    b.SR_data.collider_min[1] = -a.SR_data.collider_max[1]
    b.SR_data.collider_max[1] = -a.SR_data.collider_min[1]

    b.SR_data.cone_constraint = a.SR_data.cone_constraint

    # NOTE(review): conevx/conevy are deliberately cross-assigned here —
    # presumably the cone basis handedness flips under mirroring; confirm
    _v3copyflipy( a.SR_data.conevx, b.SR_data.conevy )
    _v3copyflipy( a.SR_data.conevy, b.SR_data.conevx )
    _v3copyflipy( a.SR_data.coneva, b.SR_data.coneva )

    b.SR_data.conet = a.SR_data.conet

    # redraw: self-assignment forces Blender to refresh the view
    ob = bpy.context.scene.objects[0]
    ob.hide_render = ob.hide_render
    return {'FINISHED'}
  #}
#}
+
class SR_COMPILE(bpy.types.Operator):
#{
  # Compiles every collection under the 'export' layer collection that is
  # visible (or all of them when scene.SR_data.use_hidden is enabled).
  bl_idname="skaterift.compile_all"
  bl_label="Compile All"

  def execute(_,context):
  #{
    sr_data = bpy.context.scene.SR_data
    export_group = bpy.context.view_layer.layer_collection.children["export"]

    for child in export_group.children:#{
      # skip hidden collections unless explicitly requested
      if child.hide_viewport and not sr_data.use_hidden: continue
      sr_compile( bpy.data.collections[child.name] )
    #}

    return {'FINISHED'}
  #}
#}
+
class SR_COMPILE_THIS(bpy.types.Operator):
#{
  # Compiles only the collection currently active in the outliner.
  bl_idname="skaterift.compile_this"
  bl_label="Compile This collection"

  def execute(_,context):
  #{
    sr_compile( bpy.context.collection )
    return {'FINISHED'}
  #}
#}
+
class SR_INTERFACE(bpy.types.Panel):
#{
  # Main Skate Rift sidebar panel (3D view, N-panel). Hosts the Export,
  # Entity and Settings tabs, selected via scene.SR_data.panel.
  bl_idname = "VIEW3D_PT_skate_rift"
  bl_label = "Skate Rift"
  bl_space_type = 'VIEW_3D'
  bl_region_type = 'UI'
  bl_category = "Skate Rift"

  def draw(_, context):
  #{
    # Compiler section

    # tab selector row
    row = _.layout.row()
    row.scale_y = 1.75
    row.prop( context.scene.SR_data, 'panel', expand=True )

    if context.scene.SR_data.panel == 'SETTINGS': #{
      _.layout.prop( context.scene.SR_data, 'gizmos' )
    #}
    elif context.scene.SR_data.panel == 'EXPORT': #{
      _.layout.prop( context.scene.SR_data, "export_dir" )
      col = bpy.context.collection

      # count exportable collections under the 'export' layer collection
      # and check whether the active collection is among them
      found_in_export = False
      export_count = 0
      view_layer = bpy.context.view_layer
      for c1 in view_layer.layer_collection.children["export"].children: #{
        if not c1.hide_viewport or bpy.context.scene.SR_data.use_hidden:
          export_count += 1

        if c1.name == col.name: #{
          found_in_export = True
        #}
      #}

      box = _.layout.box()
      row = box.row()
      row.alignment = 'CENTER'
      row.scale_y = 1.5

      if found_in_export: #{
        # per-collection options plus single-collection compile button
        row.label( text=col.name + ".mdl" )
        box.prop( col.SR_data, "pack_textures" )
        box.prop( col.SR_data, "animations" )
        box.operator( "skaterift.compile_this" )
      #}
      else: #{
        row.enabled=False
        row.label( text=col.name )

        row = box.row()
        row.enabled=False
        row.alignment = 'CENTER'
        row.scale_y = 1.5
        row.label( text="This collection is not in the export group" )
      #}

      # compile-all controls; disabled when nothing is exportable
      box = _.layout.box()
      row = box.row()

      split = row.split( factor=0.3, align=True )
      split.prop( context.scene.SR_data, "use_hidden", text="hidden" )

      row1 = split.row()
      if export_count == 0:
        row1.enabled=False
      row1.operator( "skaterift.compile_all", \
                     text=F"Compile all ({export_count} collections)" )
    #}
    elif context.scene.SR_data.panel == 'ENTITY': #{
      active_object = context.active_object
      if not active_object: return

      _.layout.operator( 'skaterift.copy_entity_data', \
            text=F'Copy entity data to {len(context.selected_objects)-1} '+\
                 F'other objects' )

      box = _.layout.box()
      row = box.row()
      row.alignment = 'CENTER'
      row.label( text=active_object.name )
      row.scale_y = 1.5

      # draw a property collection either through the type's custom
      # sr_inspector hook, or generically from its annotations
      def _draw_prop_collection( data ): #{
        nonlocal box
        row = box.row()
        row.alignment = 'CENTER'
        row.enabled = False
        row.scale_y = 1.5
        row.label( text=F'{data[0]}' )

        if hasattr(type(data[0]),'sr_inspector'):#{
          type(data[0]).sr_inspector( box, data )
        #}
        else:#{
          for a in data[0].__annotations__:
            box.prop( data[0], a )
        #}
      #}

      if active_object.type == 'ARMATURE': #{
        if active_object.mode == 'POSE': #{
          # bone-level properties, plus a mirror shortcut when an L/R
          # counterpart of the active bone exists
          bones = active_object.data.bones
          mb = sr_get_mirror_bone( bones )
          if mb:#{
            box.operator( "skaterift.mirror_bone", \
                          text=F'Mirror attributes to {mb.name}' )
          #}

          _draw_prop_collection( [bones.active.SR_data ] )
        #}
        else: #{
          row = box.row()
          row.alignment='CENTER'
          row.scale_y=2.0
          row.enabled=False
          row.label( text="Enter pose mode to modify bone properties" )
        #}
      #}
      elif active_object.type == 'LIGHT': #{
        _draw_prop_collection( [active_object.data.SR_data] )
      #}
      elif active_object.type in ['EMPTY','CURVE','MESH']:#{
        box.prop( active_object.SR_data, "ent_type" )
        ent_type = active_object.SR_data.ent_type

        # object-level entity data
        col = getattr( active_object.SR_data, ent_type, None )
        if col != None and len(col)!=0: _draw_prop_collection( col )

        # mesh-level entity data (stored on the mesh datablock)
        if active_object.type == 'MESH':#{
          col = getattr( active_object.data.SR_data, ent_type, None )
          if col != None and len(col)!=0: _draw_prop_collection( col )
        #}
      #}
    #}
  #}
#}
+
class SR_MATERIAL_PANEL(bpy.types.Panel):
#{
  # Material properties panel exposing Skate Rift shader / collision
  # settings, with extra options per shader type.
  bl_label="Skate Rift material"
  bl_idname="MATERIAL_PT_sr_material"
  bl_space_type='PROPERTIES'
  bl_region_type='WINDOW'
  bl_context="material"

  def draw(_,context):
  #{
    active_object = bpy.context.active_object
    if active_object == None: return
    active_mat = active_object.active_material
    if active_mat == None: return

    info = material_info( active_mat )

    # show which diffuse texture the compiler will pick up, if any
    if 'tex_diffuse' in info:#{
      _.layout.label( icon='INFO', \
            text=F"{info['tex_diffuse'].name} will be compiled" )
    #}

    _.layout.prop( active_mat.SR_data, "shader" )
    _.layout.prop( active_mat.SR_data, "surface_prop" )
    _.layout.prop( active_mat.SR_data, "collision" )

    # gameplay surface flags only apply to collidable, visible shaders
    if active_mat.SR_data.collision:#{
      box = _.layout.box()
      row = box.row()

      if (active_mat.SR_data.shader != 'invisible') and \
         (active_mat.SR_data.shader != 'boundary'):#{
        row.prop( active_mat.SR_data, "skate_surface" )
        row.prop( active_mat.SR_data, "grind_surface" )
        row.prop( active_mat.SR_data, "grow_grass" )
        # NOTE(review): 'preview_visibile' looks misspelled but must match
        # the property's declared identifier elsewhere — do not "fix" here
        row.prop( active_mat.SR_data, "preview_visibile" )
      #}
    #}

    # shader-specific options
    if active_mat.SR_data.shader == "terrain_blend":#{
      box = _.layout.box()
      box.prop( active_mat.SR_data, "blend_offset" )
      box.prop( active_mat.SR_data, "sand_colour" )
    #}
    elif active_mat.SR_data.shader == "vertex_blend":#{
      box = _.layout.box()
      box.label( icon='INFO', text="Uses vertex colours, the R channel" )
      box.prop( active_mat.SR_data, "blend_offset" )
    #}
    elif active_mat.SR_data.shader == "water":#{
      box = _.layout.box()
      box.label( icon='INFO', text="Depth scale of 16 meters" )
      box.prop( active_mat.SR_data, "shore_colour" )
      box.prop( active_mat.SR_data, "ocean_colour" )
    #}
  #}
#}
+
def sr_get_type_enum( scene, context ):
#{
  """Build the EnumProperty items list for ent_type.

  Point entities are always offered; mesh entities are added except when
  the Entity tab is open with a non-mesh object active."""
  items = [('none','None',"")]
  mesh_entities=['ent_gate','ent_water']
  point_entities=['ent_spawn','ent_route_node','ent_route']

  items += [(e,e,'') for e in point_entities]

  # short-circuit keeps the active_object access gated on the ENTITY tab
  if context.scene.SR_data.panel != 'ENTITY' or \
     context.active_object.type == 'MESH':#{
    items += [(e,e,'') for e in mesh_entities]
  #}

  return items
#}
+
def sr_on_type_change( _, context ):
#{
  """Update hook for ent_type: ensure the matching data collection(s) on
  the active object (and its mesh datablock, for meshes) hold at least
  one entry."""
  obj = context.active_object
  ent_type = obj.SR_data.ent_type
  if ent_type == 'none': return

  holders = [ obj.SR_data ]
  # mesh objects also carry entity data on the mesh datablock
  if obj.type == 'MESH': holders.insert( 0, obj.data.SR_data )

  for holder in holders:#{
    coll = getattr( holder, ent_type, None )
    if coll is not None and len(coll) == 0: coll.add()
  #}
#}
+
class SR_OBJECT_ENT_SPAWN(bpy.types.PropertyGroup):
#{
  # Object-level data for an ent_spawn entity.

  # name this spawn point can be referenced by
  alias: bpy.props.StringProperty( name='alias' )
#}
+
class SR_OBJECT_ENT_GATE(bpy.types.PropertyGroup):
#{
  # Object-level data for an ent_gate entity.

  # destination gate; picker restricted to other ent_gate objects
  target: bpy.props.PointerProperty( \
          type=bpy.types.Object, name="destination", \
          poll=lambda self,obj: sr_filter_ent_type(obj,['ent_gate']))

  # lookup key used by the 'nonlocal' subtype
  key: bpy.props.StringProperty()
  tipo: bpy.props.EnumProperty(items=(('default', 'Default', ""),
                                      ('nonlocal', 'Non-Local', ""),))

  @staticmethod
  def sr_inspector( layout, data ):
  #{
    # custom UI: show the target picker or the key field depending on
    # the gate subtype
    box = layout.box()
    box.prop( data[0], 'tipo', text="subtype" )

    if data[0].tipo == 'default': box.prop( data[0], 'target' )
    elif data[0].tipo == 'nonlocal': box.prop( data[0], 'key' )
  #}
#}
+
class SR_MESH_ENT_GATE(bpy.types.PropertyGroup):
#{
  # Mesh-level data for an ent_gate entity.

  # gate dimensions; exact semantics defined by the game runtime
  dimensions: bpy.props.FloatVectorProperty(name="dimensions",size=3)
#}
+
class SR_OBJECT_ENT_ROUTE_ENTRY(bpy.types.PropertyGroup):
#{
  # One checkpoint entry of an ent_route: a reference to a gate object.

  target: bpy.props.PointerProperty( \
          type=bpy.types.Object, name='target', \
          poll=lambda self,obj: sr_filter_ent_type(obj,['ent_gate']))
#}
+
class SR_UL_ROUTE_NODE_LIST(bpy.types.UIList):
#{
  # UIList row for a route checkpoint: just the gate pointer.
  bl_idname = 'SR_UL_ROUTE_NODE_LIST'

  def draw_item(_,context,layout,data,item,icon,active_data,active_propname):
  #{
    layout.prop( item, 'target', text='', emboss=False )
  #}
#}
+
def internal_listdel_execute(self,context,ent_name,collection_name):
#{
  """Shared delete handler for list operators: remove the currently
  selected entry of active_object.SR_data.<ent_name>[0].<collection_name>
  and clamp the selection index into the shrunken list."""
  ent = getattr(context.active_object.SR_data, ent_name)[0]
  coll = getattr(ent, collection_name)
  idx_prop = F'{collection_name}_index'
  cur = getattr(ent, idx_prop)

  coll.remove(cur)

  # move the cursor to the previous entry; becomes -1 on an empty list
  setattr(ent, idx_prop, min(max(0, cur-1), len(coll)-1))
  return{'FINISHED'}
#}
+
def internal_listadd_execute(self,context,ent_name,collection_name):
#{
  """Shared add handler for list operators: append a fresh entry to
  active_object.SR_data.<ent_name>[0].<collection_name>."""
  ent = getattr( context.active_object.SR_data, ent_name )[0]
  coll = getattr( ent, collection_name )
  coll.add()
  return{'FINISHED'}
#}
+
def copy_propgroup( de, to ):
#{
  """Deep-copy every annotated property from 'de' onto 'to'.

  Collection properties are resized to match the source and copied
  element-wise (recursing); plain properties are assigned directly."""
  for key in de.__annotations__:#{
    src = getattr( de, key )

    if not isinstance( src, bpy.types.bpy_prop_collection ):#{
      setattr( to, key, src )
      continue
    #}

    dst = getattr( to, key )

    # grow/shrink the destination collection to the source length
    while len(dst) < len(src): dst.add()
    while len(dst) > len(src): dst.remove(0)

    for i in range(len(src)):#{
      copy_propgroup( src[i], dst[i] )
    #}
  #}
#}
+
class SR_OT_COPY_ENTITY_DATA(bpy.types.Operator):
#{
  # Copies the active object's entity type and entity data onto every
  # other selected object, including mesh-level data for mesh targets.
  bl_idname = "skaterift.copy_entity_data"
  bl_label = "Copy entity data"

  def execute(self, context):#{
    data = context.active_object.SR_data
    new_type = data.ent_type
    print( F"Copy entity data from: {context.active_object.name}" )

    for obj in context.selected_objects:#{
      if obj != context.active_object:#{
        print( F" To: {obj.name}" )

        obj.SR_data.ent_type = new_type

        # bugfix: this previously read 'active_object.type', a name that
        # is undefined in this scope (NameError at runtime). Mesh-level
        # entity data only exists on mesh targets, so test the target.
        # NOTE(review): assumes the active object is also a mesh when any
        # target is — confirm against how this operator is invoked
        if obj.type == 'MESH':#{
          col = getattr( obj.data.SR_data, new_type, None )
          if col != None and len(col)==0: col.add()
          mdata = context.active_object.data.SR_data
          copy_propgroup( getattr(mdata,new_type)[0], col[0] )
        #}

        # object-level entity data
        col = getattr( obj.SR_data, new_type, None )
        if col != None and len(col)==0: col.add()
        copy_propgroup( getattr(data,new_type)[0], col[0] )
      #}
    #}
    return{'FINISHED'}
  #}
#}
+
class SR_OT_ROUTE_LIST_NEW_ITEM(bpy.types.Operator):
#{
  # Appends a new (empty) gate entry to the active ent_route.
  bl_idname = "skaterift.new_entry"
  bl_label = "Add gate"

  def execute(self, context):#{
    return internal_listadd_execute(self,context,'ent_route','gates')
  #}
#}
+
class SR_OT_ROUTE_LIST_DEL_ITEM(bpy.types.Operator):
#{
  # Removes the selected gate entry from the active ent_route.
  bl_idname = "skaterift.del_entry"
  bl_label = "Remove gate"

  @classmethod
  def poll(cls, context):#{
    # enabled only when the active object is a route with gate entries
    active_object = context.active_object
    if obj_ent_type(active_object) == 'ent_route':#{
      return active_object.SR_data.ent_route[0].gates
    #}
    else: return False
  #}

  def execute(self, context):#{
    return internal_listdel_execute(self,context,'ent_route','gates')
  #}
#}
+
class SR_OT_AUDIO_LIST_NEW_ITEM(bpy.types.Operator):
#{
  # Appends a new file entry to the active ent_audio.
  bl_idname = "skaterift.al_new_entry"
  bl_label = "Add file"

  def execute(self, context):#{
    return internal_listadd_execute(self,context,'ent_audio','files')
  #}
#}
+
class SR_OT_AUDIO_LIST_DEL_ITEM(bpy.types.Operator):
#{
  # Removes the selected file entry from the active ent_audio.
  bl_idname = "skaterift.al_del_entry"
  bl_label = "Remove file"

  @classmethod
  def poll(cls, context):#{
    # enabled only when the active object is an audio entity with files
    active_object = context.active_object
    if obj_ent_type(active_object) == 'ent_audio':#{
      return active_object.SR_data.ent_audio[0].files
    #}
    else: return False
  #}

  def execute(self, context):#{
    # cleanup: removed an unreachable duplicate return that followed this
    return internal_listdel_execute(self,context,'ent_audio','files')
  #}
#}
+
class SR_OT_GLYPH_LIST_NEW_ITEM(bpy.types.Operator):
#{
  # Appends a glyph to the active ent_font, seeding it from the previous
  # glyph (same bounds, next codepoint) when one exists.
  bl_idname = "skaterift.gl_new_entry"
  bl_label = "Add glyph"

  def execute(self, context):#{
    active_object = context.active_object

    font = active_object.SR_data.ent_font[0]
    font.glyphs.add()

    if len(font.glyphs) > 1:#{
      # convenience: continue the sequence from the previous glyph
      prev = font.glyphs[-2]
      cur = font.glyphs[-1]

      cur.bounds = prev.bounds
      cur.utf32 = prev.utf32+1
    #}

    return{'FINISHED'}
  #}
#}
+
class SR_OT_GLYPH_LIST_DEL_ITEM(bpy.types.Operator):
#{
  # Removes the selected glyph from the active ent_font.
  bl_idname = "skaterift.gl_del_entry"
  bl_label = "Remove Glyph"

  @classmethod
  def poll(cls, context):#{
    # enabled only when the active object is a font with glyph entries
    active_object = context.active_object
    if obj_ent_type(active_object) == 'ent_font':#{
      return active_object.SR_data.ent_font[0].glyphs
    #}
    else: return False
  #}

  def execute(self, context):#{
    return internal_listdel_execute(self,context,'ent_font','glyphs')
  #}
#}
+
class SR_OT_GLYPH_LIST_MOVE_ITEM(bpy.types.Operator):
#{
  # Moves the selected glyph up or down in the active ent_font's list and
  # keeps the selection on the moved entry.
  bl_idname = "skaterift.gl_move_item"
  bl_label = "aa"
  direction: bpy.props.EnumProperty(items=(('UP', 'Up', ""),
                                           ('DOWN', 'Down', ""),))

  @classmethod
  def poll(cls, context):#{
    # enabled only when the active object is a font with glyph entries
    active_object = context.active_object
    if obj_ent_type(active_object) == 'ent_font':#{
      return active_object.SR_data.ent_font[0].glyphs
    #}
    else: return False
  #}

  def execute(_, context):#{
    active_object = context.active_object
    data = active_object.SR_data.ent_font[0]

    index = data.glyphs_index
    # cleanup: the neighbour index was previously computed twice with the
    # same expression; compute it once and clamp it for the new cursor
    neighbor = index + (-1 if _.direction == 'UP' else 1)
    data.glyphs.move( neighbor, index )

    data.glyphs_index = max(0, min(neighbor, len(data.glyphs) - 1))

    return{'FINISHED'}
  #}
#}
+
class SR_OT_FONT_VARIANT_LIST_NEW_ITEM(bpy.types.Operator):
#{
  # Appends a new variant entry to the active ent_font.
  bl_idname = "skaterift.fv_new_entry"
  bl_label = "Add variant"

  def execute(self, context):#{
    return internal_listadd_execute(self,context,'ent_font','variants')
  #}
#}
+
class SR_OT_FONT_VARIANT_LIST_DEL_ITEM(bpy.types.Operator):
#{
  # Removes the selected variant entry from the active ent_font.
  bl_idname = "skaterift.fv_del_entry"
  bl_label = "Remove variant"

  @classmethod
  def poll(cls, context):#{
    # enabled only when the active object is a font with variant entries
    active_object = context.active_object
    if obj_ent_type(active_object) == 'ent_font':#{
      return active_object.SR_data.ent_font[0].variants
    #}
    else: return False
  #}

  def execute(self, context):#{
    return internal_listdel_execute(self,context,'ent_font','variants')
  #}
#}
+
class SR_OBJECT_ENT_AUDIO_FILE_ENTRY(bpy.types.PropertyGroup):
#{
  # One audio clip entry of an ent_audio: file path plus pick probability.

  path: bpy.props.StringProperty( name="Path" )
  probability: bpy.props.FloatProperty( name="Probability",default=100.0 )
#}
+
class SR_UL_AUDIO_LIST(bpy.types.UIList):
#{
  # UIList row for ent_audio file entries: path (70% width) + probability.
  bl_idname = 'SR_UL_AUDIO_LIST'

  def draw_item(_,context,layout,data,item,icon,active_data,active_propname):
  #{
    split = layout.split(factor=0.7)
    c = split.column()
    c.prop( item, 'path', text='', emboss=False )
    c = split.column()
    c.prop( item, 'probability', text='%', emboss=True )
  #}
#}
+
class SR_UL_FONT_VARIANT_LIST(bpy.types.UIList):
#{
  # UIList row for ent_font variants: mesh reference + subtype.
  bl_idname = 'SR_UL_FONT_VARIANT_LIST'

  def draw_item(_,context,layout,data,item,icon,active_data,active_propname):
  #{
    layout.prop( item, 'mesh', emboss=False )
    layout.prop( item, 'tipo' )
  #}
#}
+
class SR_UL_FONT_GLYPH_LIST(bpy.types.UIList):
#{
  # UIList row for font glyphs: preview label, editable codepoint and
  # bounds vector.
  bl_idname = 'SR_UL_FONT_GLYPH_LIST'

  def draw_item(_,context,layout,data,item,icon,active_data,active_propname):
  #{
    s0 = layout.split(factor=0.3)
    c = s0.column()
    s1 = c.split(factor=0.3)
    c = s1.column()
    row = c.row()
    # printable ASCII shows as the character itself, anything else as a
    # hex codepoint
    lbl = chr(item.utf32) if item.utf32 >= 32 and item.utf32 <= 126 else \
          f'x{item.utf32:x}'
    row.label(text=lbl)
    c = s1.column()
    c.prop( item, 'utf32', text='', emboss=True )
    c = s0.column()
    row = c.row()
    row.prop( item, 'bounds', text='', emboss=False )
  #}
#}
+
class SR_OBJECT_ENT_ROUTE(bpy.types.PropertyGroup):
#{
  # Object-level data for an ent_route: an ordered gate (checkpoint) list
  # plus display colour and alias.

  gates: bpy.props.CollectionProperty(type=SR_OBJECT_ENT_ROUTE_ENTRY)
  # index of the selected entry in the checkpoint UIList
  gates_index: bpy.props.IntProperty()

  colour: bpy.props.FloatVectorProperty( \
          name="Colour",\
          subtype='COLOR',\
          min=0.0,max=1.0,\
          default=Vector((0.79,0.63,0.48)),\
          description="Route colour"\
          )

  alias: bpy.props.StringProperty(\
          name="Alias",\
          default="Untitled Course")

  @staticmethod
  def sr_inspector( layout, data ):
  #{
    # custom UI: alias/colour fields plus the checkpoint list with
    # add/remove operators
    layout.prop( data[0], 'alias' )
    layout.prop( data[0], 'colour' )

    layout.label( text='Checkpoints' )
    layout.template_list('SR_UL_ROUTE_NODE_LIST', 'Checkpoints', \
                          data[0], 'gates', data[0], 'gates_index', rows=5)

    row = layout.row()
    row.operator( 'skaterift.new_entry', text='Add' )
    row.operator( 'skaterift.del_entry', text='Remove' )
  #}
#}
+
class SR_OBJECT_ENT_VOLUME(bpy.types.PropertyGroup):
#{
  # Object-level data for an ent_volume: trigger/particle subtype and the
  # entity it targets.

  subtype: bpy.props.EnumProperty(
    name="Subtype",
    items=[('0','Trigger',''),
           ('1','Particles (0.1s)','')]
  )

  # entity activated by this volume; restricted to the listed types
  target: bpy.props.PointerProperty( \
          type=bpy.types.Object, name="Target", \
          poll=lambda self,obj: sr_filter_ent_type(obj,\
                ['ent_audio','ent_skateshop','ent_ccmd']))

  @staticmethod
  def sr_inspector( layout, data ):
  #{
    # custom UI: subtype + target picker
    data = data[0]
    layout.prop( data, 'subtype' )
    layout.prop( data, 'target' )
  #}
#}
+
+class SR_OBJECT_ENT_AUDIO(bpy.types.PropertyGroup):
+#{
+ files: bpy.props.CollectionProperty(type=SR_OBJECT_ENT_AUDIO_FILE_ENTRY)
+ files_index: bpy.props.IntProperty()
+
+ flag_3d: bpy.props.BoolProperty( name="3D audio",default=True )
+ flag_loop: bpy.props.BoolProperty( name="Loop",default=False )
+ flag_auto: bpy.props.BoolProperty( name="Play at start",default=False )
+ flag_nodoppler: bpy.props.BoolProperty( name="No Doppler",default=False )
+
+ group: bpy.props.IntProperty( name="Group ID", default=0 )
+ formato: bpy.props.EnumProperty(
+ name="Format",
+ items=[('0','Uncompressed Mono',''),
+ ('1','Compressed Vorbis',''),
+ ('2','[vg] Bird Synthesis','')]
+ )
+ probability_curve: bpy.props.EnumProperty(
+ name="Probability Curve",
+ items=[('0','Constant',''),
+ ('1','Wildlife Daytime',''),
+ ('2','Wildlife Nighttime','')])
+ channel_behaviour: bpy.props.EnumProperty(
+ name="Channel Behaviour",
+ items=[('0','Unlimited',''),
+ ('1','Discard if group full', ''),
+ ('2','Crossfade if group full','')])
+
+ transition_duration: bpy.props.FloatProperty(name="Transition Time",\
+ default=0.2)
+
+ max_channels: bpy.props.IntProperty( name="Max Channels", default=1 )
+ volume: bpy.props.FloatProperty( name="Volume",default=1.0 )
+
+ @staticmethod
+ def sr_inspector( layout, data ):