X-Git-Url: https://harrygodden.com/git/?a=blobdiff_plain;f=blender_export.py;h=dee0d8f1ee0102774ce2f48da898b035f46f9bc6;hb=a1056ed8198f0f5be0e0f341da8bd49aa6c47198;hp=3e4704ea466fa1e6d8d97eb3a4f44b6bb0dee6f2;hpb=0136a935c00e3ea1f231fd88b38b44982fd409ac;p=carveJwlIkooP6JGAAIwe30JlM.git diff --git a/blender_export.py b/blender_export.py index 3e4704e..dee0d8f 100644 --- a/blender_export.py +++ b/blender_export.py @@ -1,22 +1,3 @@ -# -# ============================================================================= -# -# Copyright . . . -----, ,----- ,---. .---. -# 2021-2023 |\ /| | / | | | | /| -# | \ / | +-- / +----- +---' | / | -# | \ / | | / | | \ | / | -# | \/ | | / | | \ | / | -# ' ' '--' [] '----- '----- ' ' '---' SOFTWARE -# -# ============================================================================= -# -# Python exporter for Blender, compiles .mdl format for Skate Rift. -# -# Its really slow, sorry, I don't know how to speed it up. -# Also not sure why you need to put # before {} in code blocks, there is errors -# otherwise -# - import bpy, math, gpu, os import cProfile from ctypes import * @@ -24,12 +5,12 @@ from mathutils import * from gpu_extras.batch import batch_for_shader bl_info = { - "name":"Skate Rift model compiler", + "name":"Skaterift .mdl exporter", "author": "Harry Godden (hgn)", "version": (0,2), "blender":(3,1,0), "location":"Export", - "descriptin":"", + "description":"", "warning":"", "wiki_url":"", "category":"Import/Export", @@ -46,9 +27,15 @@ class mdl_vert(Structure): # 48 bytes. Quite large. Could compress ("groups",c_uint8*4)] #} +class mdl_transform(Structure): +#{ + _fields_ = [("co",c_float*3), + ( "s",c_float*3), + ( "q",c_float*4)] +#} + class mdl_submesh(Structure): #{ - _pack_ = 1 _fields_ = [("indice_start",c_uint32), ("indice_count",c_uint32), ("vertex_start",c_uint32), @@ -57,17 +44,8 @@ class mdl_submesh(Structure): ("material_id",c_uint32)] # index into the material array #} -class mdl_texture(Structure): -#{ - _pack_ = 1 - _fields_ = [("pstr_name",c_uint32), - ("pack_offset",c_uint32), - ("pack_length",c_uint32)] -#} - class mdl_material(Structure): #{ - _pack_ = 1 _fields_ = [("pstr_name",c_uint32), ("shader",c_uint32), ("flags",c_uint32), @@ -75,2063 +53,1745 @@ class mdl_material(Structure): ("colour",c_float*4), ("colour1",c_float*4), ("tex_diffuse",c_uint32), - ("tex_decal",c_uint32), - ("tex_normal",c_uint32)] + ("tex_none0",c_uint32), + ("tex_none1",c_uint32)] #} -class mdl_node(Structure): +class mdl_bone(Structure): #{ - _pack_ = 1 - _fields_ = [("co",c_float*3), - ( "q",c_float*4), - ( "s",c_float*3), - ("sub_uid",c_uint32), # dont use - ("submesh_start",c_uint32), - ("submesh_count",c_uint32), - ("classtype",c_uint32), - ("offset",c_uint32), + _fields_ = [("co",c_float*3),("end",c_float*3), ("parent",c_uint32), - ("pstr_name",c_uint32)] + ("collider",c_uint32), + ("ik_target",c_uint32), + ("ik_pole",c_uint32), + ("flags",c_uint32), + ("pstr_name",c_uint32), + ("hitbox",(c_float*3)*2), + ("conevx",c_float*3),("conevy",c_float*3),("coneva",c_float*3), + ("conet",c_float)] #} -class mdl_header(Structure): +class mdl_armature(Structure): #{ - _pack_ = 1 - _fields_ = [("identifier",c_uint32), - ("version",c_uint32), - ("file_length",c_uint32), - ("pad0",c_uint32), + _fields_ = [("transform",mdl_transform), + ("bone_start",c_uint32), + ("bone_count",c_uint32), + ("anim_start",c_uint32), + ("anim_count",c_uint32)] +#} - ("node_count",c_uint32), - ("node_offset",c_uint32), +class mdl_animation(Structure): +#{ + _fields_ = 
[("pstr_name",c_uint32), + ("length",c_uint32), + ("rate",c_float), + ("keyframe_start",c_uint32)] +#} +class mdl_mesh(Structure): +#{ + _fields_ = [("transform",mdl_transform), + ("submesh_start",c_uint32), ("submesh_count",c_uint32), - ("submesh_offset",c_uint32), - - ("material_count",c_uint32), - ("material_offset",c_uint32), + ("pstr_name",c_uint32), + ("flags",c_uint32), + ("armature_id",c_uint32)] +#} - ("texture_count",c_uint32), - ("texture_offset",c_uint32), +class mdl_file(Structure): +#{ + _fields_ = [("path",c_uint32), + ("pack_offset",c_uint32), + ("pack_size",c_uint32)] +#} - ("anim_count",c_uint32), - ("anim_offset",c_uint32), +class mdl_texture(Structure): +#{ + _fields_ = [("file",mdl_file), + ("type",c_uint32)] +#} - ("entdata_size",c_uint32), - ("entdata_offset",c_uint32), - - ("strings_size",c_uint32), - ("strings_offset",c_uint32), +class mdl_array(Structure): +#{ + _fields_ = [("file_offset",c_uint32), + ("item_count",c_uint32), + ("item_size",c_uint32), + ("name",c_byte*16)] +#} - ("keyframe_count",c_uint32), - ("keyframe_offset",c_uint32), +class mdl_header(Structure): +#{ + _fields_ = [("version",c_uint32), + ("arrays",mdl_array)] +#} - ("vertex_count",c_uint32), - ("vertex_offset",c_uint32), +class ent_spawn(Structure): +#{ + _fields_ = [("transform",mdl_transform), + ("pstr_name",c_uint32)] +#} - ("indice_count",c_uint32), - ("indice_offset",c_uint32), +class ent_light(Structure): +#{ + _fields_ = [("transform",mdl_transform), + ("daytime",c_uint32), + ("type",c_uint32), + ("colour",c_float*4), + ("angle",c_float), + ("range",c_float), + ("inverse_world",(c_float*3)*4), # Runtime + ("angle_sin_cos",(c_float*2))] # Runtime +#} - ("pack_size",c_uint32), - ("pack_offset",c_uint32)] +class version_refcount_union(Union): +#{ + _fields_ = [("timing_version",c_uint32), + ("ref_count",c_uint8)] #} -class mdl_animation(Structure): +class ent_gate(Structure): #{ - _pack_ = 1 - _fields_ = [("pstr_name",c_uint32), - ("length",c_uint32), - ("rate",c_float), - ("offset",c_uint32)] + _fields_ = [("type",c_uint32), + ("target", c_uint32), + ("dimensions", c_float*3), + ("co", (c_float*3)*2), + ("q", (c_float*4)*2), + ("to_world",(c_float*3)*4), + ("transport",(c_float*3)*4), + ("_anonymous_union",version_refcount_union), + ("timing_time",c_double), + ("routes",c_uint16*4)] #} -class mdl_keyframe(Structure): +class ent_route_node(Structure): #{ - _pack_ = 1 _fields_ = [("co",c_float*3), - ("q",c_float*4), - ("s",c_float*3)] + ("ref_count",c_uint8), + ("ref_total",c_uint8)] #} -# ---------------------------------------------------------------------------- # -# # -# Entity definitions # -# # -# ---------------------------------------------------------------------------- # -# -# ctypes _fields_ defines the data which is filled in by: -# def encode_obj( _, node, node_def ): -# -# gizmos get drawn into the viewport via: -# @staticmethod -# def draw_scene_helpers( obj ): -# -# editor enterface, simiraliy: -# @staticmethod -# def editor_interface( layout, obj ): -# +class ent_path_index(Structure): +#{ + _fields_ = [("index",c_uint16)] +#} -# Classtype 1 -# -# Purpose: A rift. must target another gate, the target gate can not have more -# than one target nodes of its own. 
-# -class classtype_gate(Structure): +class ent_checkpoint(Structure): #{ - _pack_ = 1 - _fields_ = [("target",c_uint32), - ("dims",c_float*3)] + _fields_ = [("gate_index",c_uint16), + ("path_start",c_uint16), + ("path_count",c_uint16)] +#} - def encode_obj(_, node,node_def): - #{ - node.classtype = 1 +class ent_route(Structure): +#{ + _fields_ = [("transform",mdl_transform), + ("pstr_name",c_uint32), + ("checkpoints_start",c_uint16), + ("checkpoints_count",c_uint16), + ("colour",c_float*4), + ("active",c_uint32), #runtime + ("factive",c_float), + ("board_transform",(c_float*3)*4), + ("sm",mdl_submesh), + ("latest_pass",c_double)] +#} - obj = node_def['obj'] +def obj_ent_type( obj ): +#{ + if obj.type == 'ARMATURE': return 'mdl_armature' + elif obj.type == 'LIGHT': return 'ent_light' + else: return obj.SR_data.ent_type +#} - if obj.cv_data.target != None: - _.target = obj.cv_data.target.cv_data.uid +def sr_filter_ent_type( obj, ent_type ): +#{ + if obj == bpy.context.active_object: return False - if obj.type == 'MESH': - #{ - _.dims[0] = obj.data.cv_data.v0[0] - _.dims[1] = obj.data.cv_data.v0[1] - _.dims[2] = obj.data.cv_data.v0[2] - #} - else: - #{ - _.dims[0] = obj.cv_data.v0[0] - _.dims[1] = obj.cv_data.v0[1] - _.dims[2] = obj.cv_data.v0[2] + for c0 in obj.users_collection:#{ + for c1 in bpy.context.active_object.users_collection:#{ + if c0 == c1:#{ + return ent_type == obj_ent_type( obj ) + #} #} #} - @staticmethod - def draw_scene_helpers( obj ): - #{ - global cv_view_verts, cv_view_colours - - if obj.type == 'MESH': - dims = obj.data.cv_data.v0 - else: - dims = obj.cv_data.v0 - - vs = [None]*9 - c = Vector((0,0,dims[2])) - - vs[0] = obj.matrix_world @ Vector((-dims[0],0.0,-dims[1]+dims[2])) - vs[1] = obj.matrix_world @ Vector((-dims[0],0.0, dims[1]+dims[2])) - vs[2] = obj.matrix_world @ Vector(( dims[0],0.0, dims[1]+dims[2])) - vs[3] = obj.matrix_world @ Vector(( dims[0],0.0,-dims[1]+dims[2])) - vs[4] = obj.matrix_world @ (c+Vector((-1,0,-2))) - vs[5] = obj.matrix_world @ (c+Vector((-1,0, 2))) - vs[6] = obj.matrix_world @ (c+Vector(( 1,0, 2))) - vs[7] = obj.matrix_world @ (c+Vector((-1,0, 0))) - vs[8] = obj.matrix_world @ (c+Vector(( 1,0, 0))) - - indices = [(0,1),(1,2),(2,3),(3,0),(4,5),(5,6),(7,8)] + return False +#} - for l in indices: - #{ - v0 = vs[l[0]] - v1 = vs[l[1]] - cv_view_verts += [(v0[0],v0[1],v0[2])] - cv_view_verts += [(v1[0],v1[1],v1[2])] - cv_view_colours += [(1,1,0,1),(1,1,0,1)] - #} +def compile_obj_transform( obj, transform ): +#{ + co = obj.matrix_world @ Vector((0,0,0)) + q = obj.matrix_local.to_quaternion() + s = obj.scale + + # Setup transform + # + transform.co[0] = co[0] + transform.co[1] = co[2] + transform.co[2] = -co[1] + transform.q[0] = q[1] + transform.q[1] = q[3] + transform.q[2] = -q[2] + transform.q[3] = q[0] + transform.s[0] = s[0] + transform.s[1] = s[2] + transform.s[2] = s[1] +#} - sw = (0.4,0.4,0.4,0.2) - if obj.cv_data.target != None: - cv_draw_arrow( obj.location, obj.cv_data.target.location, sw ) - #} +def int_align_to( v, align ): +#{ + while(v%align)!=0: v += 1 + return v +#} - @staticmethod - def editor_interface( layout, obj ): - #{ - layout.prop( obj.cv_data, "target" ) +def bytearray_align_to( buffer, align, w=b'\xaa' ): +#{ + while (len(buffer) % align) != 0: buffer.extend(w) + return buffer +#} - mesh = obj.data - layout.label( text=F"(i) Data is stored in {mesh.name}" ) - layout.prop( mesh.cv_data, "v0", text="Gate dimensions" ) +def bytearray_print_hex( s, w=16 ): +#{ + for r in range((len(s)+(w-1))//w):#{ + i0=(r+0)*w + 
i1=min((r+1)*w,len(s)) + print( F'{r*w:06x}| \x1B[31m', end='') + print( F"{' '.join('{:02x}'.format(x) for x in s[i0:i1]):<48}",end='' ) + print( "\x1B[0m", end='') + print( ''.join(chr(x) if (x>=33 and x<=126) else '.' for x in s[i0:i1] ) ) #} #} -class classtype_nonlocal_gate(classtype_gate): +def sr_compile_string( s ): #{ - def encode_obj(_,node,node_def): - #{ - node.classtype = 300 - - obj = node_def['obj'] - _.target = encoder_process_pstr( node_def['obj'].cv_data.strp ) - - if obj.type == 'MESH': - #{ - _.dims[0] = obj.data.cv_data.v0[0] - _.dims[1] = obj.data.cv_data.v0[1] - _.dims[2] = obj.data.cv_data.v0[2] - #} - else: - #{ - _.dims[0] = obj.cv_data.v0[0] - _.dims[1] = obj.cv_data.v0[1] - _.dims[2] = obj.cv_data.v0[2] - #} - #} + if s in sr_compile.string_cache: return sr_compile.string_cache[s] + + index = len( sr_compile.string_data ) + sr_compile.string_cache[s] = index + sr_compile.string_data.extend( s.encode('utf-8') ) + sr_compile.string_data.extend( b'\0' ) - @staticmethod - def editor_interface( layout, obj ): - #{ - layout.prop( obj.cv_data, "strp", text="Nonlocal ID" ) + bytearray_align_to( sr_compile.string_data, 4 ) + return index +#} - mesh = obj.data - layout.label( text=F"(i) Data is stored in {mesh.name}" ) - layout.prop( mesh.cv_data, "v0", text="Gate dimensions" ) - #} +def material_tex_image(v): +#{ + return { + "Image Texture": + { + "image": F"{v}" + } + } #} -# Classtype 3 -# -# Purpose: player can reset here, its a safe place -# spawns can share the same name, the closest one will be picked -# -# when the world loads it will pick the one named 'start' first. +cxr_graph_mapping = \ +{ + # Default shader setup + "Principled BSDF": + { + "Base Color": + { + "Image Texture": + { + "image": "tex_diffuse" + }, + "Mix": + { + "A": material_tex_image("tex_diffuse"), + "B": material_tex_image("tex_decal") + }, + }, + "Normal": + { + "Normal Map": + { + "Color": material_tex_image("tex_normal") + } + } + } +} + +# https://harrygodden.com/git/?p=convexer.git;a=blob;f=__init__.py;#l1164 # -class classtype_spawn(Structure): +def material_info(mat): #{ - _pack_ = 1 - _fields_ = [("pstr_alias",c_uint32)] + info = {} - def encode_obj(_, node,node_def): - #{ - node.classtype = 3 - _.pstr_alias = encoder_process_pstr( node_def['obj'].cv_data.strp ) - #} + # Using the cxr_graph_mapping as a reference, go through the shader + # graph and gather all $props from it. 
+ # + def _graph_read( node_def, node=None, depth=0 ):#{ + nonlocal mat + nonlocal info + + # Find rootnodes + # + if node == None:#{ + _graph_read.extracted = [] - @staticmethod - def draw_scene_helpers( obj ): - #{ - global cv_view_verts, cv_view_colours + for node_idname in node_def:#{ + for n in mat.node_tree.nodes:#{ + if n.name == node_idname:#{ + node_def = node_def[node_idname] + node = n + break + #} + #} + #} + #} - vs = [None]*4 - vs[0] = obj.matrix_world @ Vector((0,0,0)) - vs[1] = obj.matrix_world @ Vector((0,2,0)) - vs[2] = obj.matrix_world @ Vector((0.5,1,0)) - vs[3] = obj.matrix_world @ Vector((-0.5,1,0)) - indices = [(0,1),(1,2),(1,3)] + for link in node_def:#{ + link_def = node_def[link] - for l in indices: - #{ - v0 = vs[l[0]] - v1 = vs[l[1]] - - cv_view_verts += [(v0[0],v0[1],v0[2])] - cv_view_verts += [(v1[0],v1[1],v1[2])] - cv_view_colours += [(0,1,1,1),(0,1,1,1)] - #} + if isinstance( link_def, dict ):#{ + node_link = None + for x in node.inputs:#{ + if isinstance( x, bpy.types.NodeSocketColor ):#{ + if link == x.name:#{ + node_link = x + break + #} + #} + #} - cv_draw_sphere( obj.location, 20.0, [0.1,0,0.9,0.4] ) - #} + if node_link and node_link.is_linked:#{ + # look for definitions for the connected node type + # + from_node = node_link.links[0].from_node + + node_name = from_node.name.split('.')[0] + if node_name in link_def:#{ + from_node_def = link_def[ node_name ] - @staticmethod - def editor_interface( layout, obj ): - #{ - layout.prop( obj.cv_data, "strp", text="Alias" ) + _graph_read( from_node_def, from_node, depth+1 ) + #} + + # No definition! :( + # TODO: Make a warning for this? + #} + else:#{ + if "default" in link_def:#{ + prop = link_def['default'] + info[prop] = node_link.default_value + #} + #} + #} + else:#{ + prop = link_def + info[prop] = getattr( node, link ) + #} + #} #} + + _graph_read( cxr_graph_mapping ) + return info #} -# Classtype 4 -# -# Purpose: Tells the game to draw water HERE, at this entity. 
-# -class classtype_water(Structure): +def sr_pack_file( file, path, data ): #{ - _pack_ = 1 - _fields_ = [("temp",c_uint32)] + file.path = sr_compile_string( path ) + file.pack_offset = len( sr_compile.pack_data ) + file.pack_size = len( data ) - def encode_obj(_, node,node_def): - #{ - node.classtype = 4 - # no data, spooky - #} + sr_compile.pack_data.extend( data ) + bytearray_align_to( sr_compile.pack_data, 16 ) #} -# Classtype 8 -# -# Purpose: Defines a route node and links to up to two more nodes -# -class classtype_route_node(Structure): +def sr_compile_texture( img ): #{ - _pack_ = 1 - _fields_ = [("target",c_uint32), - ("target1",c_uint32)] - - def encode_obj(_, node,node_def): - #{ - node.classtype = 8 - obj = node_def['obj'] + if img == None: + return 0 - if obj.cv_data.target != None: - _.target = obj.cv_data.target.cv_data.uid - if obj.cv_data.target1 != None: - _.target1 = obj.cv_data.target1.cv_data.uid - #} + name = os.path.splitext( img.name )[0] - @staticmethod - def draw_scene_helpers( obj ): - #{ - global cv_view_verts, cv_view_colours + if name in sr_compile.texture_cache: + return sr_compile.texture_cache[name] - sw = Vector((0.4,0.4,0.4,0.2)) - sw2 = Vector((1.5,0.2,0.2,0.0)) - if obj.cv_data.target != None: - cv_draw_bpath( obj, obj.cv_data.target, sw, sw ) - if obj.cv_data.target1 != None: - cv_draw_bpath( obj, obj.cv_data.target1, sw, sw ) + texture_index = (len(sr_compile.texture_data)//sizeof(mdl_texture)) +1 - cv_draw_bhandle( obj, 1.0, (0.8,0.8,0.8,1.0) ) - cv_draw_bhandle( obj, -1.0, (0.4,0.4,0.4,1.0) ) + tex = mdl_texture() + tex.type = 0 - p1 = obj.location+ \ - obj.matrix_world.to_quaternion() @ Vector((0,0,-6+1.5)) - cv_draw_arrow( obj.location, p1, sw ) + if sr_compile.pack_textures:#{ + filedata = qoi_encode( img ) + sr_pack_file( tex.file, name, filedata ) #} - @staticmethod - def editor_interface( layout, obj ): - #{ - layout.prop( obj.cv_data, "target", text="Left" ) - layout.prop( obj.cv_data, "target1", text="Right" ) - #} + sr_compile.texture_cache[name] = texture_index + sr_compile.texture_data.extend( bytearray(tex) ) + return texture_index #} -# Classtype 9 -# -# Purpose: Defines a route, its 'starting' point, and the colour to use for it -# -class classtype_route(Structure): +def sr_compile_material( mat ): #{ - _pack_ = 1 - _fields_ = [("id_start",c_uint32), - ("pstr_name",c_uint32), - ("colour",c_float*3)] - - def encode_obj(_, node,node_def): - #{ - node.classtype = 9 - obj = node_def['obj'] + if mat == None: + return 0 + if mat.name in sr_compile.material_cache: + return sr_compile.material_cache[mat.name] - _.colour[0] = obj.cv_data.colour[0] - _.colour[1] = obj.cv_data.colour[1] - _.colour[2] = obj.cv_data.colour[2] - _.pstr_name = encoder_process_pstr( obj.cv_data.strp ) + index = (len(sr_compile.material_data)//sizeof(mdl_material))+1 + sr_compile.material_cache[mat.name] = index - if obj.cv_data.target != None: - _.id_start = obj.cv_data.target.cv_data.uid + m = mdl_material() + m.pstr_name = sr_compile_string( mat.name ) + + flags = 0x00 + if mat.SR_data.collision:#{ + flags |= 0x2 + if mat.SR_data.skate_surface: flags |= 0x1 + if mat.SR_data.grind_surface: flags |= (0x8|0x1) #} - @staticmethod - def draw_scene_helpers( obj ): - #{ - global cv_view_verts, cv_view_colours, cv_view_course_i - - if obj.cv_data.target: - cv_draw_arrow( obj.location, obj.cv_data.target.location, [1,1,1,1] ) - - # Tries to simulate how we do it in the game - # - stack = [None]*64 - stack_i = [0]*64 - stack[0] = obj.cv_data.target - si = 1 - loop_complete = 
False - - while si > 0: - #{ - if stack_i[si-1] == 2: - #{ - si -= 1 - continue + if mat.SR_data.grow_grass: flags |= 0x4 + m.flags = flags - if si == 0: # Loop failed to complete - break - #} + m.surface_prop = int(mat.SR_data.surface_prop) - node = stack[si-1] + if mat.SR_data.shader == 'standard': m.shader = 0 + if mat.SR_data.shader == 'standard_cutout': m.shader = 1 + if mat.SR_data.shader == 'terrain_blend':#{ + m.shader = 2 - targets = [None,None] - targets[0] = node.cv_data.target + m.colour[0] = pow( mat.SR_data.sand_colour[0], 1.0/2.2 ) + m.colour[1] = pow( mat.SR_data.sand_colour[1], 1.0/2.2 ) + m.colour[2] = pow( mat.SR_data.sand_colour[2], 1.0/2.2 ) + m.colour[3] = 1.0 - if node.cv_data.classtype == 'classtype_route_node': - #{ - targets[1] = node.cv_data.target1 - #} - - nextnode = targets[stack_i[si-1]] - stack_i[si-1] += 1 - - if nextnode != None: # branch - #{ - if nextnode == stack[0]: # Loop completed - #{ - loop_complete = True - break - #} + m.colour1[0] = mat.SR_data.blend_offset[0] + m.colour1[1] = mat.SR_data.blend_offset[1] + #} - valid=True - for sj in range(si): - #{ - if stack[sj] == nextnode: # invalidated path - #{ - valid=False - break - #} - #} + if mat.SR_data.shader == 'vertex_blend':#{ + m.shader = 3 - if valid: - #{ - stack_i[si] = 0 - stack[si] = nextnode - si += 1 - continue - #} - #} - #} + m.colour1[0] = mat.SR_data.blend_offset[0] + m.colour1[1] = mat.SR_data.blend_offset[1] + #} - if loop_complete: - #{ - cc = Vector((obj.cv_data.colour[0],\ - obj.cv_data.colour[1],\ - obj.cv_data.colour[2],\ - 1.0)) - - for sj in range(si): - #{ - sk = (sj+1)%si - - if stack[sj].cv_data.classtype == 'classtype_gate' and \ - stack[sk].cv_data.classtype == 'classtype_gate': - #{ - dist = (stack[sj].location-stack[sk].location).magnitude - cv_draw_sbpath( stack[sj], stack[sk], cc*0.4, cc, dist, dist ) - #} - else: - cv_draw_bpath( stack[sj], stack[sk], cc, cc ) - #} + if mat.SR_data.shader == 'water':#{ + m.shader = 4 - cv_view_course_i += 1 - #} + m.colour[0] = pow( mat.SR_data.shore_colour[0], 1.0/2.2 ) + m.colour[1] = pow( mat.SR_data.shore_colour[1], 1.0/2.2 ) + m.colour[2] = pow( mat.SR_data.shore_colour[2], 1.0/2.2 ) + m.colour[3] = 1.0 + m.colour1[0] = pow( mat.SR_data.ocean_colour[0], 1.0/2.2 ) + m.colour1[1] = pow( mat.SR_data.ocean_colour[1], 1.0/2.2 ) + m.colour1[2] = pow( mat.SR_data.ocean_colour[2], 1.0/2.2 ) + m.colour1[3] = 1.0 #} + + inf = material_info( mat ) - @staticmethod - def editor_interface( layout, obj ): + if mat.SR_data.shader == 'standard' or \ + mat.SR_data.shader == 'standard_cutout' or \ + mat.SR_data.shader == 'terrain_blend' or \ + mat.SR_data.shader == 'vertex_blend': #{ - layout.prop( obj.cv_data, "target", text="'Start' from" ) - layout.prop( obj.cv_data, "colour" ) - layout.prop( obj.cv_data, "strp", text="Name" ) + if 'tex_diffuse' in inf: + m.tex_diffuse = sr_compile_texture(inf['tex_diffuse']) #} -#} -# Classtype 12 -# -# Purpose: links an mesh node to a type 11 -# -class classtype_skin(Structure): -#{ - _pack_ = 1 - _fields_ = [("skeleton",c_uint32)] - - def encode_obj(_, node,node_def): - #{ - node.classtype = 12 - - armature_def = node_def['linked_armature'] - _.skeleton = armature_def['obj'].cv_data.uid - #} + sr_compile.material_data.extend( bytearray(m) ) + return index #} -# Classtype 11 -# -# Purpose: defines the allocation requirements for a skeleton -# -class classtype_skeleton(Structure): +def sr_armature_bones( armature ): #{ - _pack_ = 1 - _fields_ = [("channels",c_uint32), - ("ik_count",c_uint32), - 
("collider_count",c_uint32), - ("anim_start",c_uint32), - ("anim_count",c_uint32)] - - def encode_obj(_, node,node_def): + def _recurse_bone( b ): #{ - node.classtype = 11 - - _.channels = len( node_def['bones'] ) - _.ik_count = node_def['ik_count'] - _.collider_count = node_def['collider_count'] - _.anim_start = node_def['anim_start'] - _.anim_count = node_def['anim_count'] + yield b + for c in b.children: yield from _recurse_bone( c ) #} -#} + for b in armature.data.bones: + if not b.parent: + yield from _recurse_bone( b ) +#} -# Classtype 10 -# -# Purpose: intrinsic bone type, stores collision information and limits too -# -class classtype_bone(Structure): +def sr_compile_mesh( obj ): #{ - _pack_ = 1 - _fields_ = [("flags",c_uint32), - ("ik_target",c_uint32), - ("ik_pole",c_uint32), - ("hitbox",(c_float*3)*2), - ("conevx",c_float*3), - ("conevy",c_float*3), - ("coneva",c_float*3), - ("conet",c_float)] + node=mdl_mesh() + compile_obj_transform(obj, node.transform) + node.pstr_name = sr_compile_string(obj.name) + node.flags = 0 - def encode_obj(_, node,node_def): - #{ - node.classtype = 10 - - armature_def = node_def['linked_armature'] - obj = node_def['bone'] - - _.flags = node_def['deform'] - - if 'ik_target' in node_def: - #{ - _.flags |= 0x2 - _.ik_target = armature_def['bones'].index( node_def['ik_target'] ) - _.ik_pole = armature_def['bones'].index( node_def['ik_pole'] ) - #} - - # For ragdolls - # - if obj.cv_data.collider != 'collider_none': - #{ - if obj.cv_data.collider == 'collider_box': - _.flags |= 0x4 - else: - _.flags |= 0x8 - - _.hitbox[0][0] = obj.cv_data.v0[0] - _.hitbox[0][1] = obj.cv_data.v0[2] - _.hitbox[0][2] = -obj.cv_data.v1[1] - _.hitbox[1][0] = obj.cv_data.v1[0] - _.hitbox[1][1] = obj.cv_data.v1[2] - _.hitbox[1][2] = -obj.cv_data.v0[1] - #} + can_use_cache = True + armature = None - if obj.cv_data.con0: + for mod in obj.modifiers:#{ + if mod.type == 'DATA_TRANSFER' or mod.type == 'SHRINKWRAP' or \ + mod.type == 'BOOLEAN' or mod.type == 'CURVE' or \ + mod.type == 'ARRAY': #{ - _.flags |= 0x100 - _.conevx[0] = obj.cv_data.conevx[0] - _.conevx[1] = obj.cv_data.conevx[2] - _.conevx[2] = -obj.cv_data.conevx[1] - _.conevy[0] = obj.cv_data.conevy[0] - _.conevy[1] = obj.cv_data.conevy[2] - _.conevy[2] = -obj.cv_data.conevy[1] - _.coneva[0] = obj.cv_data.coneva[0] - _.coneva[1] = obj.cv_data.coneva[2] - _.coneva[2] = -obj.cv_data.coneva[1] - _.conet = obj.cv_data.conet + can_use_cache = False #} - #} -#} - -# Classtype 100 -# -# Purpose: sends a signal to another entity -# -class classtype_trigger(Structure): -#{ - _pack_ = 1 - _fields_ = [("target",c_uint32)] - - def encode_obj(_, node,node_def ): - #{ - node.classtype = 100 - if node_def['obj'].cv_data.target: - _.target = node_def['obj'].cv_data.target.cv_data.uid - #} - - @staticmethod - def draw_scene_helpers( obj ): - #{ - global cv_view_verts, cv_view_colours - cv_draw_ucube( obj.matrix_world, [0,1,0,1] ) - if obj.cv_data.target: - cv_draw_arrow( obj.location, obj.cv_data.target.location, [1,1,1,1] ) - #} + if mod.type == 'ARMATURE': #{ + node.flags = 1 + armature = mod.object + rig_weight_groups = \ + ['0 [ROOT]']+[_.name for _ in sr_armature_bones(mod.object)] + node.armature_id = sr_compile.entity_ids[armature.name] - @staticmethod - def editor_interface( layout, obj ): - #{ - layout.prop( obj.cv_data, "target", text="Triggers" ) + POSE_OR_REST_CACHE = armature.data.pose_position + armature.data.pose_position = 'REST' + #} #} -#} - -# Classtype 101 -# -# Purpose: Gives the player an achievement. -# No cheating! 
You shouldn't use this entity anyway, since only ME can -# add achievements to the steam ;) -# -class classtype_logic_achievement(Structure): -#{ - _pack_ = 1 - _fields_ = [("pstr_name",c_uint32)] - def encode_obj(_, node,node_def ): - #{ - node.classtype = 101 - _.pstr_name = encoder_process_pstr( node_def['obj'].cv_data.strp ) + # Check the cache first + # + if can_use_cache and (obj.data.name in sr_compile.mesh_cache):#{ + ref = sr_compile.mesh_cache[obj.data.name] + node.submesh_start = ref[0] + node.submesh_count = ref[1] + sr_compile.mesh_data.extend(bytearray(node)) + return #} - @staticmethod - def editor_interface( layout, obj ): - #{ - layout.prop( obj.cv_data, "strp", text="Achievement ID" ) - #} -#} + # Compile a whole new mesh + # + node.submesh_start = len(sr_compile.submesh_data)//sizeof(mdl_submesh) + node.submesh_count = 0 -# Classtype 102 -# -# Purpose: sends a signal to another entity -# -class classtype_logic_relay(Structure): -#{ - _pack_ = 1 - _fields_ = [("targets",c_uint32*4)] + dgraph = bpy.context.evaluated_depsgraph_get() + data = obj.evaluated_get(dgraph).data + data.calc_loop_triangles() + data.calc_normals_split() + + # Mesh is split into submeshes based on their material + # + mat_list = data.materials if len(data.materials) > 0 else [None] + for material_id, mat in enumerate(mat_list): #{ + mref = {} - def encode_obj(_, node,node_def ): - #{ - node.classtype = 102 - obj = node_def['obj'] - if obj.cv_data.target: - _.targets[0] = obj.cv_data.target.cv_data.uid - if obj.cv_data.target1: - _.targets[1] = obj.cv_data.target1.cv_data.uid - if obj.cv_data.target2: - _.targets[2] = obj.cv_data.target2.cv_data.uid - if obj.cv_data.target3: - _.targets[3] = obj.cv_data.target3.cv_data.uid - #} + sm = mdl_submesh() + sm.indice_start = len(sr_compile.indice_data)//sizeof(c_uint32) + sm.vertex_start = len(sr_compile.vertex_data)//sizeof(mdl_vert) + sm.vertex_count = 0 + sm.indice_count = 0 + sm.material_id = sr_compile_material( mat ) - @staticmethod - def draw_scene_helpers( obj ): - #{ - global cv_view_verts, cv_view_colours - - if obj.cv_data.target: - cv_draw_arrow( obj.location, obj.cv_data.target.location, [1,1,1,1] ) - if obj.cv_data.target1: - cv_draw_arrow( obj.location, obj.cv_data.target1.location, [1,1,1,1] ) - if obj.cv_data.target2: - cv_draw_arrow( obj.location, obj.cv_data.target2.location, [1,1,1,1] ) - if obj.cv_data.target3: - cv_draw_arrow( obj.location, obj.cv_data.target3.location, [1,1,1,1] ) - #} + INF=99999999.99999999 + for i in range(3):#{ + sm.bbx[0][i] = INF + sm.bbx[1][i] = -INF + #} + + # Keep a reference to very very very similar vertices + # i have no idea how to speed it up. + # + vertex_reference = {} - @staticmethod - def editor_interface( layout, obj ): - #{ - layout.prop( obj.cv_data, "target", text="Triggers" ) - layout.prop( obj.cv_data, "target1", text="Triggers" ) - layout.prop( obj.cv_data, "target2", text="Triggers" ) - layout.prop( obj.cv_data, "target3", text="Triggers" ) - #} -#} + # Write the vertex / indice data + # + for tri_index, tri in enumerate(data.loop_triangles):#{ + if tri.material_index != material_id: + continue -# Classtype 14 -# -# Purpose: Plays some audio (44100hz .ogg vorbis only) -# NOTE: There is a 32mb limit on the audio buffer, world audio is -# decompressed and stored in signed 16 bit integers (2 bytes) -# per sample. 
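# For illustration: the vertex welding below keys a dictionary on attributes
# quantized to ~1e-4, so loops that differ only by floating point noise
# collapse onto a single mdl_vert. A reduced sketch of the same idea, with
# positions only (this helper is not part of the exporter):
#
def vertex_weld_key( co, tolerance=float(10**4) ):
#{
   return ( int(co[0]*tolerance+0.5),
            int(co[1]*tolerance+0.5),
            int(co[2]*tolerance+0.5) )
#}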
-# -# volume: not used if has 3D flag -# flags: -# AUDIO_FLAG_LOOP 0x1 -# AUDIO_FLAG_ONESHOT 0x2 (DONT USE THIS, it breaks semaphores) -# AUDIO_FLAG_SPACIAL_3D 0x4 (Probably what you want) -# AUDIO_FLAG_AUTO_START 0x8 (Play when the world starts) -# ...... -# the rest are just internal flags, only use the above 3. -# -class classtype_audio(Structure): -#{ - _pack_ = 1 - _fields_ = [("pstr_file",c_uint32), - ("flags",c_uint32), - ("volume",c_float)] + for j in range(3):#{ + vert = data.vertices[tri.vertices[j]] + li = tri.loops[j] + vi = data.loops[li].vertex_index + + # Gather vertex information + # + co = vert.co + norm = data.loops[li].normal + uv = (0,0) + colour = (255,255,255,255) + groups = [0,0,0,0] + weights = [0,0,0,0] - def encode_obj(_, node,node_def ): - #{ - node.classtype = 14 + # Uvs + # + if data.uv_layers: + uv = data.uv_layers.active.data[li].uv + + # Vertex Colours + # + if data.vertex_colors:#{ + colour = data.vertex_colors.active.data[li].color + colour = (int(colour[0]*255.0),\ + int(colour[1]*255.0),\ + int(colour[2]*255.0),\ + int(colour[3]*255.0)) + #} + + # Weight groups: truncates to the 3 with the most influence. The + # fourth bone ID is never used by the shader so it + # is always 0 + # + if armature:#{ + src_groups = [_ for _ in data.vertices[vi].groups \ + if obj.vertex_groups[_.group].name in \ + rig_weight_groups ] - obj = node_def['obj'] + weight_groups = sorted( src_groups, key = \ + lambda a: a.weight, reverse=True ) + tot = 0.0 + for ml in range(3):#{ + if len(weight_groups) > ml:#{ + g = weight_groups[ml] + name = obj.vertex_groups[g.group].name + weight = g.weight + weights[ml] = weight + groups[ml] = rig_weight_groups.index(name) + tot += weight + #} + #} + + if len(weight_groups) > 0:#{ + inv_norm = (1.0/tot) * 65535.0 + for ml in range(3):#{ + weights[ml] = int( weights[ml] * inv_norm ) + weights[ml] = min( weights[ml], 65535 ) + weights[ml] = max( weights[ml], 0 ) + #} + #} + #} + else:#{ + li1 = tri.loops[(j+1)%3] + vi1 = data.loops[li1].vertex_index + e0 = data.edges[ data.loops[li].edge_index ] - _.pstr_file = encoder_process_pstr( obj.cv_data.strp ) + if e0.use_freestyle_mark and \ + ((e0.vertices[0] == vi and e0.vertices[1] == vi1) or \ + (e0.vertices[0] == vi1 and e0.vertices[1] == vi)): + #{ + weights[0] = 1 + #} + #} - flags = 0x00 - if obj.cv_data.bp0: flags |= 0x1 - if obj.cv_data.bp1: flags |= 0x4 - if obj.cv_data.bp2: flags |= 0x8 + TOLERENCE = float(10**4) + key = (int(co[0]*TOLERENCE+0.5), + int(co[1]*TOLERENCE+0.5), + int(co[2]*TOLERENCE+0.5), + int(norm[0]*TOLERENCE+0.5), + int(norm[1]*TOLERENCE+0.5), + int(norm[2]*TOLERENCE+0.5), + int(uv[0]*TOLERENCE+0.5), + int(uv[1]*TOLERENCE+0.5), + colour[0], # these guys are already quantized + colour[1], # . + colour[2], # . + colour[3], # . 
+ weights[0], # v + weights[1], + weights[2], + weights[3], + groups[0], + groups[1], + groups[2], + groups[3]) + + if key in vertex_reference: + index = vertex_reference[key] + else:#{ + index = bytearray(c_uint32(sm.vertex_count)) + sm.vertex_count+=1 + + vertex_reference[key] = index + v = mdl_vert() + v.co[0] = co[0] + v.co[1] = co[2] + v.co[2] = -co[1] + v.norm[0] = norm[0] + v.norm[1] = norm[2] + v.norm[2] = -norm[1] + v.uv[0] = uv[0] + v.uv[1] = uv[1] + v.colour[0] = colour[0] + v.colour[1] = colour[1] + v.colour[2] = colour[2] + v.colour[3] = colour[3] + v.weights[0] = weights[0] + v.weights[1] = weights[1] + v.weights[2] = weights[2] + v.weights[3] = weights[3] + v.groups[0] = groups[0] + v.groups[1] = groups[1] + v.groups[2] = groups[2] + v.groups[3] = groups[3] + + for i in range(3):#{ + sm.bbx[0][i] = min( sm.bbx[0][i], v.co[i] ) + sm.bbx[1][i] = max( sm.bbx[1][i], v.co[i] ) + #} - if obj.cv_data.audio_format == 'stereo': - flags |= 0x200 - if obj.cv_data.audio_format == 'remain compressed': - flags |= 0x400 + sr_compile.vertex_data.extend(bytearray(v)) + #} + + sm.indice_count += 1 + sr_compile.indice_data.extend( index ) + #} + #} + + # Make sure bounding box isn't -inf -> inf if no vertices + # + if sm.vertex_count == 0: + for j in range(2): + for i in range(3): + sm.bbx[j][i] = 0 - _.flags = flags - _.volume = obj.cv_data.fltp + # Add submesh to encoder + # + sr_compile.submesh_data.extend( bytearray(sm) ) + node.submesh_count += 1 #} - - @staticmethod - def editor_interface( layout, obj ): - #{ - layout.prop( obj.cv_data, "strp", text = "File (.ogg)" ) - layout.prop( obj.cv_data, "bp0", text = "Looping" ) - layout.prop( obj.cv_data, "bp1", text = "3D Audio" ) - layout.prop( obj.cv_data, "bp2", text = "Auto Start" ) - layout.prop( obj.cv_data, "audio_format" ) - - layout.prop( obj.cv_data, "fltp", text = "Volume (0-1)" ) + if armature:#{ + armature.data.pose_position = POSE_OR_REST_CACHE #} - @staticmethod - def draw_scene_helpers( obj ): - #{ - global cv_view_verts, cv_view_colours - - cv_draw_sphere( obj.location, obj.scale[0], [1,1,0,1] ) - #} + # Save a reference to this node since we want to reuse the submesh indices + # later. 
+ sr_compile.mesh_cache[obj.data.name]=(node.submesh_start,node.submesh_count) + sr_compile.mesh_data.extend(bytearray(node)) #} -# Classtype 200 -# -# Purpose: world light -# -class classtype_world_light( Structure ): +def sr_compile_armature( obj ): #{ - _pack_ = 1 - _fields_ = [("type",c_uint32), - ("colour",c_float*4), - ("angle",c_float), - ("range",c_float)] - - def encode_obj(_, node, node_def): - #{ - node.classtype = 200 - - obj = node_def['obj'] - data = obj.data - _.colour[0] = data.color[0] - _.colour[1] = data.color[1] - _.colour[2] = data.color[2] - _.colour[3] = data.energy - _.range = data.cutoff_distance # this has to be manually set - # TODO: At some point, automate a min - # threshold value - - if obj.data.type == 'POINT': - #{ - _.type = 0 - _.angle = 0.0 + node = mdl_armature() + node.bone_start = len(sr_compile.bone_data)//sizeof(mdl_bone) + node.bone_count = 0 + node.anim_start = len(sr_compile.anim_data)//sizeof(mdl_animation) + node.anim_count = 0 + + bones = [_ for _ in sr_armature_bones(obj)] + bones_names = [None]+[_.name for _ in bones] + + for b in bones:#{ + bone = mdl_bone() + if b.use_deform: bone.flags = 0x1 + if b.parent: bone.parent = bones_names.index(b.parent.name) + + bone.collider = int(b.SR_data.collider) + + if bone.collider>0:#{ + bone.hitbox[0][0] = b.SR_data.collider_min[0] + bone.hitbox[0][1] = b.SR_data.collider_min[2] + bone.hitbox[0][2] = -b.SR_data.collider_max[1] + bone.hitbox[1][0] = b.SR_data.collider_max[0] + bone.hitbox[1][1] = b.SR_data.collider_max[2] + bone.hitbox[1][2] = -b.SR_data.collider_min[1] #} - elif obj.data.type == 'SPOT': - #{ - _.type = 1 - _.angle = data.spot_size*0.5 + + if b.SR_data.cone_constraint:#{ + bone.flags |= 0x4 + bone.conevx[0] = b.SR_data.conevx[0] + bone.conevx[1] = b.SR_data.conevx[2] + bone.conevx[2] = -b.SR_data.conevx[1] + bone.conevy[0] = b.SR_data.conevy[0] + bone.conevy[1] = b.SR_data.conevy[2] + bone.conevy[2] = -b.SR_data.conevy[1] + bone.coneva[0] = b.SR_data.coneva[0] + bone.coneva[1] = b.SR_data.coneva[2] + bone.coneva[2] = -b.SR_data.coneva[1] + bone.conet = b.SR_data.conet #} - if data.cv_data.bp0: - _.type += 2 - #} + bone.co[0] = b.head_local[0] + bone.co[1] = b.head_local[2] + bone.co[2] = -b.head_local[1] + bone.end[0] = b.tail_local[0] - bone.co[0] + bone.end[1] = b.tail_local[2] - bone.co[1] + bone.end[2] = -b.tail_local[1] - bone.co[2] + bone.pstr_name = sr_compile_string( b.name ) + + for c in obj.pose.bones[b.name].constraints:#{ + if c.type == 'IK':#{ + bone.flags |= 0x2 + bone.ik_target = bones_names.index(c.subtarget) + bone.ik_pole = bones_names.index(c.pole_subtarget) + #} + #} - @staticmethod - def editor_interface( layout, obj ): - #{ - pass + node.bone_count += 1 + sr_compile.bone_data.extend(bytearray(bone)) #} -#} -# Classtype 201 -# -# Purpose: lighting settings for world -# -class classtype_lighting_info(Structure): -#{ - _pack_ = 1 - _fields_ = [("colours",(c_float*3)*3), - ("directions",(c_float*2)*3), - ("states",c_uint32*3), - ("shadow_spread",c_float), - ("shadow_length",c_float), - ("ambient",c_float*3)] - - def encode_obj(_, node, node_def): - #{ - node.classtype = 201 - - # TODO - #} - - @staticmethod - def editor_interface( layout, obj ): - #{ - pass - #} -#} - -class classtype_spawn_link(Structure): -#{ - _pack_ = 1 - _fields_ = [("connections",c_uint32*4)] - - def encode_obj(_, node,node_def ): - #{ - node.classtype = 0 - #} - - @staticmethod - def editor_interface( layout, obj ): - #{ - pass - #} + # Compile anims + # + if obj.animation_data and 
sr_compile.pack_animations: #{ + # So we can restore later + # + previous_frame = bpy.context.scene.frame_current + previous_action = obj.animation_data.action + POSE_OR_REST_CACHE = obj.data.pose_position + obj.data.pose_position = 'POSE' - @staticmethod - def draw_scene_helpers( obj ): - #{ - global cv_view_verts, cv_view_colours + for NLALayer in obj.animation_data.nla_tracks:#{ + for NLAStrip in NLALayer.strips:#{ + # set active + # + for a in bpy.data.actions:#{ + if a.name == NLAStrip.name:#{ + obj.animation_data.action = a + break + #} + #} + + # Clip to NLA settings + # + anim_start = int(NLAStrip.action_frame_start) + anim_end = int(NLAStrip.action_frame_end) - count = 0 + # Export strips + # + anim = mdl_animation() + anim.pstr_name = sr_compile_string( NLAStrip.action.name ) + anim.rate = 30.0 + anim.keyframe_start = len(sr_compile.keyframe_data)//\ + sizeof(mdl_transform) + anim.length = anim_end-anim_start + + i = 0 + # Export the keyframes + for frame in range(anim_start,anim_end):#{ + bpy.context.scene.frame_set(frame) + + for rb in bones:#{ + pb = obj.pose.bones[rb.name] + + # relative bone matrix + if rb.parent is not None:#{ + offset_mtx = rb.parent.matrix_local + offset_mtx = offset_mtx.inverted_safe() @ \ + rb.matrix_local + + inv_parent = pb.parent.matrix @ offset_mtx + inv_parent.invert_safe() + fpm = inv_parent @ pb.matrix + #} + else:#{ + bone_mtx = rb.matrix.to_4x4() + local_inv = rb.matrix_local.inverted_safe() + fpm = bone_mtx @ local_inv @ pb.matrix + #} - for obj1 in bpy.context.collection.objects: - #{ - if (obj1.cv_data.classtype != 'classtype_spawn_link') and \ - (obj1.cv_data.classtype != 'classtype_spawn') : - continue + loc, rot, sca = fpm.decompose() + + # rotation + lc_m = pb.matrix_channel.to_3x3() + if pb.parent is not None:#{ + smtx = pb.parent.matrix_channel.to_3x3() + lc_m = smtx.inverted() @ lc_m + #} + rq = lc_m.to_quaternion() + + kf = mdl_transform() + kf.co[0] = loc[0] + kf.co[1] = loc[2] + kf.co[2] = -loc[1] + kf.q[0] = rq[1] + kf.q[1] = rq[3] + kf.q[2] = -rq[2] + kf.q[3] = rq[0] + kf.s[0] = sca[0] + kf.s[1] = sca[1] + kf.s[2] = sca[2] + sr_compile.keyframe_data.extend(bytearray(kf)) + + i+=1 + #} + #} + + # Add to animation buffer + # + sr_compile.anim_data.extend(bytearray(anim)) + node.anim_count += 1 - if (obj1.location - obj.location).length < 40.0: - #{ - cv_draw_line( obj.location, obj1.location, [1,1,1,1] ) - count +=1 + # Report progress + # + print( F"[SR] | anim( {NLAStrip.action.name} )" ) #} - - if count == 4: - break #} - cv_draw_sphere( obj.location, 20.0, [0.5,0,0.2,0.4] ) + # Restore context to how it was before + # + bpy.context.scene.frame_set( previous_frame ) + obj.animation_data.action = previous_action + obj.data.pose_position = POSE_OR_REST_CACHE #} -#} - -# ---------------------------------------------------------------------------- # -# # -# Compiler section # -# # -# ---------------------------------------------------------------------------- # -# Current encoder state -# -g_encoder = None + sr_compile.armature_data.extend(bytearray(node)) +#} -# Reset encoder -# -def encoder_init( collection ): +def sr_ent_push( struct ): #{ - global g_encoder - - g_encoder = \ - { - # The actual file header - # - 'header': mdl_header(), - - # Options - # - 'pack_textures': collection.cv_data.pack_textures, - - # Compiled data chunks (each can be read optionally by the client) - # - 'data': - { - #1--------------------------------- - 'node': [], # Metadata 'chunk' - 'submesh': [], - 'material': [], - 'texture': [], - 'anim': [], - 
'entdata': bytearray(), # variable width - 'strings': bytearray(), # . - #2--------------------------------- - 'keyframe': [], # Animations - #3--------------------------------- - 'vertex': [], # Mesh data - 'indice': [], - #4--------------------------------- - 'pack': bytearray() # Other generic packed data - }, + clase = type(struct).__name__ - # All objects of the model in their final heirachy - # - "uid_count": 1, - "scene_graph":{}, - "graph_lookup":{}, - - # Allows us to reuse definitions - # - 'string_cache':{}, - 'mesh_cache': {}, - 'material_cache': {}, - 'texture_cache': {} - } - - g_encoder['header'].identifier = 0xABCD0000 - g_encoder['header'].version = 1 - - # Add fake NoneID material and texture - # - none_material = mdl_material() - none_material.pstr_name = encoder_process_pstr( "" ) - none_material.texture_id = 0 - - none_texture = mdl_texture() - none_texture.pstr_name = encoder_process_pstr( "" ) - none_texture.pack_offset = 0 - none_texture.pack_length = 0 - - g_encoder['data']['material'] += [none_material] - g_encoder['data']['texture'] += [none_texture] + if clase not in sr_compile.entity_data:#{ + sr_compile.entity_data[ clase ] = bytearray() + sr_compile.entity_info[ clase ] = { 'size': sizeof(struct) } + #} - g_encoder['data']['pack'].extend( b'datapack\0\0\0\0\0\0\0\0' ) + index = len(sr_compile.entity_data[ clase ])//sizeof(struct) + sr_compile.entity_data[ clase ].extend( bytearray(struct) ) + return index +#} - # Add root node - # - root = mdl_node() - root.co[0] = 0 - root.co[1] = 0 - root.co[2] = 0 - root.q[0] = 0 - root.q[1] = 0 - root.q[2] = 0 - root.q[3] = 1 - root.s[0] = 1 - root.s[1] = 1 - root.s[2] = 1 - root.pstr_name = encoder_process_pstr('') - root.submesh_start = 0 - root.submesh_count = 0 - root.offset = 0 - root.classtype = 0 - root.parent = 0xffffffff - - g_encoder['data']['node'] += [root] -#} - - -# fill with 0x00 until a multiple of align. Returns how many bytes it added -# -def bytearray_align_to( buffer, align, offset=0 ): +def sr_array_title( arr, name, count, size, offset ): #{ - count = 0 - - while ((len(buffer)+offset) % align) != 0: - #{ - buffer.extend( b'\0' ) - count += 1 + for i in range(len(name)):#{ + arr.name[i] = ord(name[i]) #} - - return count + arr.file_offset = offset + arr.item_count = count + arr.item_size = size #} -# Add a string to the string buffer except if it already exists there then we -# just return its ID. 
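# Illustrative only: sr_compile_string() (defined further up) replaces the
# old encoder_process_pstr() below. Strings are interned into one shared,
# NUL terminated, 4-byte aligned block and the return value is the byte
# offset of the string, so repeated names cost a single entry:
#
def sr_compile_string_example():
#{
   # stand-in for the buffer reset normally done at the top of sr_compile()
   sr_compile.string_cache = {}
   sr_compile.string_data = bytearray()

   assert sr_compile_string( "skate" ) == 0
   assert sr_compile_string( "rift" ) == 8 # "skate\0" is padded to 8 bytes
   assert sr_compile_string( "skate" ) == 0 # cache hit
#}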
-# -def encoder_process_pstr( s ): +def sr_compile( collection ): #{ - global g_encoder + print( F"[SR] compiler begin ({collection.name}.mdl)" ) - cache = g_encoder['string_cache'] + #settings + sr_compile.pack_textures = collection.SR_data.pack_textures + sr_compile.pack_animations = collection.SR_data.animations - if s in cache: - return cache[s] + # caches + sr_compile.string_cache = {} + sr_compile.mesh_cache = {} + sr_compile.material_cache = {} + sr_compile.texture_cache = {} - cache[s] = len( g_encoder['data']['strings'] ) - - buffer = g_encoder['data']['strings'] - buffer.extend( s.encode('utf-8') ) - buffer.extend( b'\0' ) + # compiled data + sr_compile.mesh_data = bytearray() + sr_compile.submesh_data = bytearray() + sr_compile.vertex_data = bytearray() + sr_compile.indice_data = bytearray() + sr_compile.bone_data = bytearray() + sr_compile.material_data = bytearray() + sr_compile.armature_data = bytearray() + sr_compile.anim_data = bytearray() + sr_compile.keyframe_data = bytearray() + sr_compile.texture_data = bytearray() - bytearray_align_to( buffer, 4 ) - return cache[s] -#} - -def get_texture_resource_name( img ): -#{ - return os.path.splitext( img.name )[0] -#} - -# Pack a texture -# -def encoder_process_texture( img ): -#{ - global g_encoder + # just bytes not structures + sr_compile.string_data = bytearray() + sr_compile.pack_data = bytearray() - if img == None: - return 0 + # variable + sr_compile.entity_data = {} + sr_compile.entity_info = {} - cache = g_encoder['texture_cache'] - buffer = g_encoder['data']['texture'] - pack = g_encoder['data']['pack'] + print( F"[SR] assign entity ID's" ) + sr_compile.entities = {} + sr_compile.entity_ids = {} - name = get_texture_resource_name( img ) + mesh_count = 0 + for obj in collection.all_objects: #{ + if obj.type == 'MESH': mesh_count += 1 - if name in cache: - return cache[name] - - cache[name] = len( buffer ) - - tex = mdl_texture() - tex.pstr_name = encoder_process_pstr( name ) + ent_type = obj_ent_type( obj ) + if ent_type == 'none': continue - if g_encoder['pack_textures']: - #{ - tex.pack_offset = len( pack ) - pack.extend( qoi_encode( img ) ) - tex.pack_length = len( pack ) - tex.pack_offset + if ent_type not in sr_compile.entities: sr_compile.entities[ent_type] = [] + sr_compile.entity_ids[obj.name] = len( sr_compile.entities[ent_type] ) + sr_compile.entities[ent_type] += [obj] #} - else: - tex.pack_offset = 0 - buffer += [ tex ] - return cache[name] -#} + print( F"[SR] Compiling geometry" ) + i=0 + for obj in collection.all_objects:#{ + if obj.type == 'MESH':#{ + i+=1 + print( F'[SR] {i: 3}/{mesh_count} {obj.name:<40}', end='\r' ) + sr_compile_mesh( obj ) + #} + #} -def material_tex_image(v): -#{ - return { - "Image Texture": - { - "image": F"{v}" - } - } -#} + checkpoint_count = 0 + pathindice_count = 0 -cxr_graph_mapping = \ -{ - # Default shader setup - "Principled BSDF": - { - "Base Color": - { - "Image Texture": - { - "image": "tex_diffuse" - }, - "Mix": - { - "A": material_tex_image("tex_diffuse"), - "B": material_tex_image("tex_decal") - }, - }, - "Normal": - { - "Normal Map": - { - "Color": material_tex_image("tex_normal") - } - } - } -} + for ent_type, arr in sr_compile.entities.items():#{ + print(F"[SR] Compiling {len(arr)} {ent_type}{'s' if len(arr)>1 else ''}") -# https://harrygodden.com/git/?p=convexer.git;a=blob;f=__init__.py;#l1164 -# -def material_info(mat): -#{ - info = {} + for i in range(len(arr)):#{ + obj = arr[i] - # Using the cv_graph_mapping as a reference, go through the shader - # graph and 
gather all $props from it. - # - def _graph_read( node_def, node=None, depth=0 ): - #{ - nonlocal mat - nonlocal info - - # Find rootnodes - # - if node == None: - #{ - _graph_read.extracted = [] + print( F"[SR] {i+1: 3}/{len(arr)} {obj.name:<40} ",end='\r' ) - for node_idname in node_def: - #{ - for n in mat.node_tree.nodes: - #{ - if n.name == node_idname: - #{ - node_def = node_def[node_idname] - node = n - break - #} + if ent_type == 'mdl_armature': sr_compile_armature(obj) + elif ent_type == 'ent_light': #{ + light = ent_light() + compile_obj_transform( obj, light.transform ) + light.daytime = obj.data.SR_data.daytime + if obj.data.type == 'POINT':#{ + light.type = 0 + #} + elif obj.data.type == 'SPOT':#{ + light.type = 1 + light.angle = obj.data.spot_size*0.5 #} + light.range = obj.data.cutoff_distance + light.colour[0] = obj.data.color[0] + light.colour[1] = obj.data.color[1] + light.colour[2] = obj.data.color[2] + light.colour[3] = obj.data.energy + sr_ent_push( light ) #} - #} + elif ent_type == 'ent_gate': #{ + gate = ent_gate() + gate.type = 0 + obj_data = obj.SR_data.ent_gate[0] + mesh_data = obj.data.SR_data.ent_gate[0] + if obj_data.target:#{ + gate.target = sr_compile.entity_ids[obj_data.target.name] + gate.type = 1 + #} + gate.dimensions[0] = mesh_data.dimensions[0] + gate.dimensions[1] = mesh_data.dimensions[1] + gate.dimensions[2] = mesh_data.dimensions[2] - for link in node_def: - #{ - link_def = node_def[link] + q = [obj.matrix_local.to_quaternion(), (0,0,0,1)] + co = [obj.matrix_world @ Vector((0,0,0)), (0,0,0)] - if isinstance( link_def, dict ): - #{ - node_link = None - for x in node.inputs: - #{ - if isinstance( x, bpy.types.NodeSocketColor ): - #{ - if link == x.name: - #{ - node_link = x - break - #} - #} + if obj_data.target:#{ + q[1] = obj_data.target.matrix_local.to_quaternion() + co[1]= obj_data.target.matrix_world @ Vector((0,0,0)) + #} + + # Setup transform + # + for x in range(2):#{ + gate.co[x][0] = co[x][0] + gate.co[x][1] = co[x][2] + gate.co[x][2] = -co[x][1] + gate.q[x][0] = q[x][1] + gate.q[x][1] = q[x][3] + gate.q[x][2] = -q[x][2] + gate.q[x][3] = q[x][0] #} - if node_link and node_link.is_linked: - #{ - # look for definitions for the connected node type - # - from_node = node_link.links[0].from_node - - node_name = from_node.name.split('.')[0] - if node_name in link_def: - #{ - from_node_def = link_def[ node_name ] + sr_ent_push( gate ) + #} + elif ent_type == 'ent_spawn': #{ + spawn = ent_spawn() + compile_obj_transform( obj, spawn.transform ) + obj_data = obj.SR_data.ent_spawn[0] + spawn.pstr_name = sr_compile_string( obj_data.name ) + sr_ent_push( spawn ) + #} + elif ent_type == 'ent_route': #{ + obj_data = obj.SR_data.ent_route[0] + route = ent_route() + route.pstr_name = sr_compile_string( obj_data.alias ) #TODO + route.checkpoints_start = checkpoint_count + route.checkpoints_count = 0 + + for ci in range(3): + route.colour[ci] = obj_data.colour[ci] + route.colour[3] = 1.0 + + compile_obj_transform( obj, route.transform ) + + checkpoints = obj_data.gates + route_nodes = [] + + for uc in obj.users_collection[0].objects:#{ + uc_type = obj_ent_type( uc ) + if uc_type == 'ent_gate' or uc_type == 'ent_route_node': + route_nodes += [uc] + #} + graph = node_graph( route_nodes ) - _graph_read( from_node_def, from_node, depth+1 ) + for i in range(len(checkpoints)):#{ + gi = checkpoints[i].target + gj = checkpoints[(i+1)%len(checkpoints)].target + gate = gi + + if gi:#{ + dest = gi.SR_data.ent_gate[0].target + gi = dest #} + + if gi==gj: continue # 
error? + if not gi or not gj: continue + + checkpoint = ent_checkpoint() + checkpoint.gate_index = sr_compile.entity_ids[gate.name] + checkpoint.path_start = pathindice_count + checkpoint.path_count = 0 - # No definition! :( - # TODO: Make a warning for this? - #} - else: - #{ - if "default" in link_def: - #{ - prop = link_def['default'] - info[prop] = node_link.default_value + path = dijkstra( graph, gj.name, gi.name ) + if path:#{ + for pi in range(1,len(path)-1):#{ + pathindice = ent_path_index() + pathindice.index = sr_compile.entity_ids[path[pi]] + sr_ent_push( pathindice ) + + checkpoint.path_count += 1 + pathindice_count += 1 + #} #} + + sr_ent_push( checkpoint ) + route.checkpoints_count += 1 + checkpoint_count += 1 #} + + sr_ent_push( route ) #} - else: - #{ - prop = link_def - info[prop] = getattr( node, link ) + elif ent_type == 'ent_route_node':#{ + rn = ent_route_node() + rn.co[0] = obj.location[0] + rn.co[1] = obj.location[2] + rn.co[2] = -obj.location[1] + sr_ent_push( rn ) #} #} #} + + print( F"[SR] Writing file" ) - _graph_read( cxr_graph_mapping ) - return info -#} + file_array_instructions = {} + file_offset = 0 -# Add a material to the material buffer. Returns 0 (None ID) if invalid -# -def encoder_process_material( mat ): -#{ - global g_encoder + def _write_array( name, item_size, data ):#{ + nonlocal file_array_instructions, file_offset - if mat == None: - return 0 + count = len(data)//item_size + file_array_instructions[name] = {'count':count, 'size':item_size,\ + 'data':data, 'offset': file_offset} + file_offset += len(data) + file_offset = int_align_to( file_offset, 8 ) + #} - cache = g_encoder['material_cache'] - buffer = g_encoder['data']['material'] + _write_array( 'strings', 1, sr_compile.string_data ) + _write_array( 'mdl_mesh', sizeof(mdl_mesh), sr_compile.mesh_data ) + _write_array( 'mdl_submesh', sizeof(mdl_submesh), sr_compile.submesh_data ) + _write_array( 'mdl_material', sizeof(mdl_material), sr_compile.material_data) + _write_array( 'mdl_texture', sizeof(mdl_texture), sr_compile.texture_data) + _write_array( 'mdl_armature', sizeof(mdl_armature), sr_compile.armature_data) + _write_array( 'mdl_bone', sizeof(mdl_bone), sr_compile.bone_data ) - if mat.name in cache: - return cache[mat.name] + for name, buffer in sr_compile.entity_data.items():#{ + _write_array( name, sr_compile.entity_info[name]['size'], buffer ) + #} - cache[mat.name] = len( buffer ) + _write_array( 'mdl_animation', sizeof(mdl_animation), sr_compile.anim_data) + _write_array( 'mdl_keyframe', sizeof(mdl_transform),sr_compile.keyframe_data) + _write_array( 'mdl_vert', sizeof(mdl_vert), sr_compile.vertex_data ) + _write_array( 'mdl_indice', sizeof(c_uint32), sr_compile.indice_data ) + _write_array( 'pack', 1, sr_compile.pack_data ) - dest = mdl_material() - dest.pstr_name = encoder_process_pstr( mat.name ) - - flags = 0x00 - if mat.cv_data.collision: - flags |= 0x2 - if mat.cv_data.skate_surface: flags |= 0x1 - if mat.cv_data.grind_surface: flags |= (0x8|0x1) + header_size = int_align_to( sizeof(mdl_header), 8 ) + index_size = int_align_to( sizeof(mdl_array)*len(file_array_instructions),8 ) - if mat.cv_data.grow_grass: flags |= 0x4 - dest.flags = flags + folder = bpy.path.abspath(bpy.context.scene.SR_data.export_dir) + path = F"{folder}{collection.name}.mdl" + print( path ) - if mat.cv_data.surface_prop == 'concrete': dest.surface_prop = 0 - if mat.cv_data.surface_prop == 'wood': dest.surface_prop = 1 - if mat.cv_data.surface_prop == 'grass': dest.surface_prop = 2 - if 
mat.cv_data.surface_prop == 'tiles': dest.surface_prop = 3 + fp = open( path, "wb" ) + header = mdl_header() + header.version = 40 + sr_array_title( header.arrays, \ + 'index', len(file_array_instructions), \ + sizeof(mdl_array), header_size ) - if mat.cv_data.shader == 'standard': dest.shader = 0 - if mat.cv_data.shader == 'standard_cutout': dest.shader = 1 - if mat.cv_data.shader == 'terrain_blend': - #{ - dest.shader = 2 + fp.write( bytearray_align_to( bytearray(header), 8 ) ) - dest.colour[0] = pow( mat.cv_data.sand_colour[0], 1.0/2.2 ) - dest.colour[1] = pow( mat.cv_data.sand_colour[1], 1.0/2.2 ) - dest.colour[2] = pow( mat.cv_data.sand_colour[2], 1.0/2.2 ) - dest.colour[3] = 1.0 + print( F'[SR] {"name":>16}| count | offset' ) + index = bytearray() + for name,info in file_array_instructions.items():#{ + arr = mdl_array() + offset = info['offset'] + header_size + index_size + sr_array_title( arr, name, info['count'], info['size'], offset ) + index.extend( bytearray(arr) ) - dest.colour1[0] = mat.cv_data.blend_offset[0] - dest.colour1[1] = mat.cv_data.blend_offset[1] + print( F'[SR] {name:>16}| {info["count"]: 8} '+\ + F' 0x{info["offset"]:02x}' ) #} + fp.write( bytearray_align_to( index, 8 ) ) + #bytearray_print_hex( index ) - if mat.cv_data.shader == 'vertex_blend': - #{ - dest.shader = 3 - - dest.colour1[0] = mat.cv_data.blend_offset[0] - dest.colour1[1] = mat.cv_data.blend_offset[1] + for name,info in file_array_instructions.items():#{ + fp.write( bytearray_align_to( info['data'], 8 ) ) #} - if mat.cv_data.shader == 'water': - #{ - dest.shader = 4 - - dest.colour[0] = pow( mat.cv_data.shore_colour[0], 1.0/2.2 ) - dest.colour[1] = pow( mat.cv_data.shore_colour[1], 1.0/2.2 ) - dest.colour[2] = pow( mat.cv_data.shore_colour[2], 1.0/2.2 ) - dest.colour[3] = 1.0 - dest.colour1[0] = pow( mat.cv_data.ocean_colour[0], 1.0/2.2 ) - dest.colour1[1] = pow( mat.cv_data.ocean_colour[1], 1.0/2.2 ) - dest.colour1[2] = pow( mat.cv_data.ocean_colour[2], 1.0/2.2 ) - dest.colour1[3] = 1.0 - #} - - inf = material_info( mat ) - - if mat.cv_data.shader == 'standard' or \ - mat.cv_data.shader == 'standard_cutout' or \ - mat.cv_data.shader == 'terrain_blend' or \ - mat.cv_data.shader == 'vertex_blend': - #{ - if 'tex_diffuse' in inf: - dest.tex_diffuse = encoder_process_texture(inf['tex_diffuse']) - #} + fp.close() - buffer += [dest] - return cache[mat.name] + print( '[SR] done' ) #} -# Create a tree structure containing all the objects in the collection -# -def encoder_build_scene_graph( collection ): +class SR_SCENE_SETTINGS(bpy.types.PropertyGroup): #{ - global g_encoder + use_hidden: bpy.props.BoolProperty( name="use hidden", default=False ) + export_dir: bpy.props.StringProperty( name="Export Dir", subtype='DIR_PATH' ) + gizmos: bpy.props.BoolProperty( name="Draw Gizmos", default=True ) - print( " creating scene graph" ) + panel: bpy.props.EnumProperty( + name='Panel', + description='', + items=[ + ('EXPORT', 'Export', '', 'MOD_BUILD',0), + ('ENTITY', 'Entity', '', 'MONKEY',1), + ('SETTINGS', 'Settings', 'Settings', 'PREFERENCES',2), + ], + ) +#} - # initialize root - # - graph = g_encoder['scene_graph'] - graph_lookup = g_encoder['graph_lookup'] - graph["obj"] = None - graph["depth"] = 0 - graph["children"] = [] - graph["uid"] = 0 - graph["parent"] = None - - def _new_uid(): - #{ - global g_encoder - uid = g_encoder['uid_count'] - g_encoder['uid_count'] += 1 - return uid - #} +class SR_COLLECTION_SETTINGS(bpy.types.PropertyGroup): +#{ + pack_textures: bpy.props.BoolProperty( name="Pack Textures", 
default=False ) + animations: bpy.props.BoolProperty( name="Export animation", default=True) +#} - for obj in collection.all_objects: - #{ - #if obj.parent: continue +def sr_get_mirror_bone( bones ): +#{ + side = bones.active.name[-1:] + other_name = bones.active.name[:-1] + if side == 'L': other_name += 'R' + elif side == 'R': other_name += 'L' + else: return None - def _extend( p, n, d ): - #{ - nonlocal collection - - uid = _new_uid() - tree = {} - tree["uid"] = uid - tree["children"] = [] - tree["depth"] = d - tree["obj"] = n - tree["parent"] = p - n.cv_data.uid = uid - - # Descend into amature - # - if n.type == 'ARMATURE': - #{ - tree["bones"] = [None] # None is the root transform - tree["ik_count"] = 0 - tree["collider_count"] = 0 - tree["compile_animation"] = collection.cv_data.animations - - # Here also collects some information about constraints, ik and - # counts colliders for the armature. - # - def _extendb( p, n, d ): - #{ - nonlocal tree - - btree = {} - btree["bone"] = n - btree["linked_armature"] = tree - btree["uid"] = _new_uid() - btree["children"] = [] - btree["depth"] = d - btree["parent"] = p - tree["bones"] += [n.name] - - for c in n.children: - #{ - _extendb( btree, c, d+1 ) - #} - - for c in tree['obj'].pose.bones[n.name].constraints: - #{ - if c.type == 'IK': - #{ - btree["ik_target"] = c.subtarget - btree["ik_pole"] = c.pole_subtarget - tree["ik_count"] += 1 - #} - #} + for b in bones:#{ + if b.name == other_name: + return b + #} + + return None +#} - if n.cv_data.collider != 'collider_none': - tree['collider_count'] += 1 +class SR_MIRROR_BONE_X(bpy.types.Operator): +#{ + bl_idname="skaterift.mirror_bone" + bl_label="Mirror bone attributes - SkateRift" - btree['deform'] = n.use_deform - p['children'] += [btree] - #} + def execute(_,context): + #{ + active_object = context.active_object + bones = active_object.data.bones + a = bones.active + b = sr_get_mirror_bone( bones ) - for b in n.data.bones: - if not b.parent: - _extendb( tree, b, d+1 ) - #} - - # Recurse into children of this object - # - for obj1 in n.children: - #{ - for c1 in obj1.users_collection: - #{ - if c1 == collection: - #{ - _extend( tree, obj1, d+1 ) - break - #} - #} - #} + if not b: return {'FINISHED'} - p["children"] += [tree] - graph_lookup[n] = tree + b.SR_data.collider = a.SR_data.collider + def _v3copyflipy( a, b ):#{ + b[0] = a[0] + b[1] = -a[1] + b[2] = a[2] #} - _extend( graph, obj, 1 ) + _v3copyflipy( a.SR_data.collider_min, b.SR_data.collider_min ) + _v3copyflipy( a.SR_data.collider_max, b.SR_data.collider_max ) + b.SR_data.collider_min[1] = -a.SR_data.collider_max[1] + b.SR_data.collider_max[1] = -a.SR_data.collider_min[1] - #} -#} + b.SR_data.cone_constraint = a.SR_data.cone_constraint + _v3copyflipy( a.SR_data.conevx, b.SR_data.conevy ) + _v3copyflipy( a.SR_data.conevy, b.SR_data.conevx ) + _v3copyflipy( a.SR_data.coneva, b.SR_data.coneva ) -# Kind of a useless thing i made but it looks cool and adds complexity!!1 -# -def encoder_graph_iterator( root ): -#{ - for c in root['children']: - #{ - yield c - yield from encoder_graph_iterator(c) + b.SR_data.conet = a.SR_data.conet + + # redraw + ob = bpy.context.scene.objects[0] + ob.hide_render = ob.hide_render + return {'FINISHED'} #} #} - -# Push a vertex into the model file, or return a cached index (c_uint32) -# -def encoder_vertex_push( vertex_reference, co,norm,uv,colour,groups,weights ): +class SR_COMPILE(bpy.types.Operator): #{ - global g_encoder - buffer = g_encoder['data']['vertex'] + bl_idname="skaterift.compile_all" + 
bl_label="Compile All" - TOLERENCE = 4 - m = float(10**TOLERENCE) - - # Would be nice to know if this can be done faster than it currently runs, - # its quite slow. - # - key = (int(co[0]*m+0.5), - int(co[1]*m+0.5), - int(co[2]*m+0.5), - int(norm[0]*m+0.5), - int(norm[1]*m+0.5), - int(norm[2]*m+0.5), - int(uv[0]*m+0.5), - int(uv[1]*m+0.5), - colour[0], # these guys are already quantized - colour[1], # . - colour[2], # . - colour[3], # . - weights[0], # v - weights[1], - weights[2], - weights[3], - groups[0], - groups[1], - groups[2], - groups[3]) - - if key in vertex_reference: - return vertex_reference[key] - else: + def execute(_,context): #{ - index = c_uint32( len(vertex_reference) ) - vertex_reference[key] = index - - v = mdl_vert() - v.co[0] = co[0] - v.co[1] = co[2] - v.co[2] = -co[1] - v.norm[0] = norm[0] - v.norm[1] = norm[2] - v.norm[2] = -norm[1] - v.uv[0] = uv[0] - v.uv[1] = uv[1] - v.colour[0] = colour[0] - v.colour[1] = colour[1] - v.colour[2] = colour[2] - v.colour[3] = colour[3] - v.weights[0] = weights[0] - v.weights[1] = weights[1] - v.weights[2] = weights[2] - v.weights[3] = weights[3] - v.groups[0] = groups[0] - v.groups[1] = groups[1] - v.groups[2] = groups[2] - v.groups[3] = groups[3] - - buffer += [v] - return index + view_layer = bpy.context.view_layer + for col in view_layer.layer_collection.children["export"].children: + if not col.hide_viewport or bpy.context.scene.SR_data.use_hidden: + sr_compile( bpy.data.collections[col.name] ) + + return {'FINISHED'} #} #} - -# Compile a mesh (or use one from the cache) onto node, based on node_def -# No return value -# -def encoder_compile_mesh( node, node_def ): +class SR_COMPILE_THIS(bpy.types.Operator): #{ - global g_encoder - - graph = g_encoder['scene_graph'] - graph_lookup = g_encoder['graph_lookup'] - mesh_cache = g_encoder['mesh_cache'] - obj = node_def['obj'] - armature_def = None - can_use_cache = True - - # Check for modifiers that typically change the data per-instance - # there is no well defined rule for the choices here, its just what i've - # needed while producing the game. - # - # It may be possible to detect these cases automatically. 
- # - for mod in obj.modifiers: - #{ - if mod.type == 'DATA_TRANSFER' or mod.type == 'SHRINKWRAP' or \ - mod.type == 'BOOLEAN' or mod.type == 'CURVE' or \ - mod.type == 'ARRAY': - #{ - can_use_cache = False - #} - - if mod.type == 'ARMATURE': - armature_def = graph_lookup[mod.object] + bl_idname="skaterift.compile_this" + bl_label="Compile This collection" - # Check the cache first - # - if can_use_cache and (obj.data.name in mesh_cache): + def execute(_,context): #{ - ref = mesh_cache[obj.data.name] - node.submesh_start = ref.submesh_start - node.submesh_count = ref.submesh_count - return + col = bpy.context.collection + sr_compile( col ) + + return {'FINISHED'} #} +#} - # Compile a whole new mesh - # - node.submesh_start = len( g_encoder['data']['submesh'] ) - node.submesh_count = 0 +class SR_INTERFACE(bpy.types.Panel): +#{ + bl_idname = "VIEW3D_PT_skate_rift" + bl_label = "Skate Rift" + bl_space_type = 'VIEW_3D' + bl_region_type = 'UI' + bl_category = "Skate Rift" - dgraph = bpy.context.evaluated_depsgraph_get() - data = obj.evaluated_get(dgraph).data - data.calc_loop_triangles() - data.calc_normals_split() - - # Mesh is split into submeshes based on their material - # - mat_list = data.materials if len(data.materials) > 0 else [None] - for material_id, mat in enumerate(mat_list): + def draw(_, context): #{ - mref = {} + # Compiler section - sm = mdl_submesh() - sm.indice_start = len( g_encoder['data']['indice'] ) - sm.vertex_start = len( g_encoder['data']['vertex'] ) - sm.vertex_count = 0 - sm.indice_count = 0 - sm.material_id = encoder_process_material( mat ) + row = _.layout.row() + row.scale_y = 1.75 + row.prop( context.scene.SR_data, 'panel', expand=True ) - for i in range(3): - #{ - sm.bbx[0][i] = 999999 - sm.bbx[1][i] = -999999 + if context.scene.SR_data.panel == 'SETTINGS': #{ + _.layout.prop( context.scene.SR_data, 'gizmos' ) #} - - # Keep a reference to very very very similar vertices - # - vertex_reference = {} - - # Write the vertex / indice data - # - for tri_index, tri in enumerate(data.loop_triangles): - #{ - if tri.material_index != material_id: - continue + elif context.scene.SR_data.panel == 'EXPORT': #{ + _.layout.prop( context.scene.SR_data, "export_dir" ) + col = bpy.context.collection + + found_in_export = False + export_count = 0 + view_layer = bpy.context.view_layer + for c1 in view_layer.layer_collection.children["export"].children: #{ + if not c1.hide_viewport or bpy.context.scene.SR_data.use_hidden: + export_count += 1 + + if c1.name == col.name: #{ + found_in_export = True + #} + #} - for j in range(3): - #{ - vert = data.vertices[tri.vertices[j]] - li = tri.loops[j] - vi = data.loops[li].vertex_index - - # Gather vertex information - # - co = vert.co - norm = data.loops[li].normal - uv = (0,0) - colour = (255,255,255,255) - groups = [0,0,0,0] - weights = [0,0,0,0] + box = _.layout.box() + row = box.row() + row.alignment = 'CENTER' + row.scale_y = 1.5 + + if found_in_export: #{ + row.label( text=col.name + ".mdl" ) + box.prop( col.SR_data, "pack_textures" ) + box.prop( col.SR_data, "animations" ) + box.operator( "skaterift.compile_this" ) + #} + else: #{ + row.enabled=False + row.label( text=col.name ) + + row = box.row() + row.enabled=False + row.alignment = 'CENTER' + row.scale_y = 1.5 + row.label( text="This collection is not in the export group" ) + #} - # Uvs - # - if data.uv_layers: - uv = data.uv_layers.active.data[li].uv - - # Vertex Colours - # - if data.vertex_colors: - #{ - colour = data.vertex_colors.active.data[li].color - colour = 
(int(colour[0]*255.0),\ - int(colour[1]*255.0),\ - int(colour[2]*255.0),\ - int(colour[3]*255.0)) - #} - - # Weight groups: truncates to the 3 with the most influence. The - # fourth bone ID is never used by the shader so it is - # always 0 - # - if armature_def: - #{ - src_groups = [_ for _ in data.vertices[vi].groups \ - if obj.vertex_groups[_.group].name in \ - armature_def['bones']] + box = _.layout.box() + row = box.row() - weight_groups = sorted( src_groups, key = \ - lambda a: a.weight, reverse=True ) - tot = 0.0 - for ml in range(3): - #{ - if len(weight_groups) > ml: - #{ - g = weight_groups[ml] - name = obj.vertex_groups[g.group].name - weight = g.weight + split = row.split( factor=0.3, align=True ) + split.prop( context.scene.SR_data, "use_hidden", text="hidden" ) + + row1 = split.row() + if export_count == 0: + row1.enabled=False + row1.operator( "skaterift.compile_all", \ + text=F"Compile all ({export_count} collections)" ) + #} + elif context.scene.SR_data.panel == 'ENTITY': #{ + active_object = context.active_object + if not active_object: return - weights[ml] = weight - groups[ml] = armature_def['bones'].index(name) - tot += weight - #} - #} + box = _.layout.box() + row = box.row() + row.alignment = 'CENTER' + row.label( text=active_object.name ) + row.scale_y = 1.5 + + def _draw_prop_collection( data ): #{ + nonlocal box + row = box.row() + row.alignment = 'CENTER' + row.enabled = False + row.scale_y = 1.5 + row.label( text=F'{data[0]}' ) - if len(weight_groups) > 0: - #{ - inv_norm = (1.0/tot) * 65535.0 - for ml in range(3): - #{ - weights[ml] = int( weights[ml] * inv_norm ) - weights[ml] = min( weights[ml], 65535 ) - weights[ml] = max( weights[ml], 0 ) - #} - #} + if hasattr(type(data[0]),'sr_inspector'):#{ + type(data[0]).sr_inspector( box, data ) #} - else: - #{ - li1 = tri.loops[(j+1)%3] - vi1 = data.loops[li1].vertex_index - e0 = data.edges[ data.loops[li].edge_index ] + else:#{ + for a in data[0].__annotations__: + box.prop( data[0], a ) + #} + #} - if e0.use_freestyle_mark and \ - ((e0.vertices[0] == vi and e0.vertices[1] == vi1) or \ - (e0.vertices[0] == vi1 and e0.vertices[1] == vi)): - #{ - weights[0] = 1 + if active_object.type == 'ARMATURE': #{ + if active_object.mode == 'POSE': #{ + bones = active_object.data.bones + mb = sr_get_mirror_bone( bones ) + if mb:#{ + box.operator( "skaterift.mirror_bone", \ + text=F'Mirror attributes to {mb.name}' ) #} + + _draw_prop_collection( [bones.active.SR_data ] ) + #} + else: #{ + row = box.row() + row.alignment='CENTER' + row.scale_y=2.0 + row.enabled=False + row.label( text="Enter pose mode to modify bone properties" ) #} - - # Add vertex and expand bound box - # - index = encoder_vertex_push( vertex_reference, co, \ - norm, \ - uv, \ - colour, \ - groups, \ - weights ) - g_encoder['data']['indice'] += [index] #} - #} - - # How many unique verts did we add in total - # - sm.vertex_count = len(g_encoder['data']['vertex']) - sm.vertex_start - sm.indice_count = len(g_encoder['data']['indice']) - sm.indice_start - - # Make sure bounding box isn't -inf -> inf if no vertices - # - if sm.vertex_count == 0: - for j in range(2): - for i in range(3): - sm.bbx[j][i] = 0 - else: - #{ - for j in range(sm.vertex_count): - #{ - vert = g_encoder['data']['vertex'][ sm.vertex_start + j ] + elif active_object.type == 'LIGHT': #{ + _draw_prop_collection( [active_object.data.SR_data] ) + #} + elif active_object.type == 'EMPTY' or active_object.type == 'MESH': #{ + box.prop( active_object.SR_data, "ent_type" ) + ent_type = 
active_object.SR_data.ent_type + + col = getattr( active_object.SR_data, ent_type, None ) + if col != None and len(col)!=0: _draw_prop_collection( col ) - for i in range(3): - #{ - sm.bbx[0][i] = min( sm.bbx[0][i], vert.co[i] ) - sm.bbx[1][i] = max( sm.bbx[1][i], vert.co[i] ) + if active_object.type == 'MESH':#{ + col = getattr( active_object.data.SR_data, ent_type, None ) + if col != None and len(col)!=0: _draw_prop_collection( col ) #} #} #} - - # Add submesh to encoder - # - g_encoder['data']['submesh'] += [sm] - node.submesh_count += 1 - #} - - # Save a reference to this node since we want to reuse the submesh indices - # later. - g_encoder['mesh_cache'][obj.data.name] = node #} - -def encoder_compile_ent_as( name, node, node_def ): +class SR_MATERIAL_PANEL(bpy.types.Panel): #{ - global g_encoder - - if name == 'classtype_none': - #{ - node.offset = 0 - node.classtype = 0 - return - #} - elif name not in globals(): - #{ - print( "Classtype '" +name + "' is unknown!" ) - return - #} + bl_label="Skate Rift material" + bl_idname="MATERIAL_PT_sr_material" + bl_space_type='PROPERTIES' + bl_region_type='WINDOW' + bl_context="material" - buffer = g_encoder['data']['entdata'] - node.offset = len(buffer) + def draw(_,context): + #{ + active_object = bpy.context.active_object + if active_object == None: return + active_mat = active_object.active_material + if active_mat == None: return - cl = globals()[ name ] - inst = cl() - inst.encode_obj( node, node_def ) + info = material_info( active_mat ) - buffer.extend( bytearray(inst) ) - bytearray_align_to( buffer, 4 ) -#} + if 'tex_diffuse' in info:#{ + _.layout.label( icon='INFO', \ + text=F"{info['tex_diffuse'].name} will be compiled" ) + #} -# Compiles animation data into model and gives us some extra node_def entries -# -def encoder_compile_armature( node, node_def ): -#{ - global g_encoder - - entdata = g_encoder['data']['entdata'] - animdata = g_encoder['data']['anim'] - keyframedata = g_encoder['data']['keyframe'] - mesh_cache = g_encoder['mesh_cache'] - obj = node_def['obj'] - bones = node_def['bones'] - - # extra info - node_def['anim_start'] = len(animdata) - node_def['anim_count'] = 0 - - if not node_def['compile_animation']: - #{ - return - #} + _.layout.prop( active_mat.SR_data, "shader" ) + _.layout.prop( active_mat.SR_data, "surface_prop" ) + _.layout.prop( active_mat.SR_data, "collision" ) - # Compile anims - # - if obj.animation_data: - #{ - # So we can restore later - # - previous_frame = bpy.context.scene.frame_current - previous_action = obj.animation_data.action - POSE_OR_REST_CACHE = obj.data.pose_position - obj.data.pose_position = 'POSE' + if active_mat.SR_data.collision:#{ + _.layout.prop( active_mat.SR_data, "skate_surface" ) + _.layout.prop( active_mat.SR_data, "grind_surface" ) + _.layout.prop( active_mat.SR_data, "grow_grass" ) + #} - for NLALayer in obj.animation_data.nla_tracks: - #{ - for NLAStrip in NLALayer.strips: - #{ - # set active - # - for a in bpy.data.actions: - #{ - if a.name == NLAStrip.name: - #{ - obj.animation_data.action = a - break - #} - #} - - # Clip to NLA settings - # - anim_start = int(NLAStrip.action_frame_start) - anim_end = int(NLAStrip.action_frame_end) + if active_mat.SR_data.shader == "terrain_blend":#{ + box = _.layout.box() + box.prop( active_mat.SR_data, "blend_offset" ) + box.prop( active_mat.SR_data, "sand_colour" ) + #} + elif active_mat.SR_data.shader == "vertex_blend":#{ + box = _.layout.box() + box.label( icon='INFO', text="Uses vertex colours, the R channel" ) + box.prop( 
active_mat.SR_data, "blend_offset" ) + #} + elif active_mat.SR_data.shader == "water":#{ + box = _.layout.box() + box.label( icon='INFO', text="Depth scale of 16 meters" ) + box.prop( active_mat.SR_data, "shore_colour" ) + box.prop( active_mat.SR_data, "ocean_colour" ) + #} + #} +#} - # Export strips - # - anim = mdl_animation() - anim.pstr_name = encoder_process_pstr( NLAStrip.action.name ) - anim.rate = 30.0 - anim.offset = len(keyframedata) - anim.length = anim_end-anim_start - - # Export the keyframes - for frame in range(anim_start,anim_end): - #{ - bpy.context.scene.frame_set(frame) - - for bone_name in bones: - #{ - for pb in obj.pose.bones: - #{ - if pb.name != bone_name: continue - - rb = obj.data.bones[ bone_name ] - - # relative bone matrix - if rb.parent is not None: - #{ - offset_mtx = rb.parent.matrix_local - offset_mtx = offset_mtx.inverted_safe() @ \ - rb.matrix_local - - inv_parent = pb.parent.matrix @ offset_mtx - inv_parent.invert_safe() - fpm = inv_parent @ pb.matrix - #} - else: - #{ - bone_mtx = rb.matrix.to_4x4() - local_inv = rb.matrix_local.inverted_safe() - fpm = bone_mtx @ local_inv @ pb.matrix - #} - - loc, rot, sca = fpm.decompose() - - # local position - final_pos = Vector(( loc[0], loc[2], -loc[1] )) - - # rotation - lc_m = pb.matrix_channel.to_3x3() - if pb.parent is not None: - #{ - smtx = pb.parent.matrix_channel.to_3x3() - lc_m = smtx.inverted() @ lc_m - #} - rq = lc_m.to_quaternion() - - kf = mdl_keyframe() - kf.co[0] = final_pos[0] - kf.co[1] = final_pos[1] - kf.co[2] = final_pos[2] - - kf.q[0] = rq[1] - kf.q[1] = rq[3] - kf.q[2] = -rq[2] - kf.q[3] = rq[0] - - # scale - kf.s[0] = sca[0] - kf.s[1] = sca[2] - kf.s[2] = sca[1] - - keyframedata += [kf] - break - #} - #} - #} - - # Add to animation buffer - # - animdata += [anim] - node_def['anim_count'] += 1 +def sr_get_type_enum( scene, context ): +#{ + items = [('none','None',"")] + mesh_entities=['ent_gate'] + point_entities=['ent_spawn','ent_route_node','ent_route'] - # Report progress - # - status_name = F" " + " |"*(node_def['depth']-1) - print( F"{status_name} | *anim: {NLAStrip.action.name}" ) - #} + for e in point_entities: items += [(e,e,'')] + + if context.scene.SR_data.panel == 'ENTITY': #{ + if context.active_object.type == 'MESH': #{ + for e in mesh_entities: items += [(e,e,'')] #} - - # Restore context to how it was before - # - bpy.context.scene.frame_set( previous_frame ) - obj.animation_data.action = previous_action - obj.data.pose_position = POSE_OR_REST_CACHE #} + else: #{ + for e in mesh_entities: items += [(e,e,'')] + #} + + return items +#} + +def sr_on_type_change( _, context ): +#{ + obj = context.active_object + ent_type = obj.SR_data.ent_type + if ent_type == 'none': return + if obj.type == 'MESH':#{ + col = getattr( obj.data.SR_data, ent_type, None ) + if col != None and len(col)==0: col.add() + #} + + col = getattr( obj.SR_data, ent_type, None ) + if col != None and len(col)==0: col.add() #} -# We are trying to compile this node_def -# -def encoder_process_definition( node_def ): +class SR_OBJECT_ENT_SPAWN(bpy.types.PropertyGroup): #{ - global g_encoder + alias: bpy.props.StringProperty( name='alias' ) +#} - # data sources for object/bone are taken differently - # - if 'obj' in node_def: - #{ - obj = node_def['obj'] - obj_type = obj.type - obj_co = obj.matrix_world @ Vector((0,0,0)) +class SR_OBJECT_ENT_GATE(bpy.types.PropertyGroup): +#{ + target: bpy.props.PointerProperty( \ + type=bpy.types.Object, name="destination", \ + poll=lambda self,obj: 
sr_filter_ent_type(obj,'ent_gate')) +#} - if obj_type == 'ARMATURE': - obj_classtype = 'classtype_skeleton' - elif obj_type == 'LIGHT': - #{ - obj_classtype = 'classtype_world_light' - #} - else: - #{ - obj_classtype = obj.cv_data.classtype - - # Check for armature deform - # - for mod in obj.modifiers: - #{ - if mod.type == 'ARMATURE': - #{ - obj_classtype = 'classtype_skin' - - # Make sure to freeze armature in rest while we collect - # vertex information - # - armature_def = g_encoder['graph_lookup'][mod.object] - POSE_OR_REST_CACHE = armature_def['obj'].data.pose_position - armature_def['obj'].data.pose_position = 'REST' - node_def['linked_armature'] = armature_def - break - #} - #} - #} - #} +class SR_MESH_ENT_GATE(bpy.types.PropertyGroup): +#{ + dimensions: bpy.props.FloatVectorProperty(name="dimensions",size=3) +#} - elif 'bone' in node_def: - #{ - obj = node_def['bone'] - obj_type = 'BONE' - obj_co = obj.head_local - obj_classtype = 'classtype_bone' - #} +class SR_OBJECT_ENT_ROUTE_ENTRY(bpy.types.PropertyGroup): +#{ + target: bpy.props.PointerProperty( \ + type=bpy.types.Object, name='target', \ + poll=lambda self,obj: sr_filter_ent_type(obj,'ent_gate')) +#} - # Create node - # - node = mdl_node() - node.pstr_name = encoder_process_pstr( obj.name ) +class SR_UL_ROUTE_NODE_LIST(bpy.types.UIList): +#{ + bl_idname = 'SR_UL_ROUTE_NODE_LIST' - if node_def["parent"]: - node.parent = node_def["parent"]["uid"] + def draw_item(_,context,layout,data,item,icon,active_data,active_propname): + #{ + layout.prop( item, 'target', text='', emboss=False ) + #} +#} - # Setup transform - # - node.co[0] = obj_co[0] - node.co[1] = obj_co[2] - node.co[2] = -obj_co[1] - - # Convert rotation quat to our space type - # - quat = obj.matrix_local.to_quaternion() - node.q[0] = quat[1] - node.q[1] = quat[3] - node.q[2] = -quat[2] - node.q[3] = quat[0] +class SR_OT_ROUTE_LIST_NEW_ITEM(bpy.types.Operator): +#{ + bl_idname = "skaterift.new_entry" + bl_label = "Add gate" - # Bone scale is just a vector to the tail - # - if obj_type == 'BONE': - #{ - node.s[0] = obj.tail_local[0] - node.co[0] - node.s[1] = obj.tail_local[2] - node.co[1] - node.s[2] = -obj.tail_local[1] - node.co[2] + def execute(self, context):#{ + active_object = context.active_object + active_object.SR_data.ent_route[0].gates.add() + return{'FINISHED'} #} - else: - #{ - node.s[0] = obj.scale[0] - node.s[1] = obj.scale[2] - node.s[2] = obj.scale[1] +#} + +class SR_OT_ROUTE_LIST_DEL_ITEM(bpy.types.Operator): +#{ + bl_idname = "skaterift.del_entry" + bl_label = "Remove gate" + + @classmethod + def poll(cls, context):#{ + active_object = context.active_object + if obj_ent_type == 'ent_gate':#{ + return active_object.SR_data.ent_route[0].gates + #} + else: return False #} - # Report status - # - tot_uid = g_encoder['uid_count']-1 - obj_uid = node_def['uid'] - obj_depth = node_def['depth']-1 + def execute(self, context):#{ + active_object = context.active_object + lista = active_object.SR_data.ent_route[0].gates + index = active_object.SR_data.ent_route[0].gates_index + lista.remove(index) + active_object.SR_data.ent_route[0].gates_index = \ + min(max(0, index-1), len(lista) - 1) + return{'FINISHED'} + #} +#} - status_id = F" [{obj_uid: 3}/{tot_uid}]" + " |"*obj_depth - status_name = status_id + F" L {obj.name}" +class SR_OBJECT_ENT_ROUTE(bpy.types.PropertyGroup): +#{ + gates: bpy.props.CollectionProperty(type=SR_OBJECT_ENT_ROUTE_ENTRY) + gates_index: bpy.props.IntProperty() - if obj_classtype != 'classtype_none': status_type = obj_classtype - else: 
status_type = obj_type + colour: bpy.props.FloatVectorProperty( \ + name="Colour",\ + subtype='COLOR',\ + min=0.0,max=1.0,\ + default=Vector((0.79,0.63,0.48)),\ + description="Route colour"\ + ) - status_parent = F"{node.parent: 3}" - status_armref = "" + alias: bpy.props.StringProperty(\ + name="Alias",\ + default="Untitled Course") - if obj_classtype == 'classtype_skin': - status_armref = F" [armature -> {armature_def['obj'].cv_data.uid}]" + @staticmethod + def sr_inspector( layout, data ): + #{ + layout.prop( data[0], 'alias' ) + layout.prop( data[0], 'colour' ) - print(F"{status_name:<32} {status_type:<22} {status_parent} {status_armref}") + layout.label( text='Checkpoints' ) + layout.template_list('SR_UL_ROUTE_NODE_LIST', 'Checkpoints', \ + data[0], 'gates', data[0], 'gates_index', rows=5) - # Process mesh if needed - # - if obj_type == 'MESH': - #{ - encoder_compile_mesh( node, node_def ) - #} - elif obj_type == 'ARMATURE': - #{ - encoder_compile_armature( node, node_def ) + row = layout.row() + row.operator( 'skaterift.new_entry', text='Add' ) + row.operator( 'skaterift.del_entry', text='Remove' ) #} +#} + +class SR_OBJECT_PROPERTIES(bpy.types.PropertyGroup): +#{ + ent_gate: bpy.props.CollectionProperty(type=SR_OBJECT_ENT_GATE) + ent_spawn: bpy.props.CollectionProperty(type=SR_OBJECT_ENT_SPAWN) + ent_route: bpy.props.CollectionProperty(type=SR_OBJECT_ENT_ROUTE) - encoder_compile_ent_as( obj_classtype, node, node_def ) + ent_type: bpy.props.EnumProperty( + name="Type", + items=[('none', 'None', '', 0), + ('ent_gate','Gate','', 1), + ('ent_spawn','Spawn','', 2), + ('ent_route_node', 'Route Node', '', 3 ), + ('ent_route', 'Route', '', 4)], + update=sr_on_type_change + ) +#} - # Make sure to reset the armature we just mucked about with - # - if obj_classtype == 'classtype_skin': - armature_def['obj'].data.pose_position = POSE_OR_REST_CACHE +class SR_MESH_PROPERTIES(bpy.types.PropertyGroup): +#{ + ent_gate: bpy.props.CollectionProperty(type=SR_MESH_ENT_GATE) +#} - g_encoder['data']['node'] += [node] +class SR_LIGHT_PROPERTIES(bpy.types.PropertyGroup): +#{ + daytime: bpy.props.BoolProperty( name='Daytime' ) #} -# The post processing step or the pre processing to the writing step -# -def encoder_write_to_file( path ): +class SR_BONE_PROPERTIES(bpy.types.PropertyGroup): #{ - global g_encoder - - # Compile down to a byte array - # - header = g_encoder['header'] - file_pos = sizeof(header) - file_data = bytearray() - print( " Compositing data arrays" ) - - for array_name in g_encoder['data']: - #{ - file_pos += bytearray_align_to( file_data, 16, sizeof(header) ) - arr = g_encoder['data'][array_name] + collider: bpy.props.EnumProperty( name='Collider Type', + items=[('0','none',''), + ('1','box',''), + ('2','capsule','')]) - setattr( header, array_name + "_offset", file_pos ) + collider_min: bpy.props.FloatVectorProperty( name='Collider Min', size=3 ) + collider_max: bpy.props.FloatVectorProperty( name='Collider Max', size=3 ) - print( F" {array_name:<16} @{file_pos:> 8X}[{len(arr)}]" ) + cone_constraint: bpy.props.BoolProperty( name='Cone constraint' ) - if isinstance( arr, bytearray ): - #{ - setattr( header, array_name + "_size", len(arr) ) + conevx: bpy.props.FloatVectorProperty( name='vx' ) + conevy: bpy.props.FloatVectorProperty( name='vy' ) + coneva: bpy.props.FloatVectorProperty( name='va' ) + conet: bpy.props.FloatProperty( name='t' ) - file_data.extend( arr ) - file_pos += len(arr) - #} - else: - #{ - setattr( header, array_name + "_count", len(arr) ) + @staticmethod + def 
sr_inspector( layout, data ): + #{ + data = data[0] + box = layout.box() + box.prop( data, 'collider' ) - for item in arr: - #{ - bbytes = bytearray(item) - file_data.extend( bbytes ) - file_pos += sizeof(item) - #} + if int(data.collider)>0:#{ + row = box.row() + row.prop( data, 'collider_min' ) + row = box.row() + row.prop( data, 'collider_max' ) + #} + + box = layout.box() + box.prop( data, 'cone_constraint' ) + if data.cone_constraint:#{ + row = box.row() + row.prop( data, 'conevx' ) + row = box.row() + row.prop( data, 'conevy' ) + row = box.row() + row.prop( data, 'coneva' ) + box.prop( data, 'conet' ) #} #} - - # This imperitive for this field to be santized in the future! - # - header.file_length = file_pos - - print( " Writing file" ) - # Write header and data chunk to file - # - fp = open( path, "wb" ) - fp.write( bytearray( header ) ) - fp.write( file_data ) - fp.close() #} -# Main compiler, uses string as the identifier for the collection -# -def write_model(collection_name): +class SR_MATERIAL_PROPERTIES(bpy.types.PropertyGroup): #{ - global g_encoder - print( F"Model graph | Create mode '{collection_name}'" ) - folder = bpy.path.abspath(bpy.context.scene.cv_data.export_dir) - path = F"{folder}{collection_name}.mdl" - print( path ) - - collection = bpy.data.collections[collection_name] - - encoder_init( collection ) - encoder_build_scene_graph( collection ) - - # Compile - # - print( " Comping objects" ) - it = encoder_graph_iterator( g_encoder['scene_graph'] ) - for node_def in it: - encoder_process_definition( node_def ) - - # Write - # - encoder_write_to_file( path ) + shader: bpy.props.EnumProperty( + name="Format", + items = [ + ('standard',"standard",''), + ('standard_cutout', "standard_cutout", ''), + ('terrain_blend', "terrain_blend", ''), + ('vertex_blend', "vertex_blend", ''), + ('water',"water",'') + ]) - print( F"Completed {collection_name}.mdl" ) + surface_prop: bpy.props.EnumProperty( + name="Surface Property", + items = [ + ('0','concrete',''), + ('1','wood',''), + ('2','grass',''), + ('3','tiles','') + ]) + + collision: bpy.props.BoolProperty( \ + name="Collisions Enabled",\ + default=True,\ + description = "Can the player collide with this material"\ + ) + skate_surface: bpy.props.BoolProperty( \ + name="Skate Surface", \ + default=True,\ + description = "Should the game try to target this surface?" \ + ) + grind_surface: bpy.props.BoolProperty( \ + name="Grind Surface", \ + default=False,\ + description = "Grind face?" \ + ) + grow_grass: bpy.props.BoolProperty( \ + name="Grow Grass", \ + default=False,\ + description = "Spawn grass sprites on this surface?" 
\ + ) + blend_offset: bpy.props.FloatVectorProperty( \ + name="Blend Offset", \ + size=2, \ + default=Vector((0.5,0.0)),\ + description="When surface is more than 45 degrees, add this vector " +\ + "to the UVs" \ + ) + sand_colour: bpy.props.FloatVectorProperty( \ + name="Sand Colour",\ + subtype='COLOR',\ + min=0.0,max=1.0,\ + default=Vector((0.79,0.63,0.48)),\ + description="Blend to this colour near the 0 coordinate on UP axis"\ + ) + shore_colour: bpy.props.FloatVectorProperty( \ + name="Shore Colour",\ + subtype='COLOR',\ + min=0.0,max=1.0,\ + default=Vector((0.03,0.32,0.61)),\ + description="Water colour at the shoreline"\ + ) + ocean_colour: bpy.props.FloatVectorProperty( \ + name="Ocean Colour",\ + subtype='COLOR',\ + min=0.0,max=1.0,\ + default=Vector((0.0,0.006,0.03)),\ + description="Water colour in the deep bits"\ + ) #} # ---------------------------------------------------------------------------- # @@ -2221,12 +1881,12 @@ def cv_draw_halfsphere( pos, tx, ty, tz, radius, colour ): # Draw transformed -1 -> 1 cube # -def cv_draw_ucube( transform, colour ): +def cv_draw_ucube( transform, colour, s=Vector((1,1,1)), o=Vector((0,0,0)) ): #{ global cv_view_verts, cv_view_colours - a = Vector((-1,-1,-1)) - b = Vector((1,1,1)) + a = o + -1.0 * s + b = o + 1.0 * s vs = [None]*8 vs[0] = transform @ Vector((a[0], a[1], a[2])) @@ -2247,7 +1907,7 @@ def cv_draw_ucube( transform, colour ): v1 = vs[l[1]] cv_view_verts += [(v0[0],v0[1],v0[2])] cv_view_verts += [(v1[0],v1[1],v1[2])] - cv_view_colours += [(0,1,0,1),(0,1,0,1)] + cv_view_colours += [colour, colour] #} cv_draw_lines() #} @@ -2301,7 +1961,7 @@ def cv_tangent_basis( n, tx, ty ): # Draw coloured arrow # -def cv_draw_arrow( p0, p1, c0 ): +def cv_draw_arrow( p0, p1, c0, size=0.15 ): #{ global cv_view_verts, cv_view_colours @@ -2313,11 +1973,28 @@ def cv_draw_arrow( p0, p1, c0 ): ty = Vector((1,0,0)) cv_tangent_basis( n, tx, ty ) - cv_view_verts += [p0,p1, midpt+(tx-n)*0.15,midpt, midpt+(-tx-n)*0.15,midpt ] + cv_view_verts += [p0,p1, midpt+(tx-n)*size,midpt, midpt+(-tx-n)*size,midpt ] cv_view_colours += [c0,c0,c0,c0,c0,c0] cv_draw_lines() #} +def cv_draw_line_dotted( p0, p1, c0, dots=10 ): +#{ + global cv_view_verts, cv_view_colours + + for i in range(dots):#{ + t0 = i/dots + t1 = (i+0.25)/dots + + p2 = p0*(1.0-t0)+p1*t0 + p3 = p0*(1.0-t1)+p1*t1 + + cv_view_verts += [p2,p3] + cv_view_colours += [c0,c0] + #} + cv_draw_lines() +#} + # Drawhandles of a bezier control point # def cv_draw_bhandle( obj, direction, colour ): @@ -2411,8 +2088,7 @@ def draw_limit( obj, center, major, minor, amin, amax, colour ): ay = major*f ax = minor*f - for x in range(16): - #{ + for x in range(16):#{ t0 = x/16 t1 = (x+1)/16 a0 = amin*(1.0-t0)+amax*t0 @@ -2426,13 +2102,11 @@ def draw_limit( obj, center, major, minor, amin, amax, colour ): cv_view_verts += [p0,p1] cv_view_colours += [colour,colour] - if x == 0: - #{ + if x == 0:#{ cv_view_verts += [p0,center] cv_view_colours += [colour,colour] #} - if x == 15: - #{ + if x == 15:#{ cv_view_verts += [p1,center] cv_view_colours += [colour,colour] #} @@ -2457,8 +2131,7 @@ def draw_cone_twist( center, vx, vy, va ): cv_view_verts += [center, center+va*size] cv_view_colours += [ (1,1,1,1), (1,1,1,1) ] - for x in range(32): - #{ + for x in range(32):#{ t0 = (x/32) * math.tau t1 = ((x+1)/32) * math.tau @@ -2486,20 +2159,20 @@ def draw_skeleton_helpers( obj ): #{ global cv_view_verts, cv_view_colours - if obj.data.pose_position != 'REST': - #{ + if obj.data.pose_position != 'REST':#{ return #} - for bone in 
obj.data.bones: - #{ + for bone in obj.data.bones:#{ c = bone.head_local - a = Vector((bone.cv_data.v0[0], bone.cv_data.v0[1], bone.cv_data.v0[2])) - b = Vector((bone.cv_data.v1[0], bone.cv_data.v1[1], bone.cv_data.v1[2])) - - if bone.cv_data.collider == 'collider_box': - #{ - + a = Vector((bone.SR_data.collider_min[0], + bone.SR_data.collider_min[1], + bone.SR_data.collider_min[2])) + b = Vector((bone.SR_data.collider_max[0], + bone.SR_data.collider_max[1], + bone.SR_data.collider_max[2])) + + if bone.SR_data.collider == '1':#{ vs = [None]*8 vs[0]=obj.matrix_world@Vector((c[0]+a[0],c[1]+a[1],c[2]+a[2])) vs[1]=obj.matrix_world@Vector((c[0]+a[0],c[1]+b[1],c[2]+a[2])) @@ -2513,8 +2186,7 @@ def draw_skeleton_helpers( obj ): indices = [(0,1),(1,2),(2,3),(3,0),(4,5),(5,6),(6,7),(7,4),\ (0,4),(1,5),(2,6),(3,7)] - for l in indices: - #{ + for l in indices:#{ v0 = vs[l[0]] v1 = vs[l[1]] @@ -2523,566 +2195,318 @@ def draw_skeleton_helpers( obj ): cv_view_colours += [(0.5,0.5,0.5,0.5),(0.5,0.5,0.5,0.5)] #} #} - elif bone.cv_data.collider == 'collider_capsule': - #{ + elif bone.SR_data.collider == '2':#{ v0 = b-a major_axis = 0 largest = -1.0 - for i in range(3): - #{ - if abs(v0[i]) > largest: - #{ - largest = abs(v0[i]) - major_axis = i - #} - #} - - v1 = Vector((0,0,0)) - v1[major_axis] = 1.0 - - tx = Vector((0,0,0)) - ty = Vector((0,0,0)) - - cv_tangent_basis( v1, tx, ty ) - r = (abs(tx.dot( v0 )) + abs(ty.dot( v0 ))) * 0.25 - l = v0[ major_axis ] - r*2 - - p0 = obj.matrix_world@Vector( c + (a+b)*0.5 + v1*l*-0.5 ) - p1 = obj.matrix_world@Vector( c + (a+b)*0.5 + v1*l* 0.5 ) - - colour = [0.2,0.2,0.2,1.0] - colour[major_axis] = 0.5 - - cv_draw_halfsphere( p0, -v1, ty, tx, r, colour ) - cv_draw_halfsphere( p1, v1, ty, tx, r, colour ) - cv_draw_line( p0+tx* r, p1+tx* r, colour ) - cv_draw_line( p0+tx*-r, p1+tx*-r, colour ) - cv_draw_line( p0+ty* r, p1+ty* r, colour ) - cv_draw_line( p0+ty*-r, p1+ty*-r, colour ) - #} - else: - #{ - continue - #} - - center = obj.matrix_world @ c - if bone.cv_data.con0: - #{ - vx = Vector([bone.cv_data.conevx[_] for _ in range(3)]) - vy = Vector([bone.cv_data.conevy[_] for _ in range(3)]) - va = Vector([bone.cv_data.coneva[_] for _ in range(3)]) - draw_cone_twist( center, vx, vy, va ) - - #draw_limit( obj, c, Vector((0,0,1)),Vector((0,-1,0)), \ - # bone.cv_data.mins[0], bone.cv_data.maxs[0], \ - # (1,0,0,1)) - #draw_limit( obj, c, Vector((0,-1,0)),Vector((1,0,0)), \ - # bone.cv_data.mins[1], bone.cv_data.maxs[1], \ - # (0,1,0,1)) - #draw_limit( obj, c, Vector((1,0,0)),Vector((0,0,1)), \ - # bone.cv_data.mins[2], bone.cv_data.maxs[2], \ - # (0,0,1,1)) - #} - #} -#} - -def cv_draw(): -#{ - global cv_view_shader - global cv_view_verts - global cv_view_colours - global cv_view_course_i - - cv_view_course_i = 0 - cv_view_verts = [] - cv_view_colours = [] - - cv_view_shader.bind() - gpu.state.depth_mask_set(False) - gpu.state.line_width_set(2.0) - gpu.state.face_culling_set('BACK') - gpu.state.depth_test_set('LESS') - gpu.state.blend_set('NONE') - - for obj in bpy.context.collection.objects: - #{ - if obj.type == 'ARMATURE': - #{ - if obj.data.pose_position == 'REST': - draw_skeleton_helpers( obj ) - #} - else: - #{ - classtype = obj.cv_data.classtype - if (classtype != 'classtype_none') and (classtype in globals()): - #{ - cl = globals()[ classtype ] - - if getattr( cl, "draw_scene_helpers", None ): - #{ - cl.draw_scene_helpers( obj ) - #} - #} - #} - #} - - cv_draw_lines() - return -#} - - -# ---------------------------------------------------------------------------- # -# 
# -# Blender # -# # -# ---------------------------------------------------------------------------- # - -# Checks whether this object has a classtype assigned. we can only target other -# classes -def cv_poll_target(scene, obj): -#{ - if obj == bpy.context.active_object: - return False - if obj.cv_data.classtype == 'classtype_none': - return False + for i in range(3):#{ + if abs(v0[i]) > largest:#{ + largest = abs(v0[i]) + major_axis = i + #} + #} - return True -#} + v1 = Vector((0,0,0)) + v1[major_axis] = 1.0 -class CV_MESH_SETTINGS(bpy.types.PropertyGroup): -#{ - v0: bpy.props.FloatVectorProperty(name="v0",size=3) - v1: bpy.props.FloatVectorProperty(name="v1",size=3) - v2: bpy.props.FloatVectorProperty(name="v2",size=3) - v3: bpy.props.FloatVectorProperty(name="v3",size=3) -#} + tx = Vector((0,0,0)) + ty = Vector((0,0,0)) -class CV_LIGHT_SETTINGS(bpy.types.PropertyGroup): -#{ - bp0: bpy.props.BoolProperty( name="bp0" ); -#} + cv_tangent_basis( v1, tx, ty ) + r = (abs(tx.dot( v0 )) + abs(ty.dot( v0 ))) * 0.25 + l = v0[ major_axis ] - r*2 -class CV_LIGHT_PANEL(bpy.types.Panel): -#{ - bl_label="[Skate Rift]" - bl_idname="SCENE_PT_cv_light" - bl_space_type='PROPERTIES' - bl_region_type='WINDOW' - bl_context='data' + p0 = obj.matrix_world@Vector( c + (a+b)*0.5 + v1*l*-0.5 ) + p1 = obj.matrix_world@Vector( c + (a+b)*0.5 + v1*l* 0.5 ) - def draw(_,context): - #{ - active_object = context.active_object - if active_object == None: return + colour = [0.2,0.2,0.2,1.0] + colour[major_axis] = 0.5 - if active_object.type != 'LIGHT': return + cv_draw_halfsphere( p0, -v1, ty, tx, r, colour ) + cv_draw_halfsphere( p1, v1, ty, tx, r, colour ) + cv_draw_line( p0+tx* r, p1+tx* r, colour ) + cv_draw_line( p0+tx*-r, p1+tx*-r, colour ) + cv_draw_line( p0+ty* r, p1+ty* r, colour ) + cv_draw_line( p0+ty*-r, p1+ty*-r, colour ) + #} + else:#{ + continue + #} - data = active_object.data.cv_data - _.layout.prop( data, "bp0", text="Only on during night" ) + center = obj.matrix_world @ c + if bone.SR_data.cone_constraint:#{ + vx = Vector([bone.SR_data.conevx[_] for _ in range(3)]) + vy = Vector([bone.SR_data.conevy[_] for _ in range(3)]) + va = Vector([bone.SR_data.coneva[_] for _ in range(3)]) + draw_cone_twist( center, vx, vy, va ) + #} #} #} -class CV_OBJ_SETTINGS(bpy.types.PropertyGroup): +def cv_ent_gate( obj ): #{ - uid: bpy.props.IntProperty( name="" ) - - strp: bpy.props.StringProperty( name="strp" ) - intp: bpy.props.IntProperty( name="intp" ) - fltp: bpy.props.FloatProperty( name="fltp" ) - bp0: bpy.props.BoolProperty( name="bp0" ) - bp1: bpy.props.BoolProperty( name="bp1" ) - bp2: bpy.props.BoolProperty( name="bp2" ) - bp3: bpy.props.BoolProperty( name="bp3" ) - - target: bpy.props.PointerProperty( type=bpy.types.Object, name="target", \ - poll=cv_poll_target ) - target1: bpy.props.PointerProperty( type=bpy.types.Object, name="target1", \ - poll=cv_poll_target ) - target2: bpy.props.PointerProperty( type=bpy.types.Object, name="target2", \ - poll=cv_poll_target ) - target3: bpy.props.PointerProperty( type=bpy.types.Object, name="target3", \ - poll=cv_poll_target ) + global cv_view_verts, cv_view_colours - colour: bpy.props.FloatVectorProperty( name="colour",subtype='COLOR',\ - min=0.0,max=1.0) + if obj.type != 'MESH': return - classtype: bpy.props.EnumProperty( - name="Format", - items = [ - ('classtype_none', "classtype_none", "", 0), - ('classtype_gate', "classtype_gate", "", 1), - ('classtype_spawn', "classtype_spawn", "", 3), - ('classtype_water', "classtype_water", "", 4), - ('classtype_route_node', 
"classtype_route_node", "", 8 ), - ('classtype_route', "classtype_route", "", 9 ), - ('classtype_audio',"classtype_audio","",14), - ('classtype_trigger',"classtype_trigger","",100), - ('classtype_logic_achievement',"classtype_logic_achievement","",101), - ('classtype_logic_relay',"classtype_logic_relay","",102), - ('classtype_spawn_link',"classtype_spawn_link","",150), - ('classtype_nonlocal_gate', "classtype_nonlocal_gate", "", 300) - ]) + mesh_data = obj.data.SR_data.ent_gate[0] + data = obj.SR_data.ent_gate[0] + dims = mesh_data.dimensions - audio_format: bpy.props.EnumProperty( - name="Loaded format", - items = [ - ('mono', "mono", "", 0), - ('stereo', "stereo", "", 1), - ('remain compressed', "remain compressed", "", 2) - ]) -#} + vs = [None]*9 + c = Vector((0,0,dims[2])) -class CV_BONE_SETTINGS(bpy.types.PropertyGroup): -#{ - collider: bpy.props.EnumProperty( - name="Collider Type", - items = [ - ('collider_none', "collider_none", "", 0), - ('collider_box', "collider_box", "", 1), - ('collider_capsule', "collider_capsule", "", 2), - ]) + vs[0] = obj.matrix_world @ Vector((-dims[0],0.0,-dims[1]+dims[2])) + vs[1] = obj.matrix_world @ Vector((-dims[0],0.0, dims[1]+dims[2])) + vs[2] = obj.matrix_world @ Vector(( dims[0],0.0, dims[1]+dims[2])) + vs[3] = obj.matrix_world @ Vector(( dims[0],0.0,-dims[1]+dims[2])) + vs[4] = obj.matrix_world @ (c+Vector((-1,0,-2))) + vs[5] = obj.matrix_world @ (c+Vector((-1,0, 2))) + vs[6] = obj.matrix_world @ (c+Vector(( 1,0, 2))) + vs[7] = obj.matrix_world @ (c+Vector((-1,0, 0))) + vs[8] = obj.matrix_world @ (c+Vector(( 1,0, 0))) - v0: bpy.props.FloatVectorProperty(name="v0",size=3) - v1: bpy.props.FloatVectorProperty(name="v1",size=3) + indices = [(0,1),(1,2),(2,3),(3,0),(4,5),(5,6),(7,8)] - con0: bpy.props.BoolProperty(name="Constriant 0",default=False) - mins: bpy.props.FloatVectorProperty(name="mins",size=3) - maxs: bpy.props.FloatVectorProperty(name="maxs",size=3) + for l in indices:#{ + v0 = vs[l[0]] + v1 = vs[l[1]] + cv_view_verts += [(v0[0],v0[1],v0[2])] + cv_view_verts += [(v1[0],v1[1],v1[2])] + cv_view_colours += [(1,1,0,1),(1,1,0,1)] + #} - conevx: bpy.props.FloatVectorProperty(name="conevx",size=3) - conevy: bpy.props.FloatVectorProperty(name="conevy",size=3) - coneva: bpy.props.FloatVectorProperty(name="coneva",size=3) - conet: bpy.props.FloatProperty(name="conet") + sw = (0.4,0.4,0.4,0.2) + if data.target != None: + cv_draw_arrow( obj.location, data.target.location, sw ) #} -class CV_BONE_PANEL(bpy.types.Panel): +def dijkstra( graph, start_node, target_node ): #{ - bl_label="[Skate Rift]" - bl_idname="SCENE_PT_cv_bone" - bl_space_type='PROPERTIES' - bl_region_type='WINDOW' - bl_context='bone' - - def draw(_,context): - #{ - active_object = context.active_object - if active_object == None: return - - bone = active_object.data.bones.active - if bone == None: return + unvisited = [_ for _ in graph] + shortest_path = {} + previous_nodes = {} + + for n in unvisited: + shortest_path[n] = 9999999.999999 + shortest_path[start_node] = 0 + + while unvisited:#{ + current_min_node = None + for n in unvisited:#{ + if current_min_node == None: + current_min_node = n + elif shortest_path[n] < shortest_path[current_min_node]: + current_min_node = n + #} - _.layout.prop( bone.cv_data, "collider" ) - _.layout.prop( bone.cv_data, "v0" ) - _.layout.prop( bone.cv_data, "v1" ) + for branch in graph[current_min_node]:#{ + tentative_value = shortest_path[current_min_node] + tentative_value += graph[current_min_node][branch] + if tentative_value < 
shortest_path[branch]:#{ + shortest_path[branch] = tentative_value + previous_nodes[branch] = current_min_node + #} + #} - _.layout.label( text="Angle Limits" ) - _.layout.prop( bone.cv_data, "con0" ) + unvisited.remove(current_min_node) + #} + + path = [] + node = target_node + while node != start_node:#{ + path.append(node) - _.layout.prop( bone.cv_data, "conevx" ) - _.layout.prop( bone.cv_data, "conevy" ) - _.layout.prop( bone.cv_data, "coneva" ) - _.layout.prop( bone.cv_data, "conet" ) + if node not in previous_nodes: return None + node = previous_nodes[node] #} -#} -class CV_SCENE_SETTINGS(bpy.types.PropertyGroup): -#{ - use_hidden: bpy.props.BoolProperty( name="use hidden", default=False ) - export_dir: bpy.props.StringProperty( name="Export Dir", subtype='DIR_PATH' ) + # Add the start node manually + path.append(start_node) + return path #} -class CV_COLLECTION_SETTINGS(bpy.types.PropertyGroup): +def node_graph( route_nodes ): #{ - pack_textures: bpy.props.BoolProperty( name="Pack Textures", default=False ) - animations: bpy.props.BoolProperty( name="Export animation", default=True) -#} + graph = {} + for n in route_nodes: + graph[n.name] = {} -class CV_MATERIAL_SETTINGS(bpy.types.PropertyGroup): -#{ - shader: bpy.props.EnumProperty( - name="Format", - items = [ - ('standard',"standard","",0), - ('standard_cutout', "standard_cutout", "", 1), - ('terrain_blend', "terrain_blend", "", 2), - ('vertex_blend', "vertex_blend", "", 3), - ('water',"water","",4), - ]) + for i in range(len(route_nodes)-1):#{ + for j in range(i+1, len(route_nodes)):#{ + ni = route_nodes[i] + nj = route_nodes[j] - surface_prop: bpy.props.EnumProperty( - name="Surface Property", - items = [ - ('concrete','concrete','',0), - ('wood','wood','',1), - ('grass','grass','',2), - ('tiles','tiles','',3) - ]) - - collision: bpy.props.BoolProperty( \ - name="Collisions Enabled",\ - default=True,\ - description = "Can the player collide with this material"\ - ) - skate_surface: bpy.props.BoolProperty( \ - name="Skate Surface", \ - default=True,\ - description = "Should the game try to target this surface?" \ - ) - grind_surface: bpy.props.BoolProperty( \ - name="Grind Surface", \ - default=False,\ - description = "Grind face?" \ - ) - grow_grass: bpy.props.BoolProperty( \ - name="Grow Grass", \ - default=False,\ - description = "Spawn grass sprites on this surface?" 
\ - ) - blend_offset: bpy.props.FloatVectorProperty( \ - name="Blend Offset", \ - size=2, \ - default=Vector((0.5,0.0)),\ - description="When surface is more than 45 degrees, add this vector " +\ - "to the UVs" \ - ) - sand_colour: bpy.props.FloatVectorProperty( \ - name="Sand Colour",\ - subtype='COLOR',\ - min=0.0,max=1.0,\ - default=Vector((0.79,0.63,0.48)),\ - description="Blend to this colour near the 0 coordinate on UP axis"\ - ) - shore_colour: bpy.props.FloatVectorProperty( \ - name="Shore Colour",\ - subtype='COLOR',\ - min=0.0,max=1.0,\ - default=Vector((0.03,0.32,0.61)),\ - description="Water colour at the shoreline"\ - ) - ocean_colour: bpy.props.FloatVectorProperty( \ - name="Ocean Colour",\ - subtype='COLOR',\ - min=0.0,max=1.0,\ - default=Vector((0.0,0.006,0.03)),\ - description="Water colour in the deep bits"\ - ) -#} + v0 = ni.location - nj.location -class CV_MATERIAL_PANEL(bpy.types.Panel): -#{ - bl_label="Skate Rift material" - bl_idname="MATERIAL_PT_cv_material" - bl_space_type='PROPERTIES' - bl_region_type='WINDOW' - bl_context="material" - - def draw(_,context): - #{ - active_object = bpy.context.active_object - if active_object == None: return - active_mat = active_object.active_material - if active_mat == None: return + gate = None - info = material_info( active_mat ) + if ni.SR_data.ent_type == 'ent_gate': + gate = ni - if 'tex_diffuse' in info: - #{ - _.layout.label( icon='INFO', \ - text=F"{info['tex_diffuse'].name} will be compiled" ) - #} + if nj.SR_data.ent_type == 'ent_gate':#{ + if gate: continue + gate = nj + #} - _.layout.prop( active_mat.cv_data, "shader" ) - _.layout.prop( active_mat.cv_data, "surface_prop" ) - _.layout.prop( active_mat.cv_data, "collision" ) + if gate:#{ + v1 = gate.matrix_world.to_3x3() @ Vector((0,-1,0)) + if gate.SR_data.ent_gate[0].target: + if v1.dot(v0) > 0.0: continue + else: + if v1.dot(v0) < 0.0: continue + #} - if active_mat.cv_data.collision: - _.layout.prop( active_mat.cv_data, "skate_surface" ) - _.layout.prop( active_mat.cv_data, "grind_surface" ) - _.layout.prop( active_mat.cv_data, "grow_grass" ) + dist = v0.magnitude - if active_mat.cv_data.shader == "terrain_blend": - #{ - box = _.layout.box() - box.prop( active_mat.cv_data, "blend_offset" ) - box.prop( active_mat.cv_data, "sand_colour" ) - #} - elif active_mat.cv_data.shader == "vertex_blend": - #{ - box = _.layout.box() - box.label( icon='INFO', text="Uses vertex colours, the R channel" ) - box.prop( active_mat.cv_data, "blend_offset" ) - #} - elif active_mat.cv_data.shader == "water": - #{ - box = _.layout.box() - box.label( icon='INFO', text="Depth scale of 16 meters" ) - box.prop( active_mat.cv_data, "shore_colour" ) - box.prop( active_mat.cv_data, "ocean_colour" ) + if dist > 25.0: continue + graph[route_nodes[i].name][route_nodes[j].name] = dist + graph[route_nodes[j].name][route_nodes[i].name] = dist #} #} + + return graph #} -class CV_OBJ_PANEL(bpy.types.Panel): +def cv_draw_route( route, route_nodes ): #{ - bl_label="Entity Config" - bl_idname="SCENE_PT_cv_entity" - bl_space_type='PROPERTIES' - bl_region_type='WINDOW' - bl_context="object" - - def draw(_,context): - #{ - active_object = bpy.context.active_object - if active_object == None: return - if active_object.type == 'ARMATURE': - #{ - row = _.layout.row() - row.enabled = False - row.label( text="This object has the intrinsic classtype of skeleton" ) - return - #} + pole = Vector((0.2,0.2,20)) + hat = Vector((20,2.0,0.2)) + cc = route.SR_data.ent_route[0].colour - _.layout.prop( active_object.cv_data, 
"classtype" ) + cv_draw_ucube( route.matrix_world, cc, Vector((20,1,10)) ) + cv_draw_ucube( route.matrix_world, cc, pole, Vector((-20,1,-10)) ) + cv_draw_ucube( route.matrix_world, cc, pole, Vector(( 20,1,-10)) ) + cv_draw_ucube( route.matrix_world, cc, hat, Vector((0,-1, 10)) ) + cv_draw_ucube( route.matrix_world, cc, hat, Vector((0,-1,-10)) ) - classtype = active_object.cv_data.classtype + checkpoints = route.SR_data.ent_route[0].gates + graph = node_graph( route_nodes ) - if (classtype != 'classtype_none') and (classtype in globals()): - #{ - cl = globals()[ classtype ] + for i in range(len(checkpoints)):#{ + gi = checkpoints[i].target + gj = checkpoints[(i+1)%len(checkpoints)].target - if getattr( cl, "editor_interface", None ): - #{ - cl.editor_interface( _.layout, active_object ) - #} + if gi:#{ + dest = gi.SR_data.ent_gate[0].target + if dest: + cv_draw_line_dotted( gi.location, dest.location, cc ) + gi = dest #} - #} -#} -class CV_COMPILE(bpy.types.Operator): -#{ - bl_idname="carve.compile_all" - bl_label="Compile All" + if gi==gj: continue # error? + if not gi or not gj: continue - def execute(_,context): - #{ - view_layer = bpy.context.view_layer - for col in view_layer.layer_collection.children["export"].children: - if not col.hide_viewport or bpy.context.scene.cv_data.use_hidden: - write_model( col.name ) + path = dijkstra( graph, gj.name, gi.name ) - return {'FINISHED'} + if path:#{ + for sj in range(len(path)-1):#{ + o0 = bpy.data.objects[ path[sj] ] + o1 = bpy.data.objects[ path[sj+1] ] + cv_draw_arrow(o0.location,o1.location,cc,1.5) + #} + #} + else:#{ + cv_draw_line_dotted( gi.location, gj.location, cc ) + #} #} #} -class CV_COMPILE_THIS(bpy.types.Operator): +def cv_draw(): #{ - bl_idname="carve.compile_this" - bl_label="Compile This collection" - - def execute(_,context): - #{ - col = bpy.context.collection - write_model( col.name ) + global cv_view_shader + global cv_view_verts + global cv_view_colours + global cv_view_course_i - return {'FINISHED'} - #} -#} + cv_view_course_i = 0 + cv_view_verts = [] + cv_view_colours = [] -class CV_INTERFACE(bpy.types.Panel): -#{ - bl_idname = "VIEW3D_PT_carve" - bl_label = "Skate Rift" - bl_space_type = 'VIEW_3D' - bl_region_type = 'UI' - bl_category = "Skate Rift" + cv_view_shader.bind() + gpu.state.depth_mask_set(False) + gpu.state.line_width_set(2.0) + gpu.state.face_culling_set('BACK') + gpu.state.depth_test_set('LESS') + gpu.state.blend_set('NONE') - def draw(_, context): - #{ - layout = _.layout - layout.prop( context.scene.cv_data, "export_dir" ) - - col = bpy.context.collection - - found_in_export = False - export_count = 0 - view_layer = bpy.context.view_layer - for c1 in view_layer.layer_collection.children["export"].children: - #{ - if not c1.hide_viewport or bpy.context.scene.cv_data.use_hidden: - export_count += 1 + route_nodes = [] + routes = [] - if c1.name == col.name: - #{ - found_in_export = True - #} + for obj in bpy.context.collection.objects:#{ + if obj.type == 'ARMATURE':#{ + if obj.data.pose_position == 'REST': + draw_skeleton_helpers( obj ) #} + else:#{ + ent_type = obj_ent_type( obj ) - box = layout.box() - if found_in_export: - #{ - box.label( text=col.name + ".mdl" ) - box.prop( col.cv_data, "pack_textures" ) - box.prop( col.cv_data, "animations" ) - box.operator( "carve.compile_this" ) - #} - else: - #{ - row = box.row() - row.enabled=False - row.label( text=col.name ) - box.label( text="This collection is not in the export group" ) + if ent_type == 'ent_gate':#{ + cv_ent_gate( obj ) + route_nodes += [obj] 
+ #} + elif ent_type == 'ent_route_node': + route_nodes += [obj] + elif ent_type == 'ent_route': + routes += [obj] #} - - box = layout.box() - row = box.row() - - split = row.split( factor = 0.3, align=True ) - split.prop( context.scene.cv_data, "use_hidden", text="hidden" ) - - row1 = split.row() - if export_count == 0: - row1.enabled=False - row1.operator( "carve.compile_all", \ - text=F"Compile all ({export_count} collections)" ) #} -#} + + #cv_draw_route_map( route_nodes ) + for route in routes:#{ + cv_draw_route( route, route_nodes ) + #} + cv_draw_lines() + return +#} -classes = [CV_OBJ_SETTINGS,CV_OBJ_PANEL,CV_COMPILE,CV_INTERFACE,\ - CV_MESH_SETTINGS, CV_SCENE_SETTINGS, CV_BONE_SETTINGS,\ - CV_BONE_PANEL, CV_COLLECTION_SETTINGS, CV_COMPILE_THIS,\ - CV_MATERIAL_SETTINGS, CV_MATERIAL_PANEL, CV_LIGHT_SETTINGS,\ - CV_LIGHT_PANEL] +classes = [ SR_INTERFACE, SR_MATERIAL_PANEL,\ + SR_COLLECTION_SETTINGS, SR_SCENE_SETTINGS, \ + SR_COMPILE, SR_COMPILE_THIS, SR_MIRROR_BONE_X,\ + \ + SR_OBJECT_ENT_GATE, SR_MESH_ENT_GATE, SR_OBJECT_ENT_SPAWN, \ + SR_OBJECT_ENT_ROUTE_ENTRY, SR_UL_ROUTE_NODE_LIST, \ + SR_OBJECT_ENT_ROUTE, SR_OT_ROUTE_LIST_NEW_ITEM, + SR_OT_ROUTE_LIST_DEL_ITEM,\ + \ + SR_OBJECT_PROPERTIES, SR_LIGHT_PROPERTIES, SR_BONE_PROPERTIES, + SR_MESH_PROPERTIES, SR_MATERIAL_PROPERTIES \ + ] def register(): #{ - global cv_view_draw_handler - for c in classes: bpy.utils.register_class(c) - bpy.types.Object.cv_data = bpy.props.PointerProperty(type=CV_OBJ_SETTINGS) - bpy.types.Mesh.cv_data = bpy.props.PointerProperty(type=CV_MESH_SETTINGS) - bpy.types.Scene.cv_data = bpy.props.PointerProperty(type=CV_SCENE_SETTINGS) - bpy.types.Bone.cv_data = bpy.props.PointerProperty(type=CV_BONE_SETTINGS) - bpy.types.Collection.cv_data = \ - bpy.props.PointerProperty(type=CV_COLLECTION_SETTINGS) - bpy.types.Material.cv_data = \ - bpy.props.PointerProperty(type=CV_MATERIAL_SETTINGS) - bpy.types.Light.cv_data = bpy.props.PointerProperty(type=CV_LIGHT_SETTINGS) + bpy.types.Scene.SR_data = \ + bpy.props.PointerProperty(type=SR_SCENE_SETTINGS) + bpy.types.Collection.SR_data = \ + bpy.props.PointerProperty(type=SR_COLLECTION_SETTINGS) + + bpy.types.Object.SR_data = \ + bpy.props.PointerProperty(type=SR_OBJECT_PROPERTIES) + bpy.types.Light.SR_data = \ + bpy.props.PointerProperty(type=SR_LIGHT_PROPERTIES) + bpy.types.Bone.SR_data = \ + bpy.props.PointerProperty(type=SR_BONE_PROPERTIES) + bpy.types.Mesh.SR_data = \ + bpy.props.PointerProperty(type=SR_MESH_PROPERTIES) + bpy.types.Material.SR_data = \ + bpy.props.PointerProperty(type=SR_MATERIAL_PROPERTIES) + global cv_view_draw_handler cv_view_draw_handler = bpy.types.SpaceView3D.draw_handler_add(\ cv_draw,(),'WINDOW','POST_VIEW') #} def unregister(): #{ - global cv_view_draw_handler - for c in classes: bpy.utils.unregister_class(c) + global cv_view_draw_handler bpy.types.SpaceView3D.draw_handler_remove(cv_view_draw_handler,'WINDOW') #} @@ -3141,7 +2565,8 @@ def qoi_encode( img ): #{ data = bytearray() - print(F" . 
Encoding {img.name}.qoi[{img.size[0]},{img.size[1]}]") + print(F"{' ':<30}",end='\r') + print(F"[QOI] Encoding {img.name}.qoi[{img.size[0]},{img.size[1]}]",end='\r') index = [ qoi_rgba_t() for _ in range(64) ] @@ -3167,11 +2592,9 @@ def qoi_encode( img ): px.a = c_uint8(255) px_len = img.size[0] * img.size[1] - paxels = [ int(min(max(_,0),1)*255) for _ in img.pixels ] - for px_pos in range( px_len ): - #{ + for px_pos in range( px_len ): #{ idx = px_pos * img.channels nc = img.channels-1 @@ -3180,39 +2603,32 @@ def qoi_encode( img ): px.b = paxels[idx+min(2,nc)] px.a = paxels[idx+min(3,nc)] - if qoi_eq( px, px_prev ): - #{ + if qoi_eq( px, px_prev ): #{ run += 1 - if (run == 62) or (px_pos == px_len-1): - #{ + if (run == 62) or (px_pos == px_len-1): #{ data.extend( bytearray( c_uint8(QOI_OP_RUN | (run-1))) ) run = 0 #} #} - else: - #{ - if run > 0: - #{ + else: #{ + if run > 0: #{ data.extend( bytearray( c_uint8(QOI_OP_RUN | (run-1))) ) run = 0 #} index_pos = qoi_colour_hash(px) % 64 - if qoi_eq( index[index_pos], px ): - #{ + if qoi_eq( index[index_pos], px ): #{ data.extend( bytearray( c_uint8(QOI_OP_INDEX | index_pos)) ) #} - else: - #{ + else: #{ index[ index_pos ].r = px.r index[ index_pos ].g = px.g index[ index_pos ].b = px.b index[ index_pos ].a = px.a - if px.a == px_prev.a: - #{ + if px.a == px_prev.a: #{ vr = int(px.r) - int(px_prev.r) vg = int(px.g) - int(px_prev.g) vb = int(px.b) - int(px_prev.b) @@ -3236,16 +2652,14 @@ def qoi_encode( img ): data.extend( bytearray( c_uint8(op) ) ) data.extend( bytearray( c_uint8(delta) )) #} - else: - #{ + else: #{ data.extend( bytearray( c_uint8(QOI_OP_RGB) ) ) data.extend( bytearray( c_uint8(px.r) )) data.extend( bytearray( c_uint8(px.g) )) data.extend( bytearray( c_uint8(px.b) )) #} #} - else: - #{ + else: #{ data.extend( bytearray( c_uint8(QOI_OP_RGBA) ) ) data.extend( bytearray( c_uint8(px.r) )) data.extend( bytearray( c_uint8(px.g) )) @@ -3265,7 +2679,7 @@ def qoi_encode( img ): for i in range(7): data.extend( bytearray( c_uint8(0) )) data.extend( bytearray( c_uint8(1) )) - bytearray_align_to( data, 16, 0 ) + bytearray_align_to( data, 16, b'\x00' ) return data #}
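#
# A minimal read-back sketch for sanity-checking a compiled .mdl. Assumptions:
# the layout matches what sr_compile writes above (an 8-byte-aligned
# mdl_header, then the 'index' block of mdl_array entries, then the data
# arrays), and sr_array_title() stores its arguments into the
# file_offset/item_count/item_size/name fields. sr_verify_mdl is a
# hypothetical debugging helper, not something the exporter itself calls.
#
def sr_verify_mdl( path ):
#{
   with open( path, 'rb' ) as fp:
      raw = fp.read()

   header = mdl_header.from_buffer_copy( raw )
   print( F"[SR] version: {header.version}" )

   # header.arrays locates the index; each entry describes one data array
   index = header.arrays
   for i in range( index.item_count ):#{
      ofs = index.file_offset + i*sizeof(mdl_array)
      arr = mdl_array.from_buffer_copy( raw, ofs )
      name = bytes(arr.name).split(b'\x00')[0].decode()

      print( F"[SR] {name:>16}| {arr.item_count: 8} items of " +\
             F"{arr.item_size:>3} bytes @0x{arr.file_offset:02x}" )
   #}
#}
#
# Usage (e.g. from Blender's Python console; the path is illustrative only):
#    sr_verify_mdl( '/tmp/example.mdl' )
#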