X-Git-Url: https://harrygodden.com/git/?a=blobdiff_plain;f=blender_export.py;h=a7e714c2fae4a76661b19f7a9fcfa0e1f9412be1;hb=bdac014448b6ec968fe645f1581f321144f07dba;hp=05f2b6ffc833747ef8d2d9b90b2e5a625a940b21;hpb=fc32ce17923a42f9a0f250e4ab21a08411a41acb;p=carveJwlIkooP6JGAAIwe30JlM.git diff --git a/blender_export.py b/blender_export.py index 05f2b6f..a7e714c 100644 --- a/blender_export.py +++ b/blender_export.py @@ -1,143 +1,1879 @@ -import bpy, math +# +# Copyright (C) 2021-2022 Mt.ZERO Software, Harry Godden - All Rights Reserved +# + +import bpy, math, gpu +import cProfile from ctypes import * +from mathutils import * +from gpu_extras.batch import batch_for_shader -class model(Structure): - _pack_ = 1 - _fields_ = [("identifier",c_uint32), - ("vertex_count",c_uint32), - ("indice_count",c_uint32), - ("layer_count",c_uint32)] +bl_info = { + "name":"Skate Rift model compiler", + "author": "Harry Godden (hgn)", + "version": (0,2), + "blender":(3,1,0), + "location":"Export", + "descriptin":"", + "warning":"", + "wiki_url":"", + "category":"Import/Export", +} -class sdf_primative(Structure): - _pack_ = 1 - _fields_ = [("origin",c_float*4), - ("info",c_float*4)] +class mdl_vert(Structure): # 48 bytes. Quite large. Could compress +#{ # the normals and uvs to i16s. Not an + _pack_ = 1 # real issue, yet. + _fields_ = [("co",c_float*3), + ("norm",c_float*3), + ("uv",c_float*2), + ("colour",c_uint8*4), + ("weights",c_uint16*4), + ("groups",c_uint8*4)] +#} -class submodel(Structure): +class mdl_submesh(Structure): +#{ _pack_ = 1 _fields_ = [("indice_start",c_uint32), ("indice_count",c_uint32), ("vertex_start",c_uint32), ("vertex_count",c_uint32), ("bbx",(c_float*3)*2), - ("sdf",sdf_primative), - ("sdf_type",c_int32), - ("name",c_char*32)] + ("material_id",c_uint32)] # index into the material array +#} + +class mdl_material(Structure): +#{ + _pack_ = 1 + _fields_ = [("pstr_name",c_uint32)] +#} -class model_vert(Structure): +class mdl_node(Structure): +#{ _pack_ = 1 _fields_ = [("co",c_float*3), - ("norm",c_float*3), - ("colour",c_float*4), - ("uv",c_float*2)] + ( "q",c_float*4), + ( "s",c_float*3), + ("sub_uid",c_uint32), # dont use + ("submesh_start",c_uint32), + ("submesh_count",c_uint32), + ("classtype",c_uint32), + ("offset",c_uint32), + ("parent",c_uint32), + ("pstr_name",c_uint32)] +#} -def fixed_string(dest,string): - return - for i in range(len(string)): - dest[i] = string[i] +class mdl_header(Structure): +#{ + _pack_ = 1 + _fields_ = [("identifier",c_uint32), + ("version",c_uint32), + ("file_length",c_uint32), + ("pad0",c_uint32), -def write_model(name): - fp = open(F"/home/harry/Documents/carve/models/{name}.mdl", "wb") - collection = bpy.data.collections[name] - - header = model() - header.identifier = 0xABCD0000 - header.vertex_count = 0 - header.indice_count = 0 - header.layer_count = 0 + ("node_count",c_uint32), + ("node_offset",c_uint32), + + ("submesh_count",c_uint32), + ("submesh_offset",c_uint32), + + ("material_count",c_uint32), + ("material_offset",c_uint32), + + ("anim_count",c_uint32), + ("anim_offset",c_uint32), + + ("entdata_size",c_uint32), + ("entdata_offset",c_uint32), + + ("strings_size",c_uint32), + ("strings_offset",c_uint32), + + ("keyframe_count",c_uint32), + ("keyframe_offset",c_uint32), + + ("vertex_count",c_uint32), + ("vertex_offset",c_uint32), + + ("indice_count",c_uint32), + ("indice_offset",c_uint32),] +#} + +class mdl_animation(Structure): +#{ + _pack_ = 1 + _fields_ = [("pstr_name",c_uint32), + ("length",c_uint32), + ("rate",c_float), + ("offset",c_uint32)] +#} 
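+
+# Illustrative sketch only (not used by the exporter): since these ctypes
+# structures are the exact on-disk layout (sizeof(mdl_vert) == 48, matching
+# the note above), a compiled .mdl can be read straight back into them.
+# The path argument is hypothetical.
+#
+def mdl_read_header( path ):
+#{
+   hdr = mdl_header()
+
+   with open( path, "rb" ) as fp:
+      fp.readinto( hdr )   # ctypes structures expose a writable buffer
+
+   # encoder_init() stamps this identifier on every file it writes
+   assert hdr.identifier == 0xABCD0000
+   return hdr
+#}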
+ +class mdl_keyframe(Structure): +#{ + _pack_ = 1 + _fields_ = [("co",c_float*3), + ("q",c_float*4), + ("s",c_float*3)] +#} + +# Entity types +# ========================================== +# +# ctypes _fields_ defines the data which is filled in by: +# def encode_obj( _, node, node_def ): +# +# gizmos get drawn into the viewport via: +# @staticmethod +# def editor_interface( object ): +# + +class classtype_gate(Structure): +#{ + _pack_ = 1 + _fields_ = [("target",c_uint32), + ("dims",c_float*3)] + + def encode_obj(_, node,node_def): + #{ + node.classtype = 1 + + obj = node_def['obj'] - layers = [] - vertex_buffer = [] - indice_buffer = [] + if obj.cv_data.target != None: + _.target = obj.cv_data.target.cv_data.uid - for obj in collection.objects: if obj.type == 'MESH': - dgraph = bpy.context.evaluated_depsgraph_get() - data = obj.evaluated_get(dgraph).data - data.calc_loop_triangles() - data.calc_normals_split() - - sm = submodel() - sm.indice_start = header.indice_count - sm.vertex_start = header.vertex_count - sm.vertex_count = len(data.vertices) - sm.indice_count = len(data.loop_triangles)*3 - sm.sdf_type = 0 - for i in range(3): - sm.bbx[0][i] = 999999 - sm.bbx[1][i] = -999999 - - if F"{obj.name}.sdf_cone" in bpy.data.objects: - cone = bpy.data.objects[F"{obj.name}.sdf_cone"] - sm.sdf.origin[0] = cone.location[0] - sm.sdf.origin[1] = cone.location[2] + cone.scale[1]*2.0 - sm.sdf.origin[2] = -cone.location[1] - sm.sdf.origin[3] = 0.0 - - lo = cone.scale[0] - la = cone.scale[1]*2.0 - lh = math.sqrt(lo*lo+la*la) - - sm.sdf.info[0] = lo - sm.sdf.info[1] = la - sm.sdf.info[2] = lo/lh - sm.sdf.info[3] = la/lh - - sm.sdf_type = 1 + #{ + _.dims[0] = obj.data.cv_data.v0[0] + _.dims[1] = obj.data.cv_data.v0[1] + _.dims[2] = obj.data.cv_data.v0[2] + #} + else: + #{ + _.dims[0] = obj.cv_data.v0[0] + _.dims[1] = obj.cv_data.v0[1] + _.dims[2] = obj.cv_data.v0[2] + #} + #} +#} + +class classtype_spawn(Structure): +#{ + _pack_ = 1 + _fields_ = [("pstr_alias",c_uint32)] + + def encode_obj(_, node,node_def): + #{ + node.classtype = 3 + _.pstr_alias = encoder_process_pstr( node_def['obj'].cv_data.strp ) + #} +#} + +class classtype_water(Structure): +#{ + _pack_ = 1 + _fields_ = [("temp",c_uint32)] + + def encode_obj(_, node,node_def): + #{ + node.classtype = 4 + # no data, spooky + #} +#} + +class classtype_route_node(Structure): +#{ + _pack_ = 1 + _fields_ = [("target",c_uint32), + ("target1",c_uint32)] + + def encode_obj(_, node,node_def): + #{ + node.classtype = 8 + obj = node_def['obj'] + + if obj.cv_data.target != None: + _.target = obj.cv_data.target.cv_data.uid + if obj.cv_data.target1 != None: + _.target1 = obj.cv_data.target1.cv_data.uid + #} +#} + +class classtype_route(Structure): +#{ + _pack_ = 1 + _fields_ = [("id_start",c_uint32), + ("colour",c_float*3)] + + def encode_obj(_, node,node_def): + #{ + node.classtype = 9 + obj = node_def['obj'] + + _.colour[0] = obj.cv_data.colour[0] + _.colour[1] = obj.cv_data.colour[1] + _.colour[2] = obj.cv_data.colour[2] + + if obj.cv_data.target != None: + _.id_start = obj.cv_data.target.cv_data.uid + #} +#} + +class classtype_skin(Structure): +#{ + _pack_ = 1 + _fields_ = [("skeleton",c_uint32)] + + def encode_obj(_, node,node_def): + #{ + node.classtype = 12 + + armature_def = node_def['linked_armature'] + _.skeleton = armature_def['obj'].cv_data.uid + #} +#} + +class classtype_skeleton(Structure): +#{ + _pack_ = 1 + _fields_ = [("channels",c_uint32), + ("ik_count",c_uint32), + ("collider_count",c_uint32), + ("anim_start",c_uint32), + 
("anim_count",c_uint32)] + + def encode_obj(_, node,node_def): + #{ + node.classtype = 11 + + _.channels = len( node_def['bones'] ) + _.ik_count = node_def['ik_count'] + _.collider_count = node_def['collider_count'] + _.anim_start = node_def['anim_start'] + _.anim_count = node_def['anim_count'] + #} +#} + +class classtype_bone(Structure): +#{ + _pack_ = 1 + _fields_ = [("deform",c_uint32), + ("ik_target",c_uint32), + ("ik_pole",c_uint32), + ("collider",c_uint32), + ("use_limits",c_uint32), + ("angle_limits",(c_float*3)*2), + ("hitbox",(c_float*3)*2)] + + def encode_obj(_, node,node_def): + #{ + node.classtype = 10 + + armature_def = node_def['linked_armature'] + obj = node_def['bone'] + + _.deform = node_def['deform'] + + if 'ik_target' in node_def: + #{ + _.ik_target = armature_def['bones'].index( node_def['ik_target'] ) + _.ik_pole = armature_def['bones'].index( node_def['ik_pole'] ) + #} + + # For ragdolls + # + if obj.cv_data.collider: + #{ + _.collider = 1 + _.hitbox[0][0] = obj.cv_data.v0[0] + _.hitbox[0][1] = obj.cv_data.v0[2] + _.hitbox[0][2] = -obj.cv_data.v1[1] + _.hitbox[1][0] = obj.cv_data.v1[0] + _.hitbox[1][1] = obj.cv_data.v1[2] + _.hitbox[1][2] = -obj.cv_data.v0[1] + #} + + if obj.cv_data.con0: + #{ + _.use_limits = 1 + _.angle_limits[0][0] = obj.cv_data.mins[0] + _.angle_limits[0][1] = obj.cv_data.mins[2] + _.angle_limits[0][2] = -obj.cv_data.maxs[1] + _.angle_limits[1][0] = obj.cv_data.maxs[0] + _.angle_limits[1][1] = obj.cv_data.maxs[2] + _.angle_limits[1][2] = -obj.cv_data.mins[1] + #} + #} +#} + + +# TO BE REPLACED +# +class classtype_achievement_box(Structure): +#{ + _pack_ = 1 + _fields_ = [("pstr_name",c_uint32), + ("trigger",c_uint32)] + + def encode_obj(_, node,node_def ): + #{ + node.classtype = 0 + #} +#} + +class classtype_audio(Structure): +#{ + _pack_ = 1 + _fields_ = [("pstr_file",c_uint32), + ("flags",c_uint32), + ("volume",c_float)] + + def encode_obj(_, node,node_def ): + #{ + node.classtype = 14 + + obj = node_def['obj'] + + _.pstr_file = encoder_process_pstr( obj.cv_data.strp ) + _.flags = obj.cv_data.intp + _.volume = obj.cv_data.fltp + #} + + @staticmethod + def editor_interface(yada): + #{ + pass + #} + + @staticmethod + def draw_scene_helpers(yada): + #{ + pass + #} +#} + + +# Current encoder state +# +g_encoder = None + + +# Reset encoder +# +def encoder_init(): +#{ + global g_encoder + + g_encoder = \ + { + # The actual file header + # + 'header': mdl_header(), + + # Compiled data chunks (each can be read optionally by the client) + # + 'data': + { + #1--------------------------------- + 'node': [], # Metadata 'chunk' + 'submesh': [], + 'material': [], + 'anim': [], + 'entdata': bytearray(), # variable width + 'strings': bytearray(), # . 
+ #2--------------------------------- + 'keyframe': [], # Animations + #3--------------------------------- + 'vertex': [], # Mesh data + 'indice': [], + }, + + # All objects of the model in their final heirachy + # + "uid_count": 1, + "scene_graph":{}, + "graph_lookup":{}, + + # Allows us to reuse definitions + # + 'string_cache':{}, + 'mesh_cache': {}, + 'material_cache': {}, + } + + g_encoder['header'].identifier = 0xABCD0000 + g_encoder['header'].version = 1 + + # Add fake NoneID material + # + none_material = c_uint32(1234) + none_material.name = "" + encoder_process_material( none_material ) + + # Add root node + # + root = mdl_node() + root.co[0] = 0 + root.co[1] = 0 + root.co[2] = 0 + root.q[0] = 0 + root.q[1] = 0 + root.q[2] = 0 + root.q[3] = 1 + root.s[0] = 1 + root.s[1] = 1 + root.s[2] = 1 + root.pstr_name = encoder_process_pstr('') + root.submesh_start = 0 + root.submesh_count = 0 + root.offset = 0 + root.classtype = 0 + root.parent = 0xffffffff + + g_encoder['data']['node'] += [root] +#} + + +# fill with 0x00 until a multiple of align. Returns how many bytes it added +# +def bytearray_align_to( buffer, align, offset=0 ): +#{ + count = 0 + + while ((len(buffer)+offset) % align) != 0: + #{ + buffer.extend( b'\0' ) + count += 1 + #} + + return count +#} + +# Add a string to the string buffer except if it already exists there then we +# just return its ID. +# +def encoder_process_pstr( s ): +#{ + global g_encoder + + cache = g_encoder['string_cache'] + + if s in cache: + return cache[s] + + cache[s] = len( g_encoder['data']['strings'] ) + + buffer = g_encoder['data']['strings'] + buffer.extend( s.encode('utf-8') ) + buffer.extend( b'\0' ) + + bytearray_align_to( buffer, 4 ) + return cache[s] +#} + +# Add a material to the material buffer. Returns 0 (None ID) if invalid +# +def encoder_process_material( mat ): +#{ + global g_encoder + + if mat == None: + return 0 + + cache = g_encoder['material_cache'] + buffer = g_encoder['data']['material'] + + if mat.name in cache: + return cache[mat.name] + + cache[mat.name] = len( buffer ) + + dest = mdl_material() + dest.pstr_name = encoder_process_pstr( mat.name ) + buffer += [dest] + + return cache[mat.name] +#} + +# Create a tree structure containing all the objects in the collection +# +def encoder_build_scene_graph( collection ): +#{ + global g_encoder + + print( " creating scene graph" ) + + # initialize root + # + graph = g_encoder['scene_graph'] + graph_lookup = g_encoder['graph_lookup'] + graph["obj"] = None + graph["depth"] = 0 + graph["children"] = [] + graph["uid"] = 0 + graph["parent"] = None + + def _new_uid(): + #{ + global g_encoder + uid = g_encoder['uid_count'] + g_encoder['uid_count'] += 1 + return uid + #} + + for obj in collection.all_objects: + #{ + if obj.parent: continue + + def _extend( p, n, d ): + #{ + uid = _new_uid() + tree = {} + tree["uid"] = uid + tree["children"] = [] + tree["depth"] = d + tree["obj"] = n + tree["parent"] = p + n.cv_data.uid = uid - sm.name = obj.name.encode('utf-8') - - for vert in data.vertices: - v = model_vert() - v.co[0] = vert.co[0] - v.co[1] = vert.co[2] - v.co[2] = -vert.co[1] - v.colour[0] = 1.0 - v.colour[1] = 1.0 - v.colour[2] = 1.0 - v.colour[3] = 1.0 - vertex_buffer += [v] + # Descend into amature + # + if n.type == 'ARMATURE': + #{ + tree["bones"] = [None] # None is the root transform + tree["ik_count"] = 0 + tree["collider_count"] = 0 - for i in range(3): - sm.bbx[0][i] = min( sm.bbx[0][i], v.co[i] ) - sm.bbx[1][i] = max( sm.bbx[1][i], v.co[i] ) - - for l in data.loops: - pvert = 
vertex_buffer[l.vertex_index + sm.vertex_start] - norm = l.normal - pvert.norm[0] = norm[0] - pvert.norm[1] = norm[2] - pvert.norm[2] = -norm[1] - - #if data.vertex_colors: - # colour = data.vertex_colors.active.data[ l.index ].color - # pvert.colour[0] = colour[0] + # Here also collects some information about constraints, ik and + # counts colliders for the armature. + # + def _extendb( p, n, d ): + #{ + nonlocal tree + + btree = {} + btree["bone"] = n + btree["linked_armature"] = tree + btree["uid"] = _new_uid() + btree["children"] = [] + btree["depth"] = d + btree["parent"] = p + tree["bones"] += [n.name] + + for c in n.children: + #{ + _extendb( btree, c, d+1 ) + #} + + for c in tree['obj'].pose.bones[n.name].constraints: + #{ + if c.type == 'IK': + #{ + btree["ik_target"] = c.subtarget + btree["ik_pole"] = c.pole_subtarget + tree["ik_count"] += 1 + #} + #} + + if n.cv_data.collider: + tree['collider_count'] += 1 + + btree['deform'] = n.use_deform + p['children'] += [btree] + #} + + for b in n.data.bones: + if not b.parent: + _extendb( tree, b, d+1 ) + #} + #} + # Recurse into children of this object + # + for obj1 in n.children: + #{ + nonlocal collection + for c1 in obj1.users_collection: + #{ + if c1 == collection: + #{ + _extend( tree, obj1, d+1 ) + break + #} + #} + #} + + p["children"] += [tree] + graph_lookup[n] = tree + + #} + + _extend( graph, obj, 1 ) + + #} +#} + + +# Kind of a useless thing i made but it looks cool and adds complexity!!1 +# +def encoder_graph_iterator( root ): +#{ + for c in root['children']: + #{ + yield c + yield from encoder_graph_iterator(c) + #} +#} + + +# Push a vertex into the model file, or return a cached index (c_uint32) +# +def encoder_vertex_push( vertex_reference, co,norm,uv,colour,groups,weights ): +#{ + global g_encoder + buffer = g_encoder['data']['vertex'] + + TOLERENCE = 4 + m = float(10**TOLERENCE) + + # Would be nice to know if this can be done faster than it currently runs, + # its quite slow. + # + key = (int(co[0]*m+0.5), + int(co[1]*m+0.5), + int(co[2]*m+0.5), + int(norm[0]*m+0.5), + int(norm[1]*m+0.5), + int(norm[2]*m+0.5), + int(uv[0]*m+0.5), + int(uv[1]*m+0.5), + colour[0]*m+0.5, # these guys are already quantized + colour[1]*m+0.5, # . + colour[2]*m+0.5, # . + colour[3]*m+0.5, # . 
+ weights[0]*m+0.5, # v + weights[1]*m+0.5, + weights[2]*m+0.5, + weights[3]*m+0.5, + groups[0]*m+0.5, + groups[1]*m+0.5, + groups[2]*m+0.5, + groups[3]*m+0.5) + + if key in vertex_reference: + return vertex_reference[key] + else: + #{ + index = c_uint32( len(vertex_reference) ) + vertex_reference[key] = index + + v = mdl_vert() + v.co[0] = co[0] + v.co[1] = co[2] + v.co[2] = -co[1] + v.norm[0] = norm[0] + v.norm[1] = norm[2] + v.norm[2] = -norm[1] + v.uv[0] = uv[0] + v.uv[1] = uv[1] + v.colour[0] = colour[0] + v.colour[1] = colour[1] + v.colour[2] = colour[2] + v.colour[3] = colour[3] + v.weights[0] = weights[0] + v.weights[1] = weights[1] + v.weights[2] = weights[2] + v.weights[3] = weights[3] + v.groups[0] = groups[0] + v.groups[1] = groups[1] + v.groups[2] = groups[2] + v.groups[3] = groups[3] + + buffer += [v] + return index + #} +#} + + +# Compile a mesh (or use one from the cache) onto node, based on node_def +# No return value +# +def encoder_compile_mesh( node, node_def ): +#{ + global g_encoder + + graph = g_encoder['scene_graph'] + graph_lookup = g_encoder['graph_lookup'] + mesh_cache = g_encoder['mesh_cache'] + obj = node_def['obj'] + armature_def = None + can_use_cache = True + + # Check for modifiers that typically change the data per-instance + # there is no well defined rule for the choices here, its just what i've + # needed while producing the game. + # + # It may be possible to detect these cases automatically. + # + for mod in obj.modifiers: + #{ + if mod.type == 'DATA_TRANSFER' or mod.type == 'SHRINKWRAP' or \ + mod.type == 'BOOLEAN' or mod.type == 'CURVE' or \ + mod.type == 'ARRAY': + #{ + can_use_cache = False + #} + + if mod.type == 'ARMATURE': + armature_def = graph_lookup[mod.object] + + # Check the cache first + # + if can_use_cache and (obj.data.name in mesh_cache): + #{ + ref = mesh_cache[obj.data.name] + node.submesh_start = ref.submesh_start + node.submesh_count = ref.submesh_count + return + #} + + # Compile a whole new mesh + # + node.submesh_start = len( g_encoder['data']['submesh'] ) + node.submesh_count = 0 + + default_mat = c_uint32(12345) + default_mat.name = "" + + dgraph = bpy.context.evaluated_depsgraph_get() + data = obj.evaluated_get(dgraph).data + data.calc_loop_triangles() + data.calc_normals_split() + + # Mesh is split into submeshes based on their material + # + mat_list = data.materials if len(data.materials) > 0 else [default_mat] + for material_id, mat in enumerate(mat_list): + #{ + mref = {} + + sm = mdl_submesh() + sm.indice_start = len( g_encoder['data']['indice'] ) + sm.vertex_start = len( g_encoder['data']['vertex'] ) + sm.vertex_count = 0 + sm.indice_count = 0 + sm.material_id = encoder_process_material( mat ) + + for i in range(3): + #{ + sm.bbx[0][i] = 999999 + sm.bbx[1][i] = -999999 + #} + + # Keep a reference to very very very similar vertices + # + vertex_reference = {} + + # Write the vertex / indice data + # + for tri_index, tri in enumerate(data.loop_triangles): + #{ + if tri.material_index != material_id: + continue + + for j in range(3): + #{ + vert = data.vertices[tri.vertices[j]] + li = tri.loops[j] + vi = data.loops[li].vertex_index + + # Gather vertex information + # + co = vert.co + norm = data.loops[li].normal + uv = (0,0) + colour = (255,255,255,255) + groups = [0,0,0,0] + weights = [0,0,0,0] + + # Uvs + # if data.uv_layers: - uv = data.uv_layers.active.data[ l.index ].uv - pvert.uv[0] = uv[0] - pvert.uv[1] = uv[1] + uv = data.uv_layers.active.data[li].uv + + # Vertex Colours + # + if data.vertex_colors: + #{ + colour 
= data.vertex_colors.active.data[li].color + colour = (int(colour[0]*255.0),\ + int(colour[1]*255.0),\ + int(colour[2]*255.0),\ + int(colour[3]*255.0)) + #} + + # Weight groups: truncates to the 3 with the most influence. The + # fourth bone ID is never used by the shader so it is + # always 0 + # + if armature_def: + #{ + src_groups = [_ for _ in data.vertices[vi].groups \ + if obj.vertex_groups[_.group].name in \ + armature_def['bones']] + + weight_groups = sorted( src_groups, key = \ + lambda a: a.weight, reverse=True ) + tot = 0.0 + for ml in range(3): + #{ + if len(weight_groups) > ml: + #{ + g = weight_groups[ml] + name = obj.vertex_groups[g.group].name + weight = g.weight + + weights[ml] = weight + groups[ml] = armature_def['bones'].index(name) + tot += weight + #} + #} + + if len(weight_groups) > 0: + #{ + inv_norm = (1.0/tot) * 65535.0 + for ml in range(3): + #{ + weights[ml] = int( weights[ml] * inv_norm ) + weights[ml] = min( weights[ml], 65535 ) + weights[ml] = max( weights[ml], 0 ) + #} + #} + + # Add vertex and expand bound box + # + index = encoder_vertex_push( vertex_reference, co, \ + norm, \ + uv, \ + colour, \ + groups, \ + weights ) + g_encoder['data']['indice'] += [index] + #} + #} + + # How many unique verts did we add in total + # + sm.vertex_count = len(g_encoder['data']['vertex']) - sm.vertex_start + sm.indice_count = len(g_encoder['data']['indice']) - sm.indice_start + + # Make sure bounding box isn't -inf -> inf if no vertices + # + if sm.vertex_count == 0: + for j in range(2): + for i in range(3): + sm.bbx[j][i] = 0 + else: + #{ + for j in range(sm.vertex_count): + #{ + vert = g_encoder['data']['vertex'][ sm.vertex_start + j ] - for tri in data.loop_triangles: - indice_buffer += [c_uint32(tri.vertices[_]) for _ in range(3)] + for i in range(3): + #{ + sm.bbx[0][i] = min( sm.bbx[0][i], vert.co[i] ) + sm.bbx[1][i] = max( sm.bbx[1][i], vert.co[i] ) + #} + #} + #} + + # Add submesh to encoder + # + g_encoder['data']['submesh'] += [sm] + node.submesh_count += 1 - layers += [sm] - header.layer_count += 1 - header.vertex_count += sm.vertex_count - header.indice_count += sm.indice_count + #} + + # Save a reference to this node since we want to reuse the submesh indices + # later. + g_encoder['mesh_cache'][obj.data.name] = node +#} - fp.write( bytearray( header ) ) - for l in layers: - fp.write( bytearray(l) ) - for v in vertex_buffer: - fp.write( bytearray(v) ) - for i in indice_buffer: - fp.write( bytearray(i) ) +def encoder_compile_ent_as( name, node, node_def ): +#{ + global g_encoder + + if name == 'classtype_none': + #{ + node.offset = 0 + node.classtype = 0 + return + #} + elif name not in globals(): + #{ + print( "Classtype '" +name + "' is unknown!" 
) + return + #} + + buffer = g_encoder['data']['entdata'] + node.offset = len(buffer) + + cl = globals()[ name ] + inst = cl() + inst.encode_obj( node, node_def ) + + buffer.extend( bytearray(inst) ) + bytearray_align_to( buffer, 4 ) +#} + +# Compiles animation data into model and gives us some extra node_def entries +# +def encoder_compile_armature( node, node_def ): +#{ + global g_encoder + + entdata = g_encoder['data']['entdata'] + animdata = g_encoder['data']['anim'] + keyframedata = g_encoder['data']['keyframe'] + mesh_cache = g_encoder['mesh_cache'] + obj = node_def['obj'] + bones = node_def['bones'] + + # extra info + node_def['anim_start'] = len(animdata) + node_def['anim_count'] = 0 + + # Compile anims + # + if obj.animation_data: + #{ + # So we can restore later + # + previous_frame = bpy.context.scene.frame_current + previous_action = obj.animation_data.action + POSE_OR_REST_CACHE = obj.data.pose_position + obj.data.pose_position = 'POSE' + + for NLALayer in obj.animation_data.nla_tracks: + #{ + for NLAStrip in NLALayer.strips: + #{ + # set active + # + for a in bpy.data.actions: + #{ + if a.name == NLAStrip.name: + #{ + obj.animation_data.action = a + break + #} + #} + + # Clip to NLA settings + # + anim_start = int(NLAStrip.action_frame_start) + anim_end = int(NLAStrip.action_frame_end) + + # Export strips + # + anim = mdl_animation() + anim.pstr_name = encoder_process_pstr( NLAStrip.action.name ) + anim.rate = 30.0 + anim.offset = len(keyframedata) + anim.length = anim_end-anim_start + + # Export the keyframes + for frame in range(anim_start,anim_end): + #{ + bpy.context.scene.frame_set(frame) + + for bone_name in bones: + #{ + for pb in obj.pose.bones: + #{ + if pb.name != bone_name: continue + + rb = obj.data.bones[ bone_name ] + + # relative bone matrix + if rb.parent is not None: + #{ + offset_mtx = rb.parent.matrix_local + offset_mtx = offset_mtx.inverted_safe() @ \ + rb.matrix_local + + inv_parent = pb.parent.matrix @ offset_mtx + inv_parent.invert_safe() + fpm = inv_parent @ pb.matrix + #} + else: + #{ + bone_mtx = rb.matrix.to_4x4() + local_inv = rb.matrix_local.inverted_safe() + fpm = bone_mtx @ local_inv @ pb.matrix + #} + + loc, rot, sca = fpm.decompose() + + # local position + final_pos = Vector(( loc[0], loc[2], -loc[1] )) + + # rotation + lc_m = pb.matrix_channel.to_3x3() + if pb.parent is not None: + #{ + smtx = pb.parent.matrix_channel.to_3x3() + lc_m = smtx.inverted() @ lc_m + #} + rq = lc_m.to_quaternion() + + kf = mdl_keyframe() + kf.co[0] = final_pos[0] + kf.co[1] = final_pos[1] + kf.co[2] = final_pos[2] + + kf.q[0] = rq[1] + kf.q[1] = rq[3] + kf.q[2] = -rq[2] + kf.q[3] = rq[0] + + # scale + kf.s[0] = sca[0] + kf.s[1] = sca[2] + kf.s[2] = sca[1] + + keyframedata += [kf] + break + #} + #} + #} + + # Add to animation buffer + # + animdata += [anim] + node_def['anim_count'] += 1 + + # Report progress + # + status_name = F" " + " |"*(node_def['depth']-1) + print( F"{status_name} | *anim: {NLAStrip.action.name}" ) + #} + #} + + # Restore context to how it was before + # + bpy.context.scene.frame_set( previous_frame ) + obj.animation_data.action = previous_action + obj.data.pose_position = POSE_OR_REST_CACHE + #} +#} + +# We are trying to compile this node_def +# +def encoder_process_definition( node_def ): +#{ + global g_encoder + + # data sources for object/bone are taken differently + # + if 'obj' in node_def: + #{ + obj = node_def['obj'] + obj_type = obj.type + obj_co = obj.location + + if obj_type == 'ARMATURE': + obj_classtype = 'classtype_skeleton' + 
else: + #{ + obj_classtype = obj.cv_data.classtype + + # Check for armature deform + # + for mod in obj.modifiers: + #{ + if mod.type == 'ARMATURE': + #{ + obj_classtype = 'classtype_skin' + + # Make sure to freeze armature in rest while we collect + # vertex information + # + armature_def = g_encoder['graph_lookup'][mod.object] + POSE_OR_REST_CACHE = armature_def['obj'].data.pose_position + armature_def['obj'].data.pose_position = 'REST' + node_def['linked_armature'] = armature_def + break + #} + #} + #} + #} + + elif 'bone' in node_def: + #{ + obj = node_def['bone'] + obj_type = 'BONE' + obj_co = obj.head_local + obj_classtype = 'classtype_bone' + #} + + # Create node + # + node = mdl_node() + node.pstr_name = encoder_process_pstr( obj.name ) + + if node_def["parent"]: + node.parent = node_def["parent"]["uid"] + + # Setup transform + # + node.co[0] = obj_co[0] + node.co[1] = obj_co[2] + node.co[2] = -obj_co[1] + + # Convert rotation quat to our space type + # + quat = obj.matrix_local.to_quaternion() + node.q[0] = quat[1] + node.q[1] = quat[3] + node.q[2] = -quat[2] + node.q[3] = quat[0] + + # Bone scale is just a vector to the tail + # + if obj_type == 'BONE': + #{ + node.s[0] = obj.tail_local[0] - node.co[0] + node.s[1] = obj.tail_local[2] - node.co[1] + node.s[2] = -obj.tail_local[1] - node.co[2] + #} + else: + #{ + node.s[0] = obj.scale[0] + node.s[1] = obj.scale[2] + node.s[2] = obj.scale[1] + #} + + # Report status + # + tot_uid = g_encoder['uid_count']-1 + obj_uid = node_def['uid'] + obj_depth = node_def['depth']-1 + + status_id = F" [{obj_uid: 3}/{tot_uid}]" + " |"*obj_depth + status_name = status_id + F" L {obj.name}" + + if obj_classtype != 'classtype_none': status_type = obj_classtype + else: status_type = obj_type + + status_parent = F"{node.parent: 3}" + status_armref = "" + + if obj_classtype == 'classtype_skin': + status_armref = F" [armature -> {armature_def['obj'].cv_data.uid}]" + + print(F"{status_name:<32} {status_type:<22} {status_parent} {status_armref}") + + # Process mesh if needed + # + if obj_type == 'MESH': + #{ + encoder_compile_mesh( node, node_def ) + #} + elif obj_type == 'ARMATURE': + #{ + encoder_compile_armature( node, node_def ) + #} + + encoder_compile_ent_as( obj_classtype, node, node_def ) + + # Make sure to reset the armature we just mucked about with + # + if obj_classtype == 'classtype_skin': + armature_def['obj'].data.pose_position = POSE_OR_REST_CACHE + + g_encoder['data']['node'] += [node] +#} + +# The post processing step or the pre processing to the writing step +# +def encoder_write_to_file( path ): +#{ + global g_encoder + + # Compile down to a byte array + # + header = g_encoder['header'] + file_pos = sizeof(header) + file_data = bytearray() + print( " Compositing data arrays" ) + + for array_name in g_encoder['data']: + #{ + file_pos += bytearray_align_to( file_data, 16, sizeof(header) ) + arr = g_encoder['data'][array_name] + + setattr( header, array_name + "_offset", file_pos ) + + print( F" {array_name:<16} @{file_pos:> 8X}[{len(arr)}]" ) + + if isinstance( arr, bytearray ): + #{ + setattr( header, array_name + "_size", len(arr) ) + + file_data.extend( arr ) + file_pos += len(arr) + #} + else: + #{ + setattr( header, array_name + "_count", len(arr) ) + + for item in arr: + #{ + bbytes = bytearray(item) + file_data.extend( bbytes ) + file_pos += sizeof(item) + #} + #} + #} + + # This imperitive for this field to be santized in the future! 
+ # + header.file_length = file_pos + + print( " Writing file" ) + # Write header and data chunk to file + # + fp = open( path, "wb" ) + fp.write( bytearray( header ) ) + fp.write( file_data ) fp.close() +#} + +# Main compiler, uses string as the identifier for the collection +# +def write_model(collection_name): +#{ + global g_encoder + print( F"Model graph | Create mode '{collection_name}'" ) + + collection = bpy.data.collections[collection_name] + + encoder_init() + encoder_build_scene_graph( collection ) + + # Compile + # + print( " Comping objects" ) + it = encoder_graph_iterator( g_encoder['scene_graph'] ) + for node_def in it: + encoder_process_definition( node_def ) + + # Write + # + # TODO HOLY + path = F"/home/harry/Documents/carve/models_src/{collection_name}.mdl" + encoder_write_to_file( path ) + + print( F"Completed {collection_name}.mdl" ) +#} + + +# Clicky clicky GUI +# ------------------------------------------------------------------------------ + +cv_view_draw_handler = None +cv_view_shader = gpu.shader.from_builtin('3D_SMOOTH_COLOR') + +def cv_draw(): + global cv_view_shader + cv_view_shader.bind() + gpu.state.depth_mask_set(False) + gpu.state.line_width_set(2.0) + gpu.state.face_culling_set('BACK') + gpu.state.depth_test_set('LESS') + gpu.state.blend_set('NONE') + + verts = [] + colours = [] + + #def drawbezier(p0,h0,p1,h1,c0,c1): + # nonlocal verts, colours + + # verts += [p0] + # verts += [h0] + # colours += [(0.5,0.5,0.5,1.0),(0.5,0.5,0.5,1)] + # verts += [p1] + # verts += [h1] + # colours += [(1.0,1.0,1,1),(1,1,1,1)] + # + # last = p0 + # for i in range(10): + # t = (i+1)/10 + # a0 = 1-t + + # tt = t*t + # ttt = tt*t + # p=ttt*p1+(3*tt-3*ttt)*h1+(3*ttt-6*tt+3*t)*h0+(3*tt-ttt-3*t+1)*p0 + # verts += [(last[0],last[1],last[2])] + # verts += [(p[0],p[1],p[2])] + # colours += [c0*a0+c1*(1-a0),c0*a0+c1*(1-a0)] + # last = p + + course_count = 0 + + def drawbhandle(obj, direction, colour): + nonlocal verts, colours + p0 = obj.location + h0 = obj.matrix_world @ Vector((0,direction,0)) + verts += [p0] + verts += [h0] + colours += [colour,colour] + + def drawbezier(p0,h0,p1,h1,c0,c1): + nonlocal verts, colours + + last = p0 + for i in range(10): + t = (i+1)/10 + a0 = 1-t + + tt = t*t + ttt = tt*t + p=ttt*p1+(3*tt-3*ttt)*h1+(3*ttt-6*tt+3*t)*h0+(3*tt-ttt-3*t+1)*p0 + verts += [(last[0],last[1],last[2])] + verts += [(p[0],p[1],p[2])] + colours += [c0*a0+c1*(1-a0),c0*a0+c1*(1-a0)] + last = p + + def drawsbpath(o0,o1,c0,c1,s0,s1): + nonlocal course_count + + offs = ((course_count % 2)*2-1) * course_count * 0.02 + + p0 = o0.matrix_world @ Vector((offs, 0,0)) + h0 = o0.matrix_world @ Vector((offs, s0,0)) + p1 = o1.matrix_world @ Vector((offs, 0,0)) + h1 = o1.matrix_world @ Vector((offs,-s1,0)) + drawbezier(p0,h0,p1,h1,c0,c1) + + def drawbpath(o0,o1,c0,c1): + drawsbpath(o0,o1,c0,c1,1.0,1.0) + + def drawbline(p0,p1,c0,c1): + nonlocal verts, colours + verts += [p0,p1] + colours += [c0,c1] + + for obj in bpy.context.collection.objects: + if obj.type == 'ARMATURE': + for bone in obj.data.bones: + if bone.cv_data.collider and obj.data.pose_position == 'REST': + c = bone.head_local + a = bone.cv_data.v0 + b = bone.cv_data.v1 + + vs = [None]*8 + vs[0]=obj.matrix_world@Vector((c[0]+a[0],c[1]+a[1],c[2]+a[2])) + vs[1]=obj.matrix_world@Vector((c[0]+a[0],c[1]+b[1],c[2]+a[2])) + vs[2]=obj.matrix_world@Vector((c[0]+b[0],c[1]+b[1],c[2]+a[2])) + vs[3]=obj.matrix_world@Vector((c[0]+b[0],c[1]+a[1],c[2]+a[2])) + vs[4]=obj.matrix_world@Vector((c[0]+a[0],c[1]+a[1],c[2]+b[2])) + 
vs[5]=obj.matrix_world@Vector((c[0]+a[0],c[1]+b[1],c[2]+b[2])) + vs[6]=obj.matrix_world@Vector((c[0]+b[0],c[1]+b[1],c[2]+b[2])) + vs[7]=obj.matrix_world@Vector((c[0]+b[0],c[1]+a[1],c[2]+b[2])) + + indices = [(0,1),(1,2),(2,3),(3,0),(4,5),(5,6),(6,7),(7,4),\ + (0,4),(1,5),(2,6),(3,7)] + + for l in indices: + v0 = vs[l[0]] + v1 = vs[l[1]] + verts += [(v0[0],v0[1],v0[2])] + verts += [(v1[0],v1[1],v1[2])] + colours += [(0.5,0.5,0.5,0.5),(0.5,0.5,0.5,0.5)] + + center=obj.matrix_world@c + + def _angle_lim( major, minor, amin, amax, colour ): + nonlocal verts, colours + f = 0.05 + ay = major*f + ax = minor*f + + for x in range(16): + t0 = x/16 + t1 = (x+1)/16 + a0 = amin*(1.0-t0)+amax*t0 + a1 = amin*(1.0-t1)+amax*t1 + + p0 = c + major*f*math.cos(a0) + minor*f*math.sin(a0) + p1 = c + major*f*math.cos(a1) + minor*f*math.sin(a1) + + p0=obj.matrix_world @ p0 + p1=obj.matrix_world @ p1 + verts += [p0,p1] + colours += [colour,colour] + + if x == 0: + verts += [p0,c] + colours += [colour,colour] + if x == 15: + verts += [p1,c] + colours += [colour,colour] + + verts += [c+major*1.2*f,c+major*f*0.8] + colours += [colour,colour] + + if bone.cv_data.con0: + _angle_lim( Vector((0,1,0)),Vector((0,0,1)), \ + bone.cv_data.mins[0], bone.cv_data.maxs[0], \ + (1,0,0,1)) + _angle_lim( Vector((0,0,1)),Vector((1,0,0)), \ + bone.cv_data.mins[1], bone.cv_data.maxs[1], \ + (0,1,0,1)) + _angle_lim( Vector((1,0,0)),Vector((0,1,0)), \ + bone.cv_data.mins[2], bone.cv_data.maxs[2], \ + (0,0,1,1)) + + + if obj.cv_data.classtype == 'classtype_gate': + if obj.type == 'MESH': + dims = obj.data.cv_data.v0 + else: + dims = obj.cv_data.v0 + + vs = [None]*9 + c = Vector((0,0,dims[2])) + + vs[0] = obj.matrix_world @ Vector((-dims[0],0.0,-dims[1]+dims[2])) + vs[1] = obj.matrix_world @ Vector((-dims[0],0.0, dims[1]+dims[2])) + vs[2] = obj.matrix_world @ Vector(( dims[0],0.0, dims[1]+dims[2])) + vs[3] = obj.matrix_world @ Vector(( dims[0],0.0,-dims[1]+dims[2])) + vs[4] = obj.matrix_world @ (c+Vector((-1,0,-2))) + vs[5] = obj.matrix_world @ (c+Vector((-1,0, 2))) + vs[6] = obj.matrix_world @ (c+Vector(( 1,0, 2))) + vs[7] = obj.matrix_world @ (c+Vector((-1,0, 0))) + vs[8] = obj.matrix_world @ (c+Vector(( 1,0, 0))) + + indices = [(0,1),(1,2),(2,3),(3,0),(4,5),(5,6),(7,8)] + + for l in indices: + v0 = vs[l[0]] + v1 = vs[l[1]] + verts += [(v0[0],v0[1],v0[2])] + verts += [(v1[0],v1[1],v1[2])] + colours += [(1,1,0,1),(1,1,0,1)] + + sw = (0.4,0.4,0.4,0.2) + if obj.cv_data.target != None: + drawbline( obj.location, obj.cv_data.target.location, sw,sw ) + + elif obj.cv_data.classtype == 'classtype_route_node': + sw = Vector((0.4,0.4,0.4,0.2)) + sw2 = Vector((1.5,0.2,0.2,0.0)) + if obj.cv_data.target != None: + drawbpath( obj, obj.cv_data.target, sw, sw ) + if obj.cv_data.target1 != None: + drawbpath( obj, obj.cv_data.target1, sw, sw ) + + drawbhandle( obj, 1.0, (0.8,0.8,0.8,1.0) ) + drawbhandle( obj, -1.0, (0.4,0.4,0.4,1.0) ) + + p1 = obj.location+ \ + obj.matrix_world.to_quaternion() @ Vector((0,0,-6+1.5)) + drawbline( obj.location, p1, sw,sw2 ) + + elif obj.cv_data.classtype == 'classtype_achievement_box': + a = Vector((-1,-1,-1)) + b = Vector((1,1,1)) + + vs = [None]*8 + vs[0] = obj.matrix_world @ Vector((a[0], a[1], a[2])) + vs[1] = obj.matrix_world @ Vector((a[0], b[1], a[2])) + vs[2] = obj.matrix_world @ Vector((b[0], b[1], a[2])) + vs[3] = obj.matrix_world @ Vector((b[0], a[1], a[2])) + vs[4] = obj.matrix_world @ Vector((a[0], a[1], b[2])) + vs[5] = obj.matrix_world @ Vector((a[0], b[1], b[2])) + vs[6] = obj.matrix_world @ Vector((b[0], 
b[1], b[2])) + vs[7] = obj.matrix_world @ Vector((b[0], a[1], b[2])) + + indices = [(0,1),(1,2),(2,3),(3,0),(4,5),(5,6),(6,7),(7,4),\ + (0,4),(1,5),(2,6),(3,7)] + + for l in indices: + v0 = vs[l[0]] + v1 = vs[l[1]] + verts += [(v0[0],v0[1],v0[2])] + verts += [(v1[0],v1[1],v1[2])] + colours += [(0,1,0,1),(0,1,0,1)] + + if obj.cv_data.target != None: + vs = [None]*2 + vs[0] = obj.location + vs[1] = obj.cv_data.target.location + indices = [(0,1)] + for l in indices: + v0 = vs[l[0]] + v1 = vs[l[1]] + verts += [(v0[0],v0[1],v0[2])] + verts += [(v1[0],v1[1],v1[2])] + colours += [(0,1,1,1),(0,1,1,1)] + + + elif obj.cv_data.classtype == 'classtype_block': + a = obj.data.cv_data.v0 + b = obj.data.cv_data.v1 + + vs = [None]*8 + vs[0] = obj.matrix_world @ Vector((a[0], a[1], a[2])) + vs[1] = obj.matrix_world @ Vector((a[0], b[1], a[2])) + vs[2] = obj.matrix_world @ Vector((b[0], b[1], a[2])) + vs[3] = obj.matrix_world @ Vector((b[0], a[1], a[2])) + vs[4] = obj.matrix_world @ Vector((a[0], a[1], b[2])) + vs[5] = obj.matrix_world @ Vector((a[0], b[1], b[2])) + vs[6] = obj.matrix_world @ Vector((b[0], b[1], b[2])) + vs[7] = obj.matrix_world @ Vector((b[0], a[1], b[2])) + + indices = [(0,1),(1,2),(2,3),(3,0),(4,5),(5,6),(6,7),(7,4),\ + (0,4),(1,5),(2,6),(3,7)] + + for l in indices: + v0 = vs[l[0]] + v1 = vs[l[1]] + verts += [(v0[0],v0[1],v0[2])] + verts += [(v1[0],v1[1],v1[2])] + colours += [(1,1,0,1),(1,1,0,1)] + + elif obj.cv_data.classtype == 'classtype_capsule': + h = obj.data.cv_data.v0[0] + r = obj.data.cv_data.v0[1] + + vs = [None]*10 + vs[0] = obj.matrix_world @ Vector((0.0,0.0, h*0.5 )) + vs[1] = obj.matrix_world @ Vector((0.0,0.0,-h*0.5 )) + vs[2] = obj.matrix_world @ Vector(( r,0.0, h*0.5-r)) + vs[3] = obj.matrix_world @ Vector(( -r,0.0, h*0.5-r)) + vs[4] = obj.matrix_world @ Vector(( r,0.0,-h*0.5+r)) + vs[5] = obj.matrix_world @ Vector(( -r,0.0,-h*0.5+r)) + vs[6] = obj.matrix_world @ Vector((0.0, r , h*0.5-r)) + vs[7] = obj.matrix_world @ Vector((0.0,-r , h*0.5-r)) + vs[8] = obj.matrix_world @ Vector((0.0, r ,-h*0.5+r)) + vs[9] = obj.matrix_world @ Vector((0.0,-r ,-h*0.5+r)) + + indices = [(0,1),(2,3),(4,5),(6,7),(8,9)] + + for l in indices: + v0 = vs[l[0]] + v1 = vs[l[1]] + verts += [(v0[0],v0[1],v0[2])] + verts += [(v1[0],v1[1],v1[2])] + colours += [(0.5,1,0,1),(0.5,1,0,1)] + + elif obj.cv_data.classtype == 'classtype_spawn': + vs = [None]*4 + vs[0] = obj.matrix_world @ Vector((0,0,0)) + vs[1] = obj.matrix_world @ Vector((0,2,0)) + vs[2] = obj.matrix_world @ Vector((0.5,1,0)) + vs[3] = obj.matrix_world @ Vector((-0.5,1,0)) + indices = [(0,1),(1,2),(1,3)] + for l in indices: + v0 = vs[l[0]] + v1 = vs[l[1]] + verts += [(v0[0],v0[1],v0[2])] + verts += [(v1[0],v1[1],v1[2])] + colours += [(0,1,1,1),(0,1,1,1)] + + elif obj.cv_data.classtype == 'classtype_route': + vs = [None]*2 + vs[0] = obj.location + vs[1] = obj.cv_data.target.location + indices = [(0,1)] + for l in indices: + v0 = vs[l[0]] + v1 = vs[l[1]] + verts += [(v0[0],v0[1],v0[2])] + verts += [(v1[0],v1[1],v1[2])] + colours += [(0,1,1,1),(0,1,1,1)] + + stack = [None]*64 + stack_i = [0]*64 + stack[0] = obj.cv_data.target + si = 1 + loop_complete = False + + while si > 0: + if stack_i[si-1] == 2: + si -= 1 + continue + + if si == 0: # Loop failed to complete + break + + node = stack[si-1] + + targets = [None,None] + targets[0] = node.cv_data.target + + if node.cv_data.classtype == 'classtype_route_node': + targets[1] = node.cv_data.target1 + + nextnode = targets[stack_i[si-1]] + stack_i[si-1] += 1 + + if nextnode != None: # branch + if 
nextnode == stack[0]: # Loop completed + loop_complete = True + break + + valid=True + for sj in range(si): + if stack[sj] == nextnode: # invalidated path + valid=False + break + + if valid: + stack_i[si] = 0 + stack[si] = nextnode + si += 1 + continue + + if loop_complete: + cc = Vector((obj.cv_data.colour[0],\ + obj.cv_data.colour[1],\ + obj.cv_data.colour[2],\ + 1.0)) + + for sj in range(si): + sk = (sj+1)%si + + if stack[sj].cv_data.classtype == 'classtype_gate' and \ + stack[sk].cv_data.classtype == 'classtype_gate': + dist = (stack[sj].location-stack[sk].location).magnitude + drawsbpath( stack[sj], stack[sk], cc*0.4, cc, dist, dist ) + + else: + drawbpath( stack[sj], stack[sk], cc, cc ) + + course_count += 1 + + elif obj.cv_data.classtype == 'classtype_car_path': + v0 = obj.matrix_world.to_quaternion() @ Vector((0,1,0)) + c0 = Vector((v0.x*0.5+0.5, v0.y*0.5+0.5, 0.0, 1.0)) + drawbhandle( obj, 1.0, (0.9,0.9,0.9,1.0) ) + + if obj.cv_data.target != None: + v1 = obj.cv_data.target.matrix_world.to_quaternion()@Vector((0,1,0)) + c1 = Vector((v1.x*0.5+0.5, v1.y*0.5+0.5, 0.0, 1.0)) + + drawbhandle( obj.cv_data.target, -1.0, (0.5,0.5,0.5,1.0) ) + drawbpath( obj, obj.cv_data.target, c0, c1 ) + + if obj.cv_data.target1 != None: + v1 = obj.cv_data.target1.matrix_world.to_quaternion()@Vector((0,1,0)) + c1 = Vector((v1.x*0.5+0.5, v1.y*0.5+0.5, 0.0, 1.0)) + + drawbhandle( obj.cv_data.target1, -1.0, (0.5,0.5,0.5,1.0) ) + drawbpath( obj, obj.cv_data.target1, c0, c1 ) + + lines = batch_for_shader(\ + cv_view_shader, 'LINES', \ + { "pos":verts, "color":colours }) + + lines.draw( cv_view_shader ) + +def cv_poll_target(scene, obj): + if obj == bpy.context.active_object: + return False + if obj.cv_data.classtype == 'classtype_none': + return False + return True + +class CV_MESH_SETTINGS(bpy.types.PropertyGroup): + v0: bpy.props.FloatVectorProperty(name="v0",size=3) + v1: bpy.props.FloatVectorProperty(name="v1",size=3) + v2: bpy.props.FloatVectorProperty(name="v2",size=3) + v3: bpy.props.FloatVectorProperty(name="v3",size=3) + +class CV_OBJ_SETTINGS(bpy.types.PropertyGroup): + uid: bpy.props.IntProperty( name="" ) + + strp: bpy.props.StringProperty( name="strp" ) + intp: bpy.props.IntProperty( name="intp" ) + fltp: bpy.props.FloatProperty( name="fltp" ) + + target: bpy.props.PointerProperty( type=bpy.types.Object, name="target", \ + poll=cv_poll_target ) + target1: bpy.props.PointerProperty( type=bpy.types.Object, name="target1", \ + poll=cv_poll_target ) + + colour: bpy.props.FloatVectorProperty(name="colour",subtype='COLOR',\ + min=0.0,max=1.0) + + classtype: bpy.props.EnumProperty( + name="Format", + items = [ + ('classtype_none', "classtype_none", "", 0), + ('classtype_gate', "classtype_gate", "", 1), + ('classtype_block', "classtype_block", "", 2), + ('classtype_spawn', "classtype_spawn", "", 3), + ('classtype_water', "classtype_water", "", 4), + ('classtype_car_path', "classtype_car_path", "", 5), + ('classtype_INSTANCE', "","", 6 ), + ('classtype_capsule', "classtype_capsule", "", 7 ), + ('classtype_route_node', "classtype_route_node", "", 8 ), + ('classtype_route', "classtype_route", "", 9 ), + ('classtype_bone',"classtype_bone","",10), + ('classtype_SKELETON', "","", 11 ), + ('classtype_SKIN',"","",12), + ('classtype_achievement_box',"classtype_achievement_box","",13), + ('classtype_audio',"classtype_audio","",14), + ]) + +class CV_BONE_SETTINGS(bpy.types.PropertyGroup): + collider: bpy.props.BoolProperty(name="Collider",default=False) + v0: bpy.props.FloatVectorProperty(name="v0",size=3) + v1: 
bpy.props.FloatVectorProperty(name="v1",size=3) + + con0: bpy.props.BoolProperty(name="Constriant 0",default=False) + mins: bpy.props.FloatVectorProperty(name="mins",size=3) + maxs: bpy.props.FloatVectorProperty(name="maxs",size=3) + +class CV_BONE_PANEL(bpy.types.Panel): + bl_label="Bone Config" + bl_idname="SCENE_PT_cv_bone" + bl_space_type='PROPERTIES' + bl_region_type='WINDOW' + bl_context='bone' + + def draw(_,context): + active_object = context.active_object + if active_object == None: return + + bone = active_object.data.bones.active + if bone == None: return + + _.layout.prop( bone.cv_data, "collider" ) + _.layout.prop( bone.cv_data, "v0" ) + _.layout.prop( bone.cv_data, "v1" ) + + _.layout.label( text="Angle Limits" ) + _.layout.prop( bone.cv_data, "con0" ) + _.layout.prop( bone.cv_data, "mins" ) + _.layout.prop( bone.cv_data, "maxs" ) + +class CV_SCENE_SETTINGS(bpy.types.PropertyGroup): + use_hidden: bpy.props.BoolProperty( name="use hidden", default=False ) + +class CV_OBJ_PANEL(bpy.types.Panel): + bl_label="Entity Config" + bl_idname="SCENE_PT_cv_entity" + bl_space_type='PROPERTIES' + bl_region_type='WINDOW' + bl_context="object" + + def draw(_,context): + active_object = bpy.context.active_object + if active_object == None: return + if active_object.type == 'ARMATURE': + #{ + row = _.layout.row() + row.enabled = False + row.label( text="This object has the intrinsic classtype of skeleton" ) + return + #} + + _.layout.prop( active_object.cv_data, "classtype" ) + + if active_object.cv_data.classtype == 'classtype_gate': + _.layout.prop( active_object.cv_data, "target" ) + + mesh = active_object.data + _.layout.label( text=F"(i) Data is stored in {mesh.name}" ) + _.layout.prop( mesh.cv_data, "v0" ) + + elif active_object.cv_data.classtype == 'classtype_car_path' or \ + active_object.cv_data.classtype == 'classtype_route_node': + _.layout.prop( active_object.cv_data, "target" ) + _.layout.prop( active_object.cv_data, "target1" ) + + elif active_object.cv_data.classtype == 'classtype_route': + _.layout.prop( active_object.cv_data, "target" ) + _.layout.prop( active_object.cv_data, "colour" ) + + elif active_object.cv_data.classtype == 'classtype_block': + mesh = active_object.data + + _.layout.label( text=F"(i) Data is stored in {mesh.name}" ) + _.layout.prop( mesh.cv_data, "v0" ) + _.layout.prop( mesh.cv_data, "v1" ) + _.layout.prop( mesh.cv_data, "v2" ) + _.layout.prop( mesh.cv_data, "v3" ) + elif active_object.cv_data.classtype == 'classtype_capsule': + mesh = active_object.data + _.layout.label( text=F"(i) Data is stored in {mesh.name}" ) + _.layout.prop( mesh.cv_data, "v0" ) + elif active_object.cv_data.classtype == 'classtype_achievement_box': + _.layout.prop( active_object.cv_data, "strp" ) + _.layout.prop( active_object.cv_data, "target" ) + elif active_object.cv_data.classtype == 'classtype_audio': + _.layout.prop( active_object.cv_data, "strp" ) + _.layout.prop( active_object.cv_data, "intp" ) + _.layout.prop( active_object.cv_data, "fltp" ) + +class CV_INTERFACE(bpy.types.Panel): + bl_idname = "VIEW3D_PT_carve" + bl_label = "Carve" + bl_space_type = 'VIEW_3D' + bl_region_type = 'UI' + bl_category = "Carve" + + def draw(_, context): + layout = _.layout + layout.prop( context.scene.cv_data, "use_hidden") + layout.operator( "carve.compile_all" ) + +def test_compile(): + view_layer = bpy.context.view_layer + for col in view_layer.layer_collection.children["export"].children: + if not col.hide_viewport or bpy.context.scene.cv_data.use_hidden: + write_model( col.name ) + 
+class CV_COMPILE(bpy.types.Operator): + bl_idname="carve.compile_all" + bl_label="Compile All" + + def execute(_,context): + test_compile() + #cProfile.runctx("test_compile()",globals(),locals(),sort=1) + #for col in bpy.data.collections["export"].children: + # write_model( col.name ) + + return {'FINISHED'} + +classes = [CV_OBJ_SETTINGS,CV_OBJ_PANEL,CV_COMPILE,CV_INTERFACE,\ + CV_MESH_SETTINGS, CV_SCENE_SETTINGS, CV_BONE_SETTINGS,\ + CV_BONE_PANEL] + +def register(): + global cv_view_draw_handler + + for c in classes: + bpy.utils.register_class(c) + + bpy.types.Object.cv_data = bpy.props.PointerProperty(type=CV_OBJ_SETTINGS) + bpy.types.Mesh.cv_data = bpy.props.PointerProperty(type=CV_MESH_SETTINGS) + bpy.types.Scene.cv_data = bpy.props.PointerProperty(type=CV_SCENE_SETTINGS) + bpy.types.Bone.cv_data = bpy.props.PointerProperty(type=CV_BONE_SETTINGS) + + cv_view_draw_handler = bpy.types.SpaceView3D.draw_handler_add(\ + cv_draw,(),'WINDOW','POST_VIEW') + +def unregister(): + global cv_view_draw_handler + + for c in classes: + bpy.utils.unregister_class(c) -write_model( "test" ) -write_model( "free_dev" ) -write_model( "char_dev" ) -write_model( "skydome" ) -write_model( "cement_r1" ) + bpy.types.SpaceView3D.draw_handler_remove(cv_view_draw_handler,'WINDOW')
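+
+# Optional convenience for running this file straight from Blender's text
+# editor while iterating on it; when installed as an add-on, Blender calls
+# register() itself and __name__ is not "__main__", so this is skipped.
+#
+if __name__ == "__main__":
+   register()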